added calcs
This commit is contained in:
parent
ef12c9d308
commit
7886b7cfa5
10 changed files with 4331 additions and 0 deletions
235
libs/utils/src/calculations/basic-calculations.ts
Normal file
235
libs/utils/src/calculations/basic-calculations.ts
Normal file
|
|
@ -0,0 +1,235 @@
|
|||
/**
|
||||
* Basic Financial Calculations
|
||||
* Core mathematical functions for financial analysis
|
||||
*/
|
||||
|
||||
/**
|
||||
* Calculate percentage change between two values
|
||||
*/
|
||||
export function percentageChange(oldValue: number, newValue: number): number {
|
||||
if (oldValue === 0) return 0;
|
||||
return ((newValue - oldValue) / oldValue) * 100;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate simple return
|
||||
*/
|
||||
export function simpleReturn(initialPrice: number, finalPrice: number): number {
|
||||
if (initialPrice === 0) return 0;
|
||||
return (finalPrice - initialPrice) / initialPrice;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate logarithmic return
|
||||
*/
|
||||
export function logReturn(initialPrice: number, finalPrice: number): number {
|
||||
if (initialPrice <= 0 || finalPrice <= 0) return 0;
|
||||
return Math.log(finalPrice / initialPrice);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate compound annual growth rate (CAGR)
|
||||
*/
|
||||
export function calculateCAGR(startValue: number, endValue: number, years: number): number {
|
||||
if (years <= 0 || startValue <= 0 || endValue <= 0) return 0;
|
||||
return Math.pow(endValue / startValue, 1 / years) - 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate annualized return from periodic returns
|
||||
*/
|
||||
export function annualizeReturn(periodicReturn: number, periodsPerYear: number): number {
|
||||
return Math.pow(1 + periodicReturn, periodsPerYear) - 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate annualized volatility from periodic returns
|
||||
*/
|
||||
export function annualizeVolatility(periodicVolatility: number, periodsPerYear: number): number {
|
||||
return periodicVolatility * Math.sqrt(periodsPerYear);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate present value
|
||||
*/
|
||||
export function presentValue(futureValue: number, rate: number, periods: number): number {
|
||||
return futureValue / Math.pow(1 + rate, periods);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate future value
|
||||
*/
|
||||
export function futureValue(presentValue: number, rate: number, periods: number): number {
|
||||
return presentValue * Math.pow(1 + rate, periods);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate net present value of cash flows
|
||||
*/
|
||||
export function netPresentValue(cashFlows: number[], discountRate: number): number {
|
||||
return cashFlows.reduce((npv, cashFlow, index) => {
|
||||
return npv + cashFlow / Math.pow(1 + discountRate, index);
|
||||
}, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate internal rate of return (IRR) using Newton-Raphson method
|
||||
*/
|
||||
export function internalRateOfReturn(cashFlows: number[], guess: number = 0.1, maxIterations: number = 100): number {
|
||||
let rate = guess;
|
||||
|
||||
for (let i = 0; i < maxIterations; i++) {
|
||||
let npv = 0;
|
||||
let dnpv = 0;
|
||||
|
||||
for (let j = 0; j < cashFlows.length; j++) {
|
||||
npv += cashFlows[j] / Math.pow(1 + rate, j);
|
||||
dnpv += -j * cashFlows[j] / Math.pow(1 + rate, j + 1);
|
||||
}
|
||||
|
||||
if (Math.abs(npv) < 1e-10) break;
|
||||
if (Math.abs(dnpv) < 1e-10) break;
|
||||
|
||||
rate = rate - npv / dnpv;
|
||||
}
|
||||
|
||||
return rate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate payback period
|
||||
*/
|
||||
export function paybackPeriod(initialInvestment: number, cashFlows: number[]): number {
|
||||
let cumulativeCashFlow = 0;
|
||||
|
||||
for (let i = 0; i < cashFlows.length; i++) {
|
||||
cumulativeCashFlow += cashFlows[i];
|
||||
if (cumulativeCashFlow >= initialInvestment) {
|
||||
return i + 1 - (cumulativeCashFlow - initialInvestment) / cashFlows[i];
|
||||
}
|
||||
}
|
||||
|
||||
return -1; // Never pays back
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate compound interest
|
||||
*/
|
||||
export function compoundInterest(
|
||||
principal: number,
|
||||
rate: number,
|
||||
periods: number,
|
||||
compoundingFrequency: number = 1
|
||||
): number {
|
||||
return principal * Math.pow(1 + rate / compoundingFrequency, compoundingFrequency * periods);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate effective annual rate
|
||||
*/
|
||||
export function effectiveAnnualRate(nominalRate: number, compoundingFrequency: number): number {
|
||||
return Math.pow(1 + nominalRate / compoundingFrequency, compoundingFrequency) - 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate bond price given yield
|
||||
*/
|
||||
export function bondPrice(
|
||||
faceValue: number,
|
||||
couponRate: number,
|
||||
yieldToMaturity: number,
|
||||
periodsToMaturity: number,
|
||||
paymentsPerYear: number = 2
|
||||
): number {
|
||||
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
|
||||
const discountRate = yieldToMaturity / paymentsPerYear;
|
||||
|
||||
let price = 0;
|
||||
|
||||
// Present value of coupon payments
|
||||
for (let i = 1; i <= periodsToMaturity; i++) {
|
||||
price += couponPayment / Math.pow(1 + discountRate, i);
|
||||
}
|
||||
|
||||
// Present value of face value
|
||||
price += faceValue / Math.pow(1 + discountRate, periodsToMaturity);
|
||||
|
||||
return price;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate bond yield given price (Newton-Raphson approximation)
|
||||
*/
|
||||
export function bondYield(
|
||||
price: number,
|
||||
faceValue: number,
|
||||
couponRate: number,
|
||||
periodsToMaturity: number,
|
||||
paymentsPerYear: number = 2,
|
||||
guess: number = 0.05
|
||||
): number {
|
||||
let yield_ = guess;
|
||||
const maxIterations = 100;
|
||||
const tolerance = 1e-8;
|
||||
|
||||
for (let i = 0; i < maxIterations; i++) {
|
||||
const calculatedPrice = bondPrice(faceValue, couponRate, yield_, periodsToMaturity, paymentsPerYear);
|
||||
const diff = calculatedPrice - price;
|
||||
|
||||
if (Math.abs(diff) < tolerance) break;
|
||||
|
||||
// Numerical derivative
|
||||
const delta = 0.0001;
|
||||
const priceUp = bondPrice(faceValue, couponRate, yield_ + delta, periodsToMaturity, paymentsPerYear);
|
||||
const derivative = (priceUp - calculatedPrice) / delta;
|
||||
|
||||
if (Math.abs(derivative) < tolerance) break;
|
||||
|
||||
yield_ = yield_ - diff / derivative;
|
||||
}
|
||||
|
||||
return yield_;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate duration (Macaulay duration)
|
||||
*/
|
||||
export function macaulayDuration(
|
||||
faceValue: number,
|
||||
couponRate: number,
|
||||
yieldToMaturity: number,
|
||||
periodsToMaturity: number,
|
||||
paymentsPerYear: number = 2
|
||||
): number {
|
||||
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
|
||||
const discountRate = yieldToMaturity / paymentsPerYear;
|
||||
const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear);
|
||||
|
||||
let weightedTime = 0;
|
||||
|
||||
// Weighted time of coupon payments
|
||||
for (let i = 1; i <= periodsToMaturity; i++) {
|
||||
const presentValue = couponPayment / Math.pow(1 + discountRate, i);
|
||||
weightedTime += (i * presentValue) / bondPriceValue;
|
||||
}
|
||||
|
||||
// Weighted time of face value
|
||||
const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity);
|
||||
weightedTime += (periodsToMaturity * faceValuePV) / bondPriceValue;
|
||||
|
||||
return weightedTime / paymentsPerYear; // Convert to years
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate modified duration
|
||||
*/
|
||||
export function modifiedDuration(
|
||||
faceValue: number,
|
||||
couponRate: number,
|
||||
yieldToMaturity: number,
|
||||
periodsToMaturity: number,
|
||||
paymentsPerYear: number = 2
|
||||
): number {
|
||||
const macDuration = macaulayDuration(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear);
|
||||
return macDuration / (1 + yieldToMaturity / paymentsPerYear);
|
||||
}
|
||||
835
libs/utils/src/calculations/correlation-analysis.ts
Normal file
835
libs/utils/src/calculations/correlation-analysis.ts
Normal file
|
|
@ -0,0 +1,835 @@
|
|||
/**
 * Correlation Analysis Module
 *
 * Provides comprehensive correlation and covariance analysis tools for financial time series.
 * Includes correlation matrices, rolling correlations, regime-dependent correlations,
 * and advanced correlation modeling techniques.
 */

/** A pairwise correlation estimate together with its significance test. */
export interface CorrelationResult {
  /** Estimated correlation coefficient in [-1, 1]. */
  correlation: number;
  /** Two-sided p-value for H0: no association. */
  pValue: number;
  /** True when pValue < 0.05. */
  significance: boolean;
  /** 95% confidence interval — only populated by pearsonCorrelation. */
  confidenceInterval?: [number, number];
}

/** Full pairwise correlation matrix plus spectral diagnostics. */
export interface CorrelationMatrix {
  /** n x n matrix of pairwise correlations (1s on the diagonal). */
  matrix: number[][];
  /** Series labels, one per row/column. */
  labels: string[];
  /** Leading eigenvalues (at most the first 3 are computed). */
  eigenvalues: number[];
  /** Eigenvectors corresponding to `eigenvalues`. */
  eigenvectors: number[][];
  /** Ratio of largest to smallest non-negligible computed eigenvalue. */
  conditionNumber: number;
}

/** Rolling-window correlation series with summary statistics. */
export interface RollingCorrelationResult {
  correlations: number[]; // one value per window position
  timestamps: Date[];     // timestamp of each window's last observation
  average: number;        // mean of `correlations`
  volatility: number;     // population std dev of `correlations`
  min: number;
  max: number;
}

/** Sample covariance matrix plus derived quantities. */
export interface CovarianceMatrix {
  matrix: number[][];       // n x n sample covariance (divides by m - 1)
  labels: string[];
  volatilities: number[];   // square roots of the diagonal
  correlations: number[][]; // covariance rescaled by the volatilities
  eigenvalues: number[];    // leading eigenvalues (at most 3 computed)
  determinant: number;      // product of the computed eigenvalues — approximate when n > 3
}

/** Decomposition of dependence into linear/nonlinear/tail/rank components. */
export interface CorrelationBreakdown {
  linear: number;
  nonlinear: number;
  tail: number;
  rank: number;
}

/** Output of the (simplified) DCC model fit. */
export interface DynamicCorrelationModel {
  parameters: number[];   // [alpha, beta]
  correlations: number[]; // fitted conditional correlation path
  logLikelihood: number;
  aic: number;            // Akaike information criterion
  bic: number;            // Bayesian information criterion
}
|
||||
|
||||
/**
|
||||
* Calculate Pearson correlation coefficient between two time series
|
||||
*/
|
||||
export function pearsonCorrelation(
|
||||
x: number[],
|
||||
y: number[]
|
||||
): CorrelationResult {
|
||||
if (x.length !== y.length || x.length < 2) {
|
||||
throw new Error('Arrays must have same length and at least 2 observations');
|
||||
}
|
||||
|
||||
const n = x.length;
|
||||
const sumX = x.reduce((a, b) => a + b, 0);
|
||||
const sumY = y.reduce((a, b) => a + b, 0);
|
||||
const sumXY = x.reduce((sum, xi, i) => sum + xi * y[i], 0);
|
||||
const sumX2 = x.reduce((sum, xi) => sum + xi * xi, 0);
|
||||
const sumY2 = y.reduce((sum, yi) => sum + yi * yi, 0);
|
||||
|
||||
const numerator = n * sumXY - sumX * sumY;
|
||||
const denominator = Math.sqrt((n * sumX2 - sumX * sumX) * (n * sumY2 - sumY * sumY));
|
||||
|
||||
const correlation = denominator === 0 ? 0 : numerator / denominator;
|
||||
|
||||
// Calculate statistical significance (t-test)
|
||||
const df = n - 2;
|
||||
const tStat = correlation * Math.sqrt(df / (1 - correlation * correlation));
|
||||
const pValue = 2 * (1 - studentTCDF(Math.abs(tStat), df));
|
||||
const significance = pValue < 0.05;
|
||||
|
||||
// Calculate confidence interval (Fisher transformation)
|
||||
const z = 0.5 * Math.log((1 + correlation) / (1 - correlation));
|
||||
const seZ = 1 / Math.sqrt(n - 3);
|
||||
const zLower = z - 1.96 * seZ;
|
||||
const zUpper = z + 1.96 * seZ;
|
||||
const confidenceInterval: [number, number] = [
|
||||
(Math.exp(2 * zLower) - 1) / (Math.exp(2 * zLower) + 1),
|
||||
(Math.exp(2 * zUpper) - 1) / (Math.exp(2 * zUpper) + 1)
|
||||
];
|
||||
|
||||
return {
|
||||
correlation,
|
||||
pValue,
|
||||
significance,
|
||||
confidenceInterval
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Spearman rank correlation coefficient
|
||||
*/
|
||||
export function spearmanCorrelation(x: number[], y: number[]): CorrelationResult {
|
||||
if (x.length !== y.length || x.length < 2) {
|
||||
throw new Error('Arrays must have same length and at least 2 observations');
|
||||
}
|
||||
|
||||
// Convert to ranks
|
||||
const xRanks = getRanks(x);
|
||||
const yRanks = getRanks(y);
|
||||
|
||||
return pearsonCorrelation(xRanks, yRanks);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Kendall's tau correlation coefficient
|
||||
*/
|
||||
export function kendallTau(x: number[], y: number[]): CorrelationResult {
|
||||
if (x.length !== y.length || x.length < 2) {
|
||||
throw new Error('Arrays must have same length and at least 2 observations');
|
||||
}
|
||||
|
||||
const n = x.length;
|
||||
let concordant = 0;
|
||||
let discordant = 0;
|
||||
|
||||
for (let i = 0; i < n - 1; i++) {
|
||||
for (let j = i + 1; j < n; j++) {
|
||||
const xDiff = x[i] - x[j];
|
||||
const yDiff = y[i] - y[j];
|
||||
|
||||
if (xDiff * yDiff > 0) {
|
||||
concordant++;
|
||||
} else if (xDiff * yDiff < 0) {
|
||||
discordant++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const correlation = (concordant - discordant) / (n * (n - 1) / 2);
|
||||
|
||||
// Approximate p-value for large samples
|
||||
const variance = (2 * (2 * n + 5)) / (9 * n * (n - 1));
|
||||
const z = correlation / Math.sqrt(variance);
|
||||
const pValue = 2 * (1 - normalCDF(Math.abs(z)));
|
||||
const significance = pValue < 0.05;
|
||||
|
||||
return {
|
||||
correlation,
|
||||
pValue,
|
||||
significance
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate correlation matrix for multiple time series
|
||||
*/
|
||||
export function correlationMatrix(
|
||||
data: number[][],
|
||||
labels: string[] = [],
|
||||
method: 'pearson' | 'spearman' | 'kendall' = 'pearson'
|
||||
): CorrelationMatrix {
|
||||
const n = data.length;
|
||||
|
||||
if (labels.length === 0) {
|
||||
labels = Array.from({ length: n }, (_, i) => `Series${i + 1}`);
|
||||
}
|
||||
|
||||
const matrix: number[][] = Array(n).fill(null).map(() => Array(n).fill(0));
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < n; j++) {
|
||||
if (i === j) {
|
||||
matrix[i][j] = 1;
|
||||
} else {
|
||||
let corrResult: CorrelationResult;
|
||||
switch (method) {
|
||||
case 'spearman':
|
||||
corrResult = spearmanCorrelation(data[i], data[j]);
|
||||
break;
|
||||
case 'kendall':
|
||||
corrResult = kendallTau(data[i], data[j]);
|
||||
break;
|
||||
default:
|
||||
corrResult = pearsonCorrelation(data[i], data[j]);
|
||||
}
|
||||
matrix[i][j] = corrResult.correlation;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate eigenvalues and eigenvectors
|
||||
const { eigenvalues, eigenvectors } = eigenDecomposition(matrix);
|
||||
|
||||
// Calculate condition number
|
||||
const conditionNumber = Math.max(...eigenvalues) / Math.min(...eigenvalues.filter(x => x > 1e-10));
|
||||
|
||||
return {
|
||||
matrix,
|
||||
labels,
|
||||
eigenvalues,
|
||||
eigenvectors,
|
||||
conditionNumber
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate rolling correlation between two time series
|
||||
*/
|
||||
export function rollingCorrelation(
|
||||
x: number[],
|
||||
y: number[],
|
||||
window: number,
|
||||
timestamps?: Date[]
|
||||
): RollingCorrelationResult {
|
||||
if (x.length !== y.length || window > x.length) {
|
||||
throw new Error('Invalid input parameters');
|
||||
}
|
||||
|
||||
const correlations: number[] = [];
|
||||
const resultTimestamps: Date[] = [];
|
||||
|
||||
for (let i = window - 1; i < x.length; i++) {
|
||||
const xWindow = x.slice(i - window + 1, i + 1);
|
||||
const yWindow = y.slice(i - window + 1, i + 1);
|
||||
|
||||
const corr = pearsonCorrelation(xWindow, yWindow).correlation;
|
||||
correlations.push(corr);
|
||||
|
||||
if (timestamps) {
|
||||
resultTimestamps.push(timestamps[i]);
|
||||
} else {
|
||||
resultTimestamps.push(new Date(i));
|
||||
}
|
||||
}
|
||||
|
||||
const average = correlations.reduce((a, b) => a + b, 0) / correlations.length;
|
||||
const variance = correlations.reduce((sum, corr) => sum + Math.pow(corr - average, 2), 0) / correlations.length;
|
||||
const volatility = Math.sqrt(variance);
|
||||
const min = Math.min(...correlations);
|
||||
const max = Math.max(...correlations);
|
||||
|
||||
return {
|
||||
correlations,
|
||||
timestamps: resultTimestamps,
|
||||
average,
|
||||
volatility,
|
||||
min,
|
||||
max
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate covariance matrix
|
||||
*/
|
||||
export function covarianceMatrix(data: number[][], labels: string[] = []): CovarianceMatrix {
|
||||
const n = data.length;
|
||||
|
||||
if (labels.length === 0) {
|
||||
labels = Array.from({ length: n }, (_, i) => `Series${i + 1}`);
|
||||
}
|
||||
|
||||
// Calculate means
|
||||
const means = data.map(series => series.reduce((a, b) => a + b, 0) / series.length);
|
||||
|
||||
// Calculate covariance matrix
|
||||
const matrix: number[][] = Array(n).fill(null).map(() => Array(n).fill(0));
|
||||
const m = data[0].length; // Number of observations
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < n; j++) {
|
||||
let covariance = 0;
|
||||
for (let k = 0; k < m; k++) {
|
||||
covariance += (data[i][k] - means[i]) * (data[j][k] - means[j]);
|
||||
}
|
||||
matrix[i][j] = covariance / (m - 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate volatilities (standard deviations)
|
||||
const volatilities = data.map((series, i) => Math.sqrt(matrix[i][i]));
|
||||
|
||||
// Calculate correlation matrix from covariance matrix
|
||||
const correlations: number[][] = Array(n).fill(null).map(() => Array(n).fill(0));
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < n; j++) {
|
||||
correlations[i][j] = matrix[i][j] / (volatilities[i] * volatilities[j]);
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate eigenvalues
|
||||
const { eigenvalues } = eigenDecomposition(matrix);
|
||||
|
||||
// Calculate determinant
|
||||
const determinant = eigenvalues.reduce((prod, val) => prod * val, 1);
|
||||
|
||||
return {
|
||||
matrix,
|
||||
labels,
|
||||
volatilities,
|
||||
correlations,
|
||||
eigenvalues,
|
||||
determinant
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate partial correlation controlling for other variables
|
||||
*/
|
||||
export function partialCorrelation(
|
||||
x: number[],
|
||||
y: number[],
|
||||
controls: number[][]
|
||||
): CorrelationResult {
|
||||
// Use matrix operations to calculate partial correlation
|
||||
const n = x.length;
|
||||
const k = controls.length;
|
||||
|
||||
// Build design matrix
|
||||
const X = Array(n).fill(null).map(() => Array(k + 1).fill(1));
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < k; j++) {
|
||||
X[i][j + 1] = controls[j][i];
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate residuals for x and y after regressing on controls
|
||||
const xResiduals = calculateResiduals(x, X);
|
||||
const yResiduals = calculateResiduals(y, X);
|
||||
|
||||
return pearsonCorrelation(xResiduals, yResiduals);
|
||||
}
|
||||
|
||||
/**
 * Test for correlation regime changes
 *
 * Computes a rolling correlation between x and y, then applies a CUSUM-style
 * detector to split the correlation path into regimes of roughly constant
 * average correlation.
 *
 * @param window - rolling-correlation window length (observations)
 * @returns regimes as index ranges into the rolling-correlation series (with
 *   each regime's average correlation), the detected breakpoints, and a
 *   stability score in (0, 1] where 1 means no within-regime variance
 */
export function correlationRegimeAnalysis(
  x: number[],
  y: number[],
  window: number = 60
): {
  regimes: { start: number; end: number; correlation: number }[];
  breakpoints: number[];
  stability: number;
} {
  const rollingCorr = rollingCorrelation(x, y, window);
  const correlations = rollingCorr.correlations;

  // Detect regime changes using CUSUM test
  const breakpoints: number[] = [];
  const threshold = 2.0; // CUSUM threshold — NOTE(review): fixed, in cumulative correlation units; not scaled to the series' variability

  let cusum = 0;
  const mean = correlations.reduce((a, b) => a + b, 0) / correlations.length;

  // Accumulate deviation from the global mean; a breakpoint is declared when
  // the accumulator drifts past the threshold, then the accumulator resets.
  for (let i = 1; i < correlations.length; i++) {
    cusum += correlations[i] - mean;
    if (Math.abs(cusum) > threshold) {
      breakpoints.push(i);
      cusum = 0;
    }
  }

  // Build regimes: consecutive [start, breakpoint) spans with mean correlation
  const regimes: { start: number; end: number; correlation: number }[] = [];
  let start = 0;

  for (const breakpoint of breakpoints) {
    const regimeCorr = correlations.slice(start, breakpoint);
    const avgCorr = regimeCorr.reduce((a, b) => a + b, 0) / regimeCorr.length;

    regimes.push({
      start,
      end: breakpoint,
      correlation: avgCorr
    });
    start = breakpoint;
  }

  // Add final regime (from the last breakpoint to the end of the series)
  if (start < correlations.length) {
    const regimeCorr = correlations.slice(start);
    const avgCorr = regimeCorr.reduce((a, b) => a + b, 0) / regimeCorr.length;

    regimes.push({
      start,
      end: correlations.length,
      correlation: avgCorr
    });
  }

  // Stability = inverse of (1 + average within-regime variance)
  const regimeVariances = regimes.map(regime => {
    const regimeCorr = correlations.slice(regime.start, regime.end);
    const mean = regime.correlation;
    return regimeCorr.reduce((sum, corr) => sum + Math.pow(corr - mean, 2), 0) / regimeCorr.length;
  });

  const stability = 1 / (1 + regimeVariances.reduce((a, b) => a + b, 0) / regimeVariances.length);

  return {
    regimes,
    breakpoints,
    stability
  };
}
|
||||
|
||||
/**
|
||||
* Calculate tail correlation using copula methods
|
||||
*/
|
||||
export function tailCorrelation(
|
||||
x: number[],
|
||||
y: number[],
|
||||
threshold: number = 0.05
|
||||
): {
|
||||
upperTail: number;
|
||||
lowerTail: number;
|
||||
symmetric: boolean;
|
||||
} {
|
||||
const n = x.length;
|
||||
const upperThreshold = 1 - threshold;
|
||||
const lowerThreshold = threshold;
|
||||
|
||||
// Convert to uniform marginals
|
||||
const xRanks = getRanks(x).map(rank => rank / n);
|
||||
const yRanks = getRanks(y).map(rank => rank / n);
|
||||
|
||||
// Upper tail correlation
|
||||
let upperCount = 0;
|
||||
let upperTotal = 0;
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
if (xRanks[i] > upperThreshold) {
|
||||
upperTotal++;
|
||||
if (yRanks[i] > upperThreshold) {
|
||||
upperCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const upperTail = upperTotal > 0 ? upperCount / upperTotal : 0;
|
||||
|
||||
// Lower tail correlation
|
||||
let lowerCount = 0;
|
||||
let lowerTotal = 0;
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
if (xRanks[i] < lowerThreshold) {
|
||||
lowerTotal++;
|
||||
if (yRanks[i] < lowerThreshold) {
|
||||
lowerCount++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const lowerTail = lowerTotal > 0 ? lowerCount / lowerTotal : 0;
|
||||
|
||||
// Test for symmetry
|
||||
const symmetric = Math.abs(upperTail - lowerTail) < 0.1;
|
||||
|
||||
return {
|
||||
upperTail,
|
||||
lowerTail,
|
||||
symmetric
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Dynamic Conditional Correlation (DCC) model estimation
 *
 * Bivariate only: uses the FIRST TWO series in `data` regardless of how many
 * are supplied. Returns the fitted conditional correlation path plus
 * likelihood-based fit statistics (AIC/BIC).
 *
 * NOTE(review): the parameter "optimization" below is a fixed-step increment,
 * not a true gradient method (the inline comment admits as much), so the
 * returned parameters are rough at best — verify before relying on them.
 */
export function dccModel(
  data: number[][],
  maxIter: number = 100,
  tolerance: number = 1e-6
): DynamicCorrelationModel {
  const n = data.length; // number of series (only the first two are used below)
  const T = data[0].length; // number of time observations

  // Initialize parameters [alpha, beta]
  let params = [0.01, 0.95];

  // Standardize data (assume unit variance for simplicity)
  const standardizedData = data.map(series => {
    const mean = series.reduce((a, b) => a + b, 0) / series.length;
    const variance = series.reduce((sum, x) => sum + Math.pow(x - mean, 2), 0) / (series.length - 1);
    const std = Math.sqrt(variance);
    return series.map(x => (x - mean) / std);
  });

  let correlations: number[] = [];
  let logLikelihood = -Infinity;

  for (let iter = 0; iter < maxIter; iter++) {
    const [alpha, beta] = params;

    // Rebuild the dynamic correlation path under the current parameters
    correlations = [];

    // Initialize with unconditional correlation
    const unconditionalCorr = pearsonCorrelation(standardizedData[0], standardizedData[1]).correlation;
    let Qt = unconditionalCorr;

    let newLogLikelihood = 0;

    for (let t = 1; t < T; t++) {
      // DCC(1,1) recursion: mean reversion toward the unconditional level,
      // driven by the lagged cross-product of standardized shocks
      const prevShock = standardizedData[0][t-1] * standardizedData[1][t-1];
      Qt = (1 - alpha - beta) * unconditionalCorr + alpha * prevShock + beta * Qt;

      correlations.push(Qt);

      // Bivariate Gaussian log-likelihood contribution (constants dropped);
      // skipped when |Qt| >= 1, where the density is undefined
      const det = 1 - Qt * Qt;
      if (det > 0) {
        newLogLikelihood -= 0.5 * Math.log(det);
        newLogLikelihood -= 0.5 * (
          Math.pow(standardizedData[0][t], 2) +
          Math.pow(standardizedData[1][t], 2) -
          2 * Qt * standardizedData[0][t] * standardizedData[1][t]
        ) / det;
      }
    }

    // Check convergence
    if (Math.abs(newLogLikelihood - logLikelihood) < tolerance) {
      break;
    }

    logLikelihood = newLogLikelihood;

    // Simple gradient update (in practice, use more sophisticated optimization)
    // NOTE(review): this unconditionally nudges alpha and beta upward by 0.001
    // (clipped to keep alpha + beta < 1); it does not follow the gradient.
    params[0] = Math.max(0.001, Math.min(0.999, params[0] + 0.001));
    params[1] = Math.max(0.001, Math.min(0.999 - params[0], params[1] + 0.001));
  }

  // Calculate information criteria
  const k = 2; // Number of parameters
  const aic = -2 * logLikelihood + 2 * k;
  const bic = -2 * logLikelihood + k * Math.log(T);

  return {
    parameters: params,
    correlations,
    logLikelihood,
    aic,
    bic
  };
}
|
||||
|
||||
/**
 * Test for Granger causality in correlations
 *
 * Picks the AIC-optimal lag (1..maxLag) for a bivariate VAR, then F-tests, in
 * both directions, whether lags of one series improve prediction of the other
 * beyond its own lags.
 *
 * NOTE(review): assumes x and y have equal length — degrees of freedom use
 * x.length for both directions.
 */
export function grangerCausalityTest(
  x: number[],
  y: number[],
  maxLag: number = 5
): {
  xCausesY: { fStatistic: number; pValue: number; significant: boolean };
  yCausesX: { fStatistic: number; pValue: number; significant: boolean };
  optimalLag: number;
} {
  let bestLag = 1;
  let minAIC = Infinity;

  // Find optimal lag by minimizing the VAR's AIC
  for (let lag = 1; lag <= maxLag; lag++) {
    const aic = calculateVARModel(x, y, lag).aic;
    if (aic < minAIC) {
      minAIC = aic;
      bestLag = lag;
    }
  }

  // Test x -> y causality: restricted AR(y) vs. the full VAR's y-equation
  const fullModel = calculateVARModel(x, y, bestLag);
  const restrictedModelY = calculateARModel(y, bestLag);

  // F = ((RSS_restricted - RSS_full) / lag) / (RSS_full / residual df)
  const fStatX = ((restrictedModelY.rss - fullModel.rssY) / bestLag) / (fullModel.rssY / (x.length - 2 * bestLag - 1));
  const pValueX = 1 - fCDF(fStatX, bestLag, x.length - 2 * bestLag - 1);

  // Test y -> x causality (same construction, roles swapped)
  const restrictedModelX = calculateARModel(x, bestLag);

  const fStatY = ((restrictedModelX.rss - fullModel.rssX) / bestLag) / (fullModel.rssX / (x.length - 2 * bestLag - 1));
  const pValueY = 1 - fCDF(fStatY, bestLag, x.length - 2 * bestLag - 1);

  return {
    xCausesY: {
      fStatistic: fStatX,
      pValue: pValueX,
      significant: pValueX < 0.05
    },
    yCausesX: {
      fStatistic: fStatY,
      pValue: pValueY,
      significant: pValueY < 0.05
    },
    optimalLag: bestLag
  };
}
|
||||
|
||||
// Helper functions
|
||||
|
||||
function getRanks(arr: number[]): number[] {
|
||||
const sorted = arr.map((val, idx) => ({ val, idx })).sort((a, b) => a.val - b.val);
|
||||
const ranks = new Array(arr.length);
|
||||
|
||||
for (let i = 0; i < sorted.length; i++) {
|
||||
ranks[sorted[i].idx] = i + 1;
|
||||
}
|
||||
|
||||
return ranks;
|
||||
}
|
||||
|
||||
function studentTCDF(t: number, df: number): number {
|
||||
// Approximation for Student's t CDF
|
||||
const x = df / (t * t + df);
|
||||
return 1 - 0.5 * betaIncomplete(df / 2, 0.5, x);
|
||||
}
|
||||
|
||||
function normalCDF(z: number): number {
|
||||
return 0.5 * (1 + erf(z / Math.sqrt(2)));
|
||||
}
|
||||
|
||||
function erf(x: number): number {
|
||||
// Approximation of error function
|
||||
const a1 = 0.254829592;
|
||||
const a2 = -0.284496736;
|
||||
const a3 = 1.421413741;
|
||||
const a4 = -1.453152027;
|
||||
const a5 = 1.061405429;
|
||||
const p = 0.3275911;
|
||||
|
||||
const sign = x >= 0 ? 1 : -1;
|
||||
x = Math.abs(x);
|
||||
|
||||
const t = 1.0 / (1.0 + p * x);
|
||||
const y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x);
|
||||
|
||||
return sign * y;
|
||||
}
|
||||
|
||||
/**
 * Simplified (crude) incomplete-beta approximation used by the t/F CDFs above.
 *
 * NOTE(review): this truncated series is NOT the regularized incomplete beta
 * function I_x(a, b) — it omits the normalizing beta constant and the usual
 * binomial/continued-fraction terms — so downstream p-values are approximate
 * at best. Verify against a statistics library before relying on them.
 */
function betaIncomplete(a: number, b: number, x: number): number {
  // Boundary values of the (regularized) incomplete beta
  if (x === 0) return 0;
  if (x === 1) return 1;

  // Truncated series; stops early once terms become negligible
  let result = 0;
  for (let i = 0; i < 100; i++) {
    const term = Math.pow(x, a + i) * Math.pow(1 - x, b) / (a + i);
    result += term;
    if (Math.abs(term) < 1e-10) break;
  }

  return result;
}
|
||||
|
||||
function eigenDecomposition(matrix: number[][]): { eigenvalues: number[]; eigenvectors: number[][] } {
|
||||
// Simplified eigenvalue decomposition (for symmetric matrices)
|
||||
const n = matrix.length;
|
||||
|
||||
// Power iteration for largest eigenvalue
|
||||
const eigenvalues: number[] = [];
|
||||
const eigenvectors: number[][] = [];
|
||||
|
||||
for (let k = 0; k < Math.min(n, 3); k++) { // Calculate first 3 eigenvalues
|
||||
let v = Array(n).fill(1 / Math.sqrt(n));
|
||||
let lambda = 0;
|
||||
|
||||
for (let iter = 0; iter < 100; iter++) {
|
||||
const Av = matrix.map(row => row.reduce((sum, val, i) => sum + val * v[i], 0));
|
||||
const newLambda = Av.reduce((sum, val, i) => sum + val * v[i], 0);
|
||||
const norm = Math.sqrt(Av.reduce((sum, val) => sum + val * val, 0));
|
||||
|
||||
if (norm === 0) break;
|
||||
|
||||
v = Av.map(val => val / norm);
|
||||
|
||||
if (Math.abs(newLambda - lambda) < 1e-10) break;
|
||||
lambda = newLambda;
|
||||
}
|
||||
|
||||
eigenvalues.push(lambda);
|
||||
eigenvectors.push([...v]);
|
||||
|
||||
// Deflate matrix for next eigenvalue
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < n; j++) {
|
||||
matrix[i][j] -= lambda * v[i] * v[j];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { eigenvalues, eigenvectors };
|
||||
}
|
||||
|
||||
function calculateResiduals(y: number[], X: number[][]): number[] {
|
||||
// Simple linear regression to calculate residuals
|
||||
const n = y.length;
|
||||
const k = X[0].length;
|
||||
|
||||
// Calculate (X'X)^-1 X' y
|
||||
const XtX = Array(k).fill(null).map(() => Array(k).fill(0));
|
||||
const Xty = Array(k).fill(0);
|
||||
|
||||
// X'X
|
||||
for (let i = 0; i < k; i++) {
|
||||
for (let j = 0; j < k; j++) {
|
||||
for (let t = 0; t < n; t++) {
|
||||
XtX[i][j] += X[t][i] * X[t][j];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// X'y
|
||||
for (let i = 0; i < k; i++) {
|
||||
for (let t = 0; t < n; t++) {
|
||||
Xty[i] += X[t][i] * y[t];
|
||||
}
|
||||
}
|
||||
|
||||
// Solve for beta (simplified - assumes invertible)
|
||||
const beta = solveLinearSystem(XtX, Xty);
|
||||
|
||||
// Calculate residuals
|
||||
const residuals: number[] = [];
|
||||
for (let t = 0; t < n; t++) {
|
||||
let fitted = 0;
|
||||
for (let i = 0; i < k; i++) {
|
||||
fitted += X[t][i] * beta[i];
|
||||
}
|
||||
residuals.push(y[t] - fitted);
|
||||
}
|
||||
|
||||
return residuals;
|
||||
}
|
||||
|
||||
function solveLinearSystem(A: number[][], b: number[]): number[] {
|
||||
// Gaussian elimination (simplified)
|
||||
const n = A.length;
|
||||
const augmented = A.map((row, i) => [...row, b[i]]);
|
||||
|
||||
// Forward elimination
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = i + 1; j < n; j++) {
|
||||
const factor = augmented[j][i] / augmented[i][i];
|
||||
for (let k = i; k <= n; k++) {
|
||||
augmented[j][k] -= factor * augmented[i][k];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Back substitution
|
||||
const x = Array(n).fill(0);
|
||||
for (let i = n - 1; i >= 0; i--) {
|
||||
x[i] = augmented[i][n];
|
||||
for (let j = i + 1; j < n; j++) {
|
||||
x[i] -= augmented[i][j] * x[j];
|
||||
}
|
||||
x[i] /= augmented[i][i];
|
||||
}
|
||||
|
||||
return x;
|
||||
}
|
||||
|
||||
function calculateVARModel(x: number[], y: number[], lag: number): {
|
||||
rssX: number;
|
||||
rssY: number;
|
||||
aic: number;
|
||||
} {
|
||||
// Simplified VAR model calculation
|
||||
const n = x.length - lag;
|
||||
|
||||
// Build design matrix
|
||||
const X = Array(n).fill(null).map(() => Array(2 * lag + 1).fill(1));
|
||||
const yX = Array(n).fill(0);
|
||||
const yY = Array(n).fill(0);
|
||||
|
||||
for (let t = 0; t < n; t++) {
|
||||
yX[t] = x[t + lag];
|
||||
yY[t] = y[t + lag];
|
||||
|
||||
for (let l = 0; l < lag; l++) {
|
||||
X[t][1 + l] = x[t + lag - 1 - l];
|
||||
X[t][1 + lag + l] = y[t + lag - 1 - l];
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate residuals for both equations
|
||||
const residualsX = calculateResiduals(yX, X);
|
||||
const residualsY = calculateResiduals(yY, X);
|
||||
|
||||
const rssX = residualsX.reduce((sum, r) => sum + r * r, 0);
|
||||
const rssY = residualsY.reduce((sum, r) => sum + r * r, 0);
|
||||
|
||||
const k = 2 * lag + 1;
|
||||
const aic = n * Math.log(rssX + rssY) + 2 * k;
|
||||
|
||||
return { rssX, rssY, aic };
|
||||
}
|
||||
|
||||
function calculateARModel(y: number[], lag: number): { rss: number } {
|
||||
const n = y.length - lag;
|
||||
|
||||
// Build design matrix
|
||||
const X = Array(n).fill(null).map(() => Array(lag + 1).fill(1));
|
||||
const yVec = Array(n).fill(0);
|
||||
|
||||
for (let t = 0; t < n; t++) {
|
||||
yVec[t] = y[t + lag];
|
||||
|
||||
for (let l = 0; l < lag; l++) {
|
||||
X[t][1 + l] = y[t + lag - 1 - l];
|
||||
}
|
||||
}
|
||||
|
||||
const residuals = calculateResiduals(yVec, X);
|
||||
const rss = residuals.reduce((sum, r) => sum + r * r, 0);
|
||||
|
||||
return { rss };
|
||||
}
|
||||
|
||||
function fCDF(f: number, df1: number, df2: number): number {
|
||||
// Approximation for F distribution CDF
|
||||
if (f <= 0) return 0;
|
||||
if (f === Infinity) return 1;
|
||||
|
||||
const x = df2 / (df2 + df1 * f);
|
||||
return 1 - betaIncomplete(df2 / 2, df1 / 2, x);
|
||||
}
|
||||
72
libs/utils/src/calculations/index.ts
Normal file
72
libs/utils/src/calculations/index.ts
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
/**
|
||||
* Comprehensive Financial Calculations Library
|
||||
*
|
||||
* This module provides a complete set of financial calculations for trading and investment analysis.
|
||||
* Organized into logical categories for easy use and maintenance.
|
||||
*/
|
||||
|
||||
// Core interfaces for financial data
|
||||
/** A single OHLCV bar (candle) of market data. */
export interface OHLCVData {
  open: number;    // price at the start of the bar
  high: number;    // highest price during the bar
  low: number;     // lowest price during the bar
  close: number;   // price at the end of the bar
  volume: number;  // quantity traded during the bar
  timestamp: Date; // time the bar refers to
}

/** A single observed price at a point in time. */
export interface PriceData {
  price: number;
  timestamp: Date;
}

// Financial calculation result interfaces

/** Aggregate performance metrics for a portfolio. */
export interface PortfolioMetrics {
  totalValue: number;          // current market value of the portfolio
  totalReturn: number;         // cumulative absolute return
  totalReturnPercent: number;  // cumulative return as a percentage
  dailyReturn: number;         // latest one-day absolute return
  dailyReturnPercent: number;  // latest one-day return as a percentage
  maxDrawdown: number;         // largest peak-to-trough decline
  sharpeRatio: number;         // risk-adjusted return measure
  beta: number;                // sensitivity relative to a benchmark
  alpha: number;               // return in excess of the benchmark-implied return
  volatility: number;          // dispersion of returns
}

/** Risk measures for a return series. */
// NOTE(review): the last three fields use snake_case while the rest of the
// library is camelCase; renaming would break existing consumers, so they are
// documented as-is.
export interface RiskMetrics {
  var95: number; // Value at Risk 95%
  var99: number; // Value at Risk 99%
  cvar95: number; // Conditional VaR 95% (expected loss beyond VaR)
  maxDrawdown: number;        // largest peak-to-trough decline
  volatility: number;         // dispersion of returns
  downside_deviation: number; // deviation computed from negative returns only
  calmar_ratio: number;       // Calmar ratio
  sortino_ratio: number;      // Sortino ratio
}

/** Output series of the standard technical indicators, one entry per input bar. */
export interface TechnicalIndicators {
  sma: number[];        // simple moving average
  ema: number[];        // exponential moving average
  rsi: number[];        // relative strength index
  macd: { macd: number[], signal: number[], histogram: number[] };
  bollinger: { upper: number[], middle: number[], lower: number[] };
  atr: number[];        // average true range
  stochastic: { k: number[], d: number[] };
  williams_r: number[]; // Williams %R
  cci: number[];        // commodity channel index
  momentum: number[];   // n-period momentum
  roc: number[];        // rate of change
}
|
||||
|
||||
// Export interfaces from all modules
|
||||
export * from './basic-calculations';
|
||||
export * from './technical-indicators';
|
||||
export * from './risk-metrics';
|
||||
export * from './portfolio-analytics';
|
||||
export * from './options-pricing';
|
||||
export * from './position-sizing';
|
||||
export * from './performance-metrics';
|
||||
export * from './market-statistics';
|
||||
export * from './volatility-models';
|
||||
export * from './correlation-analysis';
|
||||
521
libs/utils/src/calculations/market-statistics.ts
Normal file
521
libs/utils/src/calculations/market-statistics.ts
Normal file
|
|
@ -0,0 +1,521 @@
|
|||
/**
|
||||
* Market Statistics and Microstructure Analysis
|
||||
* Tools for analyzing market behavior, liquidity, and trading patterns
|
||||
*/
|
||||
|
||||
// Local interface definition to avoid circular dependency
|
||||
/** A single OHLCV bar; duplicated locally to avoid a circular import. */
interface OHLCVData {
  open: number;
  high: number;
  low: number;
  close: number;
  volume: number;
  timestamp: Date;
}

/** Spread- and depth-based measures of how easily an instrument trades. */
export interface LiquidityMetrics {
  bidAskSpread: number;         // average absolute (ask - bid)
  relativeSpread: number;       // spread relative to the mid price, in percent
  effectiveSpread: number;      // approximated as a fixed fraction of the quoted spread
  priceImpact: number;          // approximated from the relative spread
  marketDepth: number;          // average quoted size across both sides
  turnoverRatio: number;        // traded value vs (placeholder) market cap, in percent
  volumeWeightedSpread: number; // currently equal to bidAskSpread (simplified)
}

/** Descriptive statistics of a market's trading mechanics. */
export interface MarketMicrostructure {
  tickSize: number;
  averageTradeSize: number;
  tradingFrequency: number;
  marketImpactCoefficient: number;
  informationShare: number; // share of price discovery attributed to this venue
  orderImbalance: number;   // (bid volume - ask volume) / total volume
}

/** Summary statistics for one trading session. */
export interface TradingSessionStats {
  openPrice: number;
  closePrice: number;
  highPrice: number;
  lowPrice: number;
  volume: number;
  vwap: number; // volume weighted average price
  numberOfTrades: number;
  averageTradeSize: number;
  volatility: number;
}

/** Classification of current market behavior. */
export interface MarketRegime {
  regime: 'trending' | 'ranging' | 'volatile' | 'quiet';
  confidence: number;             // heuristic confidence in [0, 1]
  trendDirection?: 'up' | 'down'; // only set when regime === 'trending'
  volatilityLevel: 'low' | 'medium' | 'high';
}
|
||||
|
||||
/**
|
||||
* Calculate Volume Weighted Average Price (VWAP)
|
||||
*/
|
||||
export function calculateVWAP(ohlcv: OHLCVData[]): number[] {
|
||||
if (ohlcv.length === 0) return [];
|
||||
|
||||
const vwap: number[] = [];
|
||||
let cumulativeVolumePrice = 0;
|
||||
let cumulativeVolume = 0;
|
||||
|
||||
for (const candle of ohlcv) {
|
||||
const typicalPrice = (candle.high + candle.low + candle.close) / 3;
|
||||
cumulativeVolumePrice += typicalPrice * candle.volume;
|
||||
cumulativeVolume += candle.volume;
|
||||
|
||||
vwap.push(cumulativeVolume > 0 ? cumulativeVolumePrice / cumulativeVolume : typicalPrice);
|
||||
}
|
||||
|
||||
return vwap;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Time Weighted Average Price (TWAP)
|
||||
*/
|
||||
export function calculateTWAP(prices: number[], timeWeights?: number[]): number {
|
||||
if (prices.length === 0) return 0;
|
||||
|
||||
if (!timeWeights) {
|
||||
return prices.reduce((sum, price) => sum + price, 0) / prices.length;
|
||||
}
|
||||
|
||||
if (prices.length !== timeWeights.length) {
|
||||
throw new Error('Prices and time weights arrays must have the same length');
|
||||
}
|
||||
|
||||
const totalWeight = timeWeights.reduce((sum, weight) => sum + weight, 0);
|
||||
const weightedSum = prices.reduce((sum, price, index) => sum + price * timeWeights[index], 0);
|
||||
|
||||
return totalWeight > 0 ? weightedSum / totalWeight : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate market impact of trades
|
||||
*/
|
||||
export function calculateMarketImpact(
|
||||
trades: Array<{ price: number; volume: number; side: 'buy' | 'sell'; timestamp: Date }>,
|
||||
benchmarkPrice: number
|
||||
): {
|
||||
temporaryImpact: number;
|
||||
permanentImpact: number;
|
||||
totalImpact: number;
|
||||
priceImprovement: number;
|
||||
} {
|
||||
if (trades.length === 0) {
|
||||
return {
|
||||
temporaryImpact: 0,
|
||||
permanentImpact: 0,
|
||||
totalImpact: 0,
|
||||
priceImprovement: 0
|
||||
};
|
||||
}
|
||||
|
||||
const volumeWeightedPrice = trades.reduce((sum, trade) => sum + trade.price * trade.volume, 0) /
|
||||
trades.reduce((sum, trade) => sum + trade.volume, 0);
|
||||
|
||||
const totalImpact = (volumeWeightedPrice - benchmarkPrice) / benchmarkPrice;
|
||||
|
||||
// Simplified impact calculation
|
||||
const temporaryImpact = totalImpact * 0.6; // Temporary component
|
||||
const permanentImpact = totalImpact * 0.4; // Permanent component
|
||||
|
||||
const priceImprovement = trades.reduce((sum, trade) => {
|
||||
const improvement = trade.side === 'buy' ?
|
||||
Math.max(0, benchmarkPrice - trade.price) :
|
||||
Math.max(0, trade.price - benchmarkPrice);
|
||||
return sum + improvement * trade.volume;
|
||||
}, 0) / trades.reduce((sum, trade) => sum + trade.volume, 0);
|
||||
|
||||
return {
|
||||
temporaryImpact,
|
||||
permanentImpact,
|
||||
totalImpact,
|
||||
priceImprovement
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate liquidity metrics
|
||||
*/
|
||||
export function calculateLiquidityMetrics(
|
||||
ohlcv: OHLCVData[],
|
||||
bidPrices: number[],
|
||||
askPrices: number[],
|
||||
bidSizes: number[],
|
||||
askSizes: number[]
|
||||
): LiquidityMetrics {
|
||||
if (ohlcv.length === 0 || bidPrices.length === 0) {
|
||||
return {
|
||||
bidAskSpread: 0,
|
||||
relativeSpread: 0,
|
||||
effectiveSpread: 0,
|
||||
priceImpact: 0,
|
||||
marketDepth: 0,
|
||||
turnoverRatio: 0,
|
||||
volumeWeightedSpread: 0
|
||||
};
|
||||
}
|
||||
|
||||
// Average bid-ask spread
|
||||
const spreads = bidPrices.map((bid, index) => askPrices[index] - bid);
|
||||
const bidAskSpread = spreads.reduce((sum, spread) => sum + spread, 0) / spreads.length;
|
||||
|
||||
// Relative spread
|
||||
const midPrices = bidPrices.map((bid, index) => (bid + askPrices[index]) / 2);
|
||||
const averageMidPrice = midPrices.reduce((sum, mid) => sum + mid, 0) / midPrices.length;
|
||||
const relativeSpread = averageMidPrice > 0 ? bidAskSpread / averageMidPrice : 0;
|
||||
|
||||
// Market depth
|
||||
const averageBidSize = bidSizes.reduce((sum, size) => sum + size, 0) / bidSizes.length;
|
||||
const averageAskSize = askSizes.reduce((sum, size) => sum + size, 0) / askSizes.length;
|
||||
const marketDepth = (averageBidSize + averageAskSize) / 2;
|
||||
|
||||
// Turnover ratio
|
||||
const averageVolume = ohlcv.reduce((sum, candle) => sum + candle.volume, 0) / ohlcv.length;
|
||||
const averagePrice = ohlcv.reduce((sum, candle) => sum + candle.close, 0) / ohlcv.length;
|
||||
const marketCap = averagePrice * 1000000; // Simplified market cap
|
||||
const turnoverRatio = marketCap > 0 ? (averageVolume * averagePrice) / marketCap : 0;
|
||||
|
||||
return {
|
||||
bidAskSpread,
|
||||
relativeSpread: relativeSpread * 100, // Convert to percentage
|
||||
effectiveSpread: bidAskSpread * 0.8, // Simplified effective spread
|
||||
priceImpact: relativeSpread * 2, // Simplified price impact
|
||||
marketDepth,
|
||||
turnoverRatio: turnoverRatio * 100, // Convert to percentage
|
||||
volumeWeightedSpread: bidAskSpread // Simplified
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Identify market regimes
|
||||
*/
|
||||
export function identifyMarketRegime(
|
||||
ohlcv: OHLCVData[],
|
||||
lookbackPeriod: number = 20
|
||||
): MarketRegime {
|
||||
if (ohlcv.length < lookbackPeriod) {
|
||||
return {
|
||||
regime: 'quiet',
|
||||
confidence: 0,
|
||||
volatilityLevel: 'low'
|
||||
};
|
||||
}
|
||||
|
||||
const recentData = ohlcv.slice(-lookbackPeriod);
|
||||
const prices = recentData.map(candle => candle.close);
|
||||
const volumes = recentData.map(candle => candle.volume);
|
||||
|
||||
// Calculate returns and volatility
|
||||
const returns = [];
|
||||
for (let i = 1; i < prices.length; i++) {
|
||||
returns.push((prices[i] - prices[i - 1]) / prices[i - 1]);
|
||||
}
|
||||
|
||||
const volatility = calculateVolatility(returns);
|
||||
const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length;
|
||||
|
||||
// Trend analysis
|
||||
const firstPrice = prices[0];
|
||||
const lastPrice = prices[prices.length - 1];
|
||||
const trendStrength = Math.abs((lastPrice - firstPrice) / firstPrice);
|
||||
|
||||
// Determine volatility level
|
||||
let volatilityLevel: 'low' | 'medium' | 'high';
|
||||
if (volatility < 0.01) volatilityLevel = 'low';
|
||||
else if (volatility < 0.03) volatilityLevel = 'medium';
|
||||
else volatilityLevel = 'high';
|
||||
|
||||
// Determine regime
|
||||
let regime: 'trending' | 'ranging' | 'volatile' | 'quiet';
|
||||
let confidence = 0;
|
||||
let trendDirection: 'up' | 'down' | undefined;
|
||||
|
||||
if (volatility < 0.005) {
|
||||
regime = 'quiet';
|
||||
confidence = 0.8;
|
||||
} else if (volatility > 0.04) {
|
||||
regime = 'volatile';
|
||||
confidence = 0.7;
|
||||
} else if (trendStrength > 0.05) {
|
||||
regime = 'trending';
|
||||
trendDirection = lastPrice > firstPrice ? 'up' : 'down';
|
||||
confidence = Math.min(0.9, trendStrength * 10);
|
||||
} else {
|
||||
regime = 'ranging';
|
||||
confidence = 0.6;
|
||||
}
|
||||
|
||||
return {
|
||||
regime,
|
||||
confidence,
|
||||
trendDirection,
|
||||
volatilityLevel
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate order book imbalance
|
||||
*/
|
||||
export function calculateOrderBookImbalance(
|
||||
bidPrices: number[],
|
||||
askPrices: number[],
|
||||
bidSizes: number[],
|
||||
askSizes: number[],
|
||||
levels: number = 5
|
||||
): number {
|
||||
const levelsToAnalyze = Math.min(levels, bidPrices.length, askPrices.length);
|
||||
|
||||
let totalBidVolume = 0;
|
||||
let totalAskVolume = 0;
|
||||
|
||||
for (let i = 0; i < levelsToAnalyze; i++) {
|
||||
totalBidVolume += bidSizes[i];
|
||||
totalAskVolume += askSizes[i];
|
||||
}
|
||||
|
||||
const totalVolume = totalBidVolume + totalAskVolume;
|
||||
|
||||
if (totalVolume === 0) return 0;
|
||||
|
||||
return (totalBidVolume - totalAskVolume) / totalVolume;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate intraday patterns
|
||||
*/
|
||||
export function calculateIntradayPatterns(
|
||||
ohlcv: OHLCVData[]
|
||||
): {
|
||||
hourlyReturns: { [hour: number]: number };
|
||||
hourlyVolatility: { [hour: number]: number };
|
||||
hourlyVolume: { [hour: number]: number };
|
||||
openingGap: number;
|
||||
closingDrift: number;
|
||||
} {
|
||||
const hourlyData: { [hour: number]: { returns: number[]; volumes: number[] } } = {};
|
||||
|
||||
// Initialize hourly buckets
|
||||
for (let hour = 0; hour < 24; hour++) {
|
||||
hourlyData[hour] = { returns: [], volumes: [] };
|
||||
}
|
||||
|
||||
// Aggregate data by hour
|
||||
for (let i = 1; i < ohlcv.length; i++) {
|
||||
const hour = ohlcv[i].timestamp.getHours();
|
||||
const return_ = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close;
|
||||
|
||||
hourlyData[hour].returns.push(return_);
|
||||
hourlyData[hour].volumes.push(ohlcv[i].volume);
|
||||
}
|
||||
|
||||
// Calculate statistics for each hour
|
||||
const hourlyReturns: { [hour: number]: number } = {};
|
||||
const hourlyVolatility: { [hour: number]: number } = {};
|
||||
const hourlyVolume: { [hour: number]: number } = {};
|
||||
|
||||
for (let hour = 0; hour < 24; hour++) {
|
||||
const data = hourlyData[hour];
|
||||
|
||||
hourlyReturns[hour] = data.returns.length > 0 ?
|
||||
data.returns.reduce((sum, ret) => sum + ret, 0) / data.returns.length : 0;
|
||||
|
||||
hourlyVolatility[hour] = calculateVolatility(data.returns);
|
||||
|
||||
hourlyVolume[hour] = data.volumes.length > 0 ?
|
||||
data.volumes.reduce((sum, vol) => sum + vol, 0) / data.volumes.length : 0;
|
||||
}
|
||||
|
||||
// Calculate opening gap and closing drift
|
||||
const openingGap = ohlcv.length > 1 ?
|
||||
(ohlcv[0].open - ohlcv[0].close) / ohlcv[0].close : 0;
|
||||
|
||||
const lastCandle = ohlcv[ohlcv.length - 1];
|
||||
const closingDrift = (lastCandle.close - lastCandle.open) / lastCandle.open;
|
||||
|
||||
return {
|
||||
hourlyReturns,
|
||||
hourlyVolatility,
|
||||
hourlyVolume,
|
||||
openingGap,
|
||||
closingDrift
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate price discovery metrics
|
||||
*/
|
||||
export function calculatePriceDiscovery(
|
||||
prices1: number[], // Prices from market 1
|
||||
prices2: number[] // Prices from market 2
|
||||
): {
|
||||
informationShare1: number;
|
||||
informationShare2: number;
|
||||
priceLeadLag: number; // Positive if market 1 leads
|
||||
cointegrationStrength: number;
|
||||
} {
|
||||
if (prices1.length !== prices2.length || prices1.length < 2) {
|
||||
return {
|
||||
informationShare1: 0.5,
|
||||
informationShare2: 0.5,
|
||||
priceLeadLag: 0,
|
||||
cointegrationStrength: 0
|
||||
};
|
||||
}
|
||||
|
||||
// Calculate returns
|
||||
const returns1 = [];
|
||||
const returns2 = [];
|
||||
|
||||
for (let i = 1; i < prices1.length; i++) {
|
||||
returns1.push((prices1[i] - prices1[i - 1]) / prices1[i - 1]);
|
||||
returns2.push((prices2[i] - prices2[i - 1]) / prices2[i - 1]);
|
||||
}
|
||||
|
||||
// Calculate correlations with lags
|
||||
const correlation0 = calculateCorrelation(returns1, returns2);
|
||||
const correlation1 = returns1.length > 1 ?
|
||||
calculateCorrelation(returns1.slice(1), returns2.slice(0, -1)) : 0;
|
||||
const correlationMinus1 = returns1.length > 1 ?
|
||||
calculateCorrelation(returns1.slice(0, -1), returns2.slice(1)) : 0;
|
||||
|
||||
// Price lead-lag (simplified)
|
||||
const priceLeadLag = correlation1 - correlationMinus1;
|
||||
|
||||
// Information shares (simplified Hasbrouck methodology)
|
||||
const variance1 = calculateVariance(returns1);
|
||||
const variance2 = calculateVariance(returns2);
|
||||
const covariance = calculateCovariance(returns1, returns2);
|
||||
|
||||
const totalVariance = variance1 + variance2 + 2 * covariance;
|
||||
const informationShare1 = totalVariance > 0 ? (variance1 + covariance) / totalVariance : 0.5;
|
||||
const informationShare2 = 1 - informationShare1;
|
||||
|
||||
// Cointegration strength (simplified)
|
||||
const cointegrationStrength = Math.abs(correlation0);
|
||||
|
||||
return {
|
||||
informationShare1,
|
||||
informationShare2,
|
||||
priceLeadLag,
|
||||
cointegrationStrength
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate market stress indicators
|
||||
*/
|
||||
export function calculateMarketStress(
|
||||
ohlcv: OHLCVData[],
|
||||
lookbackPeriod: number = 20
|
||||
): {
|
||||
stressLevel: 'low' | 'medium' | 'high' | 'extreme';
|
||||
volatilityStress: number;
|
||||
liquidityStress: number;
|
||||
correlationStress: number;
|
||||
overallStress: number;
|
||||
} {
|
||||
if (ohlcv.length < lookbackPeriod) {
|
||||
return {
|
||||
stressLevel: 'low',
|
||||
volatilityStress: 0,
|
||||
liquidityStress: 0,
|
||||
correlationStress: 0,
|
||||
overallStress: 0
|
||||
};
|
||||
}
|
||||
|
||||
const recentData = ohlcv.slice(-lookbackPeriod);
|
||||
const returns = [];
|
||||
const volumes = [];
|
||||
|
||||
for (let i = 1; i < recentData.length; i++) {
|
||||
returns.push((recentData[i].close - recentData[i - 1].close) / recentData[i - 1].close);
|
||||
volumes.push(recentData[i].volume);
|
||||
}
|
||||
|
||||
// Volatility stress
|
||||
const volatility = calculateVolatility(returns);
|
||||
const volatilityStress = Math.min(1, volatility / 0.05); // Normalize to 5% daily vol
|
||||
|
||||
// Liquidity stress (volume-based)
|
||||
const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length;
|
||||
const volumeVariability = calculateVolatility(volumes.map(vol => vol / averageVolume));
|
||||
const liquidityStress = Math.min(1, volumeVariability);
|
||||
|
||||
// Correlation stress (simplified - would need multiple assets)
|
||||
const correlationStress = 0.3; // Placeholder
|
||||
|
||||
// Overall stress
|
||||
const overallStress = (volatilityStress * 0.4 + liquidityStress * 0.3 + correlationStress * 0.3);
|
||||
|
||||
let stressLevel: 'low' | 'medium' | 'high' | 'extreme';
|
||||
if (overallStress < 0.25) stressLevel = 'low';
|
||||
else if (overallStress < 0.5) stressLevel = 'medium';
|
||||
else if (overallStress < 0.75) stressLevel = 'high';
|
||||
else stressLevel = 'extreme';
|
||||
|
||||
return {
|
||||
stressLevel,
|
||||
volatilityStress,
|
||||
liquidityStress,
|
||||
correlationStress,
|
||||
overallStress
|
||||
};
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
|
||||
function calculateVolatility(returns: number[]): number {
|
||||
if (returns.length < 2) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
|
||||
|
||||
return Math.sqrt(variance);
|
||||
}
|
||||
|
||||
function calculateCorrelation(x: number[], y: number[]): number {
|
||||
if (x.length !== y.length || x.length < 2) return 0;
|
||||
|
||||
const n = x.length;
|
||||
const meanX = x.reduce((sum, val) => sum + val, 0) / n;
|
||||
const meanY = y.reduce((sum, val) => sum + val, 0) / n;
|
||||
|
||||
let numerator = 0;
|
||||
let sumXSquared = 0;
|
||||
let sumYSquared = 0;
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
const xDiff = x[i] - meanX;
|
||||
const yDiff = y[i] - meanY;
|
||||
|
||||
numerator += xDiff * yDiff;
|
||||
sumXSquared += xDiff * xDiff;
|
||||
sumYSquared += yDiff * yDiff;
|
||||
}
|
||||
|
||||
const denominator = Math.sqrt(sumXSquared * sumYSquared);
|
||||
|
||||
return denominator > 0 ? numerator / denominator : 0;
|
||||
}
|
||||
|
||||
function calculateVariance(values: number[]): number {
|
||||
if (values.length < 2) return 0;
|
||||
|
||||
const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
|
||||
return values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / (values.length - 1);
|
||||
}
|
||||
|
||||
function calculateCovariance(x: number[], y: number[]): number {
|
||||
if (x.length !== y.length || x.length < 2) return 0;
|
||||
|
||||
const n = x.length;
|
||||
const meanX = x.reduce((sum, val) => sum + val, 0) / n;
|
||||
const meanY = y.reduce((sum, val) => sum + val, 0) / n;
|
||||
|
||||
return x.reduce((sum, val, i) => sum + (val - meanX) * (y[i] - meanY), 0) / (n - 1);
|
||||
}
|
||||
504
libs/utils/src/calculations/options-pricing.ts
Normal file
504
libs/utils/src/calculations/options-pricing.ts
Normal file
|
|
@ -0,0 +1,504 @@
|
|||
/**
|
||||
* Options Pricing Models
|
||||
* Implementation of various options pricing models and Greeks calculations
|
||||
*/
|
||||
|
||||
/** Inputs shared by all pricing models in this module. */
export interface OptionParameters {
  spotPrice: number;      // current price of the underlying
  strikePrice: number;    // option strike
  timeToExpiry: number; // in years
  riskFreeRate: number;   // annualized, continuously compounded (used as exp(-r*T))
  volatility: number;     // annualized volatility of the underlying
  dividendYield?: number; // continuous dividend yield; treated as 0 when omitted
}

/** Call and put values decomposed into intrinsic and time components. */
export interface OptionPricing {
  callPrice: number;
  putPrice: number;
  intrinsicValueCall: number; // max(spot - strike, 0)
  intrinsicValuePut: number;  // max(strike - spot, 0)
  timeValueCall: number;      // callPrice - intrinsicValueCall
  timeValuePut: number;       // putPrice - intrinsicValuePut
}

/** First-order option sensitivities (see calculateGreeks for scaling). */
export interface GreeksCalculation {
  delta: number; // sensitivity of price to the spot
  gamma: number; // sensitivity of delta to the spot
  theta: number; // time decay, scaled per calendar day (divided by 365)
  vega: number;  // volatility sensitivity, scaled per 1% (divided by 100)
  rho: number;   // rate sensitivity, scaled per 1% (divided by 100)
}

/** Result of the Newton-Raphson implied volatility search. */
export interface ImpliedVolatilityResult {
  impliedVolatility: number; // last volatility iterate (best estimate)
  iterations: number;        // iterations actually performed
  converged: boolean;        // true if |model - market| fell below tolerance
}
|
||||
|
||||
/**
 * Black-Scholes option pricing model (European options, continuous dividend
 * yield). Prices both the call and the put from the same d1/d2 terms and
 * decomposes each price into intrinsic and time value. At or past expiry,
 * both prices collapse to intrinsic value.
 *
 * @param params - spot, strike, time to expiry (years), rate, volatility and
 *                 optional continuous dividend yield (default 0)
 * @returns call/put prices with intrinsic and time value components
 */
export function blackScholes(params: OptionParameters): OptionPricing {
  const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params;

  // Expired option: worth exactly its intrinsic value, no time value left.
  if (timeToExpiry <= 0) {
    const intrinsicValueCall = Math.max(spotPrice - strikePrice, 0);
    const intrinsicValuePut = Math.max(strikePrice - spotPrice, 0);

    return {
      callPrice: intrinsicValueCall,
      putPrice: intrinsicValuePut,
      intrinsicValueCall,
      intrinsicValuePut,
      timeValueCall: 0,
      timeValuePut: 0
    };
  }

  // Standard Black-Scholes terms d1 and d2.
  const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate - dividendYield + 0.5 * volatility * volatility) * timeToExpiry) /
    (volatility * Math.sqrt(timeToExpiry));
  const d2 = d1 - volatility * Math.sqrt(timeToExpiry);

  const nd1 = normalCDF(d1);
  const nd2 = normalCDF(d2);
  const nMinusd1 = normalCDF(-d1);
  const nMinusd2 = normalCDF(-d2);

  // Call: dividend-discounted spot leg minus rate-discounted strike leg.
  const callPrice = spotPrice * Math.exp(-dividendYield * timeToExpiry) * nd1 -
    strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nd2;

  // Put: the symmetric formula (consistent with put-call parity).
  const putPrice = strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nMinusd2 -
    spotPrice * Math.exp(-dividendYield * timeToExpiry) * nMinusd1;

  const intrinsicValueCall = Math.max(spotPrice - strikePrice, 0);
  const intrinsicValuePut = Math.max(strikePrice - spotPrice, 0);

  // Time value is whatever the model price adds above intrinsic value.
  const timeValueCall = callPrice - intrinsicValueCall;
  const timeValuePut = putPrice - intrinsicValuePut;

  return {
    callPrice,
    putPrice,
    intrinsicValueCall,
    intrinsicValuePut,
    timeValueCall,
    timeValuePut
  };
}
|
||||
|
||||
/**
 * Calculate option Greeks using the Black-Scholes model.
 *
 * Scaling conventions in this implementation: theta is per calendar day
 * (divided by 365); vega and rho are per one percentage point (divided by
 * 100). At or past expiry every Greek is zero except delta, which becomes a
 * 0 / +1 / -1 step depending on moneyness.
 *
 * @param params - spot, strike, time to expiry (years), rate, volatility and
 *                 optional continuous dividend yield (default 0)
 * @param optionType - 'call' (default) or 'put'
 */
export function calculateGreeks(params: OptionParameters, optionType: 'call' | 'put' = 'call'): GreeksCalculation {
  const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params;

  // Expired option: delta collapses to a step function, other Greeks vanish.
  if (timeToExpiry <= 0) {
    return {
      delta: optionType === 'call' ? (spotPrice > strikePrice ? 1 : 0) : (spotPrice < strikePrice ? -1 : 0),
      gamma: 0,
      theta: 0,
      vega: 0,
      rho: 0
    };
  }

  // Standard Black-Scholes terms.
  const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate - dividendYield + 0.5 * volatility * volatility) * timeToExpiry) /
    (volatility * Math.sqrt(timeToExpiry));
  const d2 = d1 - volatility * Math.sqrt(timeToExpiry);

  const nd1 = normalCDF(d1);
  const nd2 = normalCDF(d2);
  const npd1 = normalPDF(d1);

  // Delta (dividend-adjusted)
  const callDelta = Math.exp(-dividendYield * timeToExpiry) * nd1;
  const putDelta = Math.exp(-dividendYield * timeToExpiry) * (nd1 - 1);
  const delta = optionType === 'call' ? callDelta : putDelta;

  // Gamma (same for calls and puts)
  const gamma = Math.exp(-dividendYield * timeToExpiry) * npd1 /
    (spotPrice * volatility * Math.sqrt(timeToExpiry));

  // Theta: decay term plus rate and dividend carry terms, then scaled per day.
  const term1 = -(spotPrice * npd1 * volatility * Math.exp(-dividendYield * timeToExpiry)) /
    (2 * Math.sqrt(timeToExpiry));
  const term2Call = riskFreeRate * strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nd2;
  const term2Put = -riskFreeRate * strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2);
  const term3 = dividendYield * spotPrice * Math.exp(-dividendYield * timeToExpiry) *
    (optionType === 'call' ? nd1 : normalCDF(-d1));

  const theta = optionType === 'call' ?
    (term1 - term2Call + term3) / 365 :
    (term1 + term2Put + term3) / 365;

  // Vega (same for calls and puts), scaled per 1% volatility move
  const vega = spotPrice * Math.exp(-dividendYield * timeToExpiry) * npd1 * Math.sqrt(timeToExpiry) / 100;

  // Rho, scaled per 1% rate move
  const callRho = strikePrice * timeToExpiry * Math.exp(-riskFreeRate * timeToExpiry) * nd2 / 100;
  const putRho = -strikePrice * timeToExpiry * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2) / 100;
  const rho = optionType === 'call' ? callRho : putRho;

  return {
    delta,
    gamma,
    theta,
    vega,
    rho
  };
}
|
||||
|
||||
/**
|
||||
* Calculate implied volatility using Newton-Raphson method
|
||||
*/
|
||||
export function calculateImpliedVolatility(
|
||||
marketPrice: number,
|
||||
spotPrice: number,
|
||||
strikePrice: number,
|
||||
timeToExpiry: number,
|
||||
riskFreeRate: number,
|
||||
optionType: 'call' | 'put' = 'call',
|
||||
dividendYield: number = 0,
|
||||
initialGuess: number = 0.2,
|
||||
tolerance: number = 1e-6,
|
||||
maxIterations: number = 100
|
||||
): ImpliedVolatilityResult {
|
||||
let volatility = initialGuess;
|
||||
let iterations = 0;
|
||||
let converged = false;
|
||||
|
||||
for (let i = 0; i < maxIterations; i++) {
|
||||
iterations = i + 1;
|
||||
|
||||
const params: OptionParameters = {
|
||||
spotPrice,
|
||||
strikePrice,
|
||||
timeToExpiry,
|
||||
riskFreeRate,
|
||||
volatility,
|
||||
dividendYield
|
||||
};
|
||||
|
||||
const pricing = blackScholes(params);
|
||||
const theoreticalPrice = optionType === 'call' ? pricing.callPrice : pricing.putPrice;
|
||||
|
||||
const priceDiff = theoreticalPrice - marketPrice;
|
||||
|
||||
if (Math.abs(priceDiff) < tolerance) {
|
||||
converged = true;
|
||||
break;
|
||||
}
|
||||
|
||||
// Calculate vega for Newton-Raphson
|
||||
const greeks = calculateGreeks(params, optionType);
|
||||
const vega = greeks.vega * 100; // Convert back from percentage
|
||||
|
||||
if (Math.abs(vega) < 1e-10) {
|
||||
break; // Avoid division by zero
|
||||
}
|
||||
|
||||
volatility = volatility - priceDiff / vega;
|
||||
|
||||
// Keep volatility within reasonable bounds
|
||||
volatility = Math.max(0.001, Math.min(volatility, 10));
|
||||
}
|
||||
|
||||
return {
|
||||
impliedVolatility: volatility,
|
||||
iterations,
|
||||
converged
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Binomial option pricing model
|
||||
*/
|
||||
export function binomialOptionPricing(
|
||||
params: OptionParameters,
|
||||
optionType: 'call' | 'put' = 'call',
|
||||
americanStyle: boolean = false,
|
||||
steps: number = 100
|
||||
): OptionPricing {
|
||||
const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params;
|
||||
|
||||
const dt = timeToExpiry / steps;
|
||||
const u = Math.exp(volatility * Math.sqrt(dt));
|
||||
const d = 1 / u;
|
||||
const p = (Math.exp((riskFreeRate - dividendYield) * dt) - d) / (u - d);
|
||||
const discount = Math.exp(-riskFreeRate * dt);
|
||||
|
||||
// Create price tree
|
||||
const stockPrices: number[][] = [];
|
||||
for (let i = 0; i <= steps; i++) {
|
||||
stockPrices[i] = [];
|
||||
for (let j = 0; j <= i; j++) {
|
||||
stockPrices[i][j] = spotPrice * Math.pow(u, i - j) * Math.pow(d, j);
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate option values at expiration
|
||||
const optionValues: number[][] = [];
|
||||
for (let i = 0; i <= steps; i++) {
|
||||
optionValues[i] = [];
|
||||
}
|
||||
|
||||
for (let j = 0; j <= steps; j++) {
|
||||
if (optionType === 'call') {
|
||||
optionValues[steps][j] = Math.max(stockPrices[steps][j] - strikePrice, 0);
|
||||
} else {
|
||||
optionValues[steps][j] = Math.max(strikePrice - stockPrices[steps][j], 0);
|
||||
}
|
||||
}
|
||||
|
||||
// Work backwards through the tree
|
||||
for (let i = steps - 1; i >= 0; i--) {
|
||||
for (let j = 0; j <= i; j++) {
|
||||
// European option value
|
||||
const holdValue = discount * (p * optionValues[i + 1][j] + (1 - p) * optionValues[i + 1][j + 1]);
|
||||
|
||||
if (americanStyle) {
|
||||
// American option - can exercise early
|
||||
const exerciseValue = optionType === 'call' ?
|
||||
Math.max(stockPrices[i][j] - strikePrice, 0) :
|
||||
Math.max(strikePrice - stockPrices[i][j], 0);
|
||||
|
||||
optionValues[i][j] = Math.max(holdValue, exerciseValue);
|
||||
} else {
|
||||
optionValues[i][j] = holdValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const price = optionValues[0][0];
|
||||
const intrinsicValue = optionType === 'call' ?
|
||||
Math.max(spotPrice - strikePrice, 0) :
|
||||
Math.max(strikePrice - spotPrice, 0);
|
||||
const timeValue = price - intrinsicValue;
|
||||
|
||||
if (optionType === 'call') {
|
||||
return {
|
||||
callPrice: price,
|
||||
putPrice: 0, // Not calculated
|
||||
intrinsicValueCall: intrinsicValue,
|
||||
intrinsicValuePut: 0,
|
||||
timeValueCall: timeValue,
|
||||
timeValuePut: 0
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
callPrice: 0, // Not calculated
|
||||
putPrice: price,
|
||||
intrinsicValueCall: 0,
|
||||
intrinsicValuePut: intrinsicValue,
|
||||
timeValueCall: 0,
|
||||
timeValuePut: timeValue
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Monte Carlo option pricing
|
||||
*/
|
||||
export function monteCarloOptionPricing(
|
||||
params: OptionParameters,
|
||||
optionType: 'call' | 'put' = 'call',
|
||||
numSimulations: number = 100000
|
||||
): OptionPricing {
|
||||
const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params;
|
||||
|
||||
let totalPayoff = 0;
|
||||
|
||||
for (let i = 0; i < numSimulations; i++) {
|
||||
// Generate random price path
|
||||
const z = boxMullerTransform();
|
||||
const finalPrice = spotPrice * Math.exp(
|
||||
(riskFreeRate - dividendYield - 0.5 * volatility * volatility) * timeToExpiry +
|
||||
volatility * Math.sqrt(timeToExpiry) * z
|
||||
);
|
||||
|
||||
// Calculate payoff
|
||||
const payoff = optionType === 'call' ?
|
||||
Math.max(finalPrice - strikePrice, 0) :
|
||||
Math.max(strikePrice - finalPrice, 0);
|
||||
|
||||
totalPayoff += payoff;
|
||||
}
|
||||
|
||||
const averagePayoff = totalPayoff / numSimulations;
|
||||
const price = averagePayoff * Math.exp(-riskFreeRate * timeToExpiry);
|
||||
|
||||
const intrinsicValue = optionType === 'call' ?
|
||||
Math.max(spotPrice - strikePrice, 0) :
|
||||
Math.max(strikePrice - spotPrice, 0);
|
||||
const timeValue = price - intrinsicValue;
|
||||
|
||||
if (optionType === 'call') {
|
||||
return {
|
||||
callPrice: price,
|
||||
putPrice: 0,
|
||||
intrinsicValueCall: intrinsicValue,
|
||||
intrinsicValuePut: 0,
|
||||
timeValueCall: timeValue,
|
||||
timeValuePut: 0
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
callPrice: 0,
|
||||
putPrice: price,
|
||||
intrinsicValueCall: 0,
|
||||
intrinsicValuePut: intrinsicValue,
|
||||
timeValueCall: 0,
|
||||
timeValuePut: timeValue
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate option portfolio risk metrics
|
||||
*/
|
||||
export function calculateOptionPortfolioRisk(
|
||||
positions: Array<{
|
||||
optionType: 'call' | 'put';
|
||||
quantity: number;
|
||||
params: OptionParameters;
|
||||
}>
|
||||
): {
|
||||
totalDelta: number;
|
||||
totalGamma: number;
|
||||
totalTheta: number;
|
||||
totalVega: number;
|
||||
totalRho: number;
|
||||
portfolioValue: number;
|
||||
} {
|
||||
let totalDelta = 0;
|
||||
let totalGamma = 0;
|
||||
let totalTheta = 0;
|
||||
let totalVega = 0;
|
||||
let totalRho = 0;
|
||||
let portfolioValue = 0;
|
||||
|
||||
for (const position of positions) {
|
||||
const greeks = calculateGreeks(position.params, position.optionType);
|
||||
const pricing = blackScholes(position.params);
|
||||
const optionPrice = position.optionType === 'call' ? pricing.callPrice : pricing.putPrice;
|
||||
|
||||
totalDelta += greeks.delta * position.quantity;
|
||||
totalGamma += greeks.gamma * position.quantity;
|
||||
totalTheta += greeks.theta * position.quantity;
|
||||
totalVega += greeks.vega * position.quantity;
|
||||
totalRho += greeks.rho * position.quantity;
|
||||
portfolioValue += optionPrice * position.quantity;
|
||||
}
|
||||
|
||||
return {
|
||||
totalDelta,
|
||||
totalGamma,
|
||||
totalTheta,
|
||||
totalVega,
|
||||
totalRho,
|
||||
portfolioValue
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Volatility surface interpolation
|
||||
*/
|
||||
export function interpolateVolatilitySurface(
|
||||
strikes: number[],
|
||||
expiries: number[],
|
||||
volatilities: number[][],
|
||||
targetStrike: number,
|
||||
targetExpiry: number
|
||||
): number {
|
||||
// Simplified bilinear interpolation
|
||||
// In production, use more sophisticated interpolation methods
|
||||
|
||||
// Find surrounding points
|
||||
let strikeIndex = 0;
|
||||
let expiryIndex = 0;
|
||||
|
||||
for (let i = 0; i < strikes.length - 1; i++) {
|
||||
if (targetStrike >= strikes[i] && targetStrike <= strikes[i + 1]) {
|
||||
strikeIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < expiries.length - 1; i++) {
|
||||
if (targetExpiry >= expiries[i] && targetExpiry <= expiries[i + 1]) {
|
||||
expiryIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Bilinear interpolation
|
||||
const x1 = strikes[strikeIndex];
|
||||
const x2 = strikes[strikeIndex + 1];
|
||||
const y1 = expiries[expiryIndex];
|
||||
const y2 = expiries[expiryIndex + 1];
|
||||
|
||||
const q11 = volatilities[expiryIndex][strikeIndex];
|
||||
const q12 = volatilities[expiryIndex + 1][strikeIndex];
|
||||
const q21 = volatilities[expiryIndex][strikeIndex + 1];
|
||||
const q22 = volatilities[expiryIndex + 1][strikeIndex + 1];
|
||||
|
||||
const wx = (targetStrike - x1) / (x2 - x1);
|
||||
const wy = (targetExpiry - y1) / (y2 - y1);
|
||||
|
||||
return q11 * (1 - wx) * (1 - wy) +
|
||||
q21 * wx * (1 - wy) +
|
||||
q12 * (1 - wx) * wy +
|
||||
q22 * wx * wy;
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
|
||||
/**
|
||||
* Normal cumulative distribution function
|
||||
*/
|
||||
function normalCDF(x: number): number {
|
||||
return 0.5 * (1 + erf(x / Math.sqrt(2)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Normal probability density function
|
||||
*/
|
||||
function normalPDF(x: number): number {
|
||||
return Math.exp(-0.5 * x * x) / Math.sqrt(2 * Math.PI);
|
||||
}
|
||||
|
||||
/**
|
||||
* Error function approximation
|
||||
*/
|
||||
function erf(x: number): number {
|
||||
// Abramowitz and Stegun approximation
|
||||
const a1 = 0.254829592;
|
||||
const a2 = -0.284496736;
|
||||
const a3 = 1.421413741;
|
||||
const a4 = -1.453152027;
|
||||
const a5 = 1.061405429;
|
||||
const p = 0.3275911;
|
||||
|
||||
const sign = x >= 0 ? 1 : -1;
|
||||
x = Math.abs(x);
|
||||
|
||||
const t = 1.0 / (1.0 + p * x);
|
||||
const y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x);
|
||||
|
||||
return sign * y;
|
||||
}
|
||||
|
||||
/**
|
||||
* Box-Muller transformation for normal random numbers
|
||||
*/
|
||||
function boxMullerTransform(): number {
|
||||
let u1 = Math.random();
|
||||
let u2 = Math.random();
|
||||
|
||||
// Ensure u1 is not zero
|
||||
while (u1 === 0) {
|
||||
u1 = Math.random();
|
||||
}
|
||||
|
||||
return Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
|
||||
}
|
||||
562
libs/utils/src/calculations/performance-metrics.ts
Normal file
562
libs/utils/src/calculations/performance-metrics.ts
Normal file
|
|
@ -0,0 +1,562 @@
|
|||
/**
|
||||
* Performance Metrics and Analysis
|
||||
* Comprehensive performance measurement tools for trading strategies and portfolios
|
||||
*/
|
||||
|
||||
import { PortfolioMetrics } from './index';
|
||||
|
||||
/**
 * Summary statistics for a set of closed trades, as produced by
 * analyzeTradePerformance.
 */
export interface TradePerformance {
  totalTrades: number;         // number of trades analyzed
  winningTrades: number;       // trades with pnl > 0
  losingTrades: number;        // trades with pnl < 0 (breakeven trades count in neither)
  winRate: number;             // winningTrades / totalTrades, in [0, 1]
  averageWin: number;          // mean pnl of winning trades
  averageLoss: number;         // mean |pnl| of losing trades (reported as a positive number)
  largestWin: number;          // maximum single-trade pnl
  largestLoss: number;         // minimum single-trade pnl (negative)
  profitFactor: number;        // gross wins / gross losses; Infinity when there are no losses
  expectancy: number;          // winRate * averageWin - (1 - winRate) * averageLoss
  averageTradeReturn: number;  // total pnl / totalTrades
  consecutiveWins: number;     // longest winning streak, in input order
  consecutiveLosses: number;   // longest losing streak, in input order
}
|
||||
|
||||
/**
 * Drawdown statistics derived from an equity curve by analyzeDrawdowns.
 */
export interface DrawdownAnalysis {
  maxDrawdown: number;          // deepest peak-to-trough decline, as a fraction of the peak
  maxDrawdownDuration: number;  // longest drawdown period, in whole days
  averageDrawdown: number;      // mean magnitude across recorded drawdown periods
  drawdownPeriods: Array<{
    start: Date;                // date of the peak preceding the decline
    end: Date;                  // last date of the period
    duration: number;           // whole days between start and end
    magnitude: number;          // decline as a fraction of the peak
  }>;
}
|
||||
|
||||
/**
 * Distributional and summary statistics for a periodic return series,
 * as produced by analyzeReturns.
 */
export interface ReturnAnalysis {
  totalReturn: number;               // compounded return over the whole series
  annualizedReturn: number;          // mean periodic return compounded to one year
  compoundAnnualGrowthRate: number;  // CAGR implied by totalReturn and series length
  volatility: number;                // per-period sample standard deviation
  annualizedVolatility: number;      // volatility scaled by sqrt(periods per year)
  skewness: number;                  // third standardized moment
  kurtosis: number;                  // excess kurtosis (normal distribution scores ~0)
  bestMonth: number;                 // highest compounded calendar-month return
  worstMonth: number;                // lowest compounded calendar-month return
  positiveMonths: number;            // count of months with positive return
  negativeMonths: number;            // count of months with negative return
}
|
||||
|
||||
/**
|
||||
* Calculate comprehensive trade performance metrics
|
||||
*/
|
||||
export function analyzeTradePerformance(trades: Array<{ pnl: number; date: Date }>): TradePerformance {
|
||||
if (trades.length === 0) {
|
||||
return {
|
||||
totalTrades: 0,
|
||||
winningTrades: 0,
|
||||
losingTrades: 0,
|
||||
winRate: 0,
|
||||
averageWin: 0,
|
||||
averageLoss: 0,
|
||||
largestWin: 0,
|
||||
largestLoss: 0,
|
||||
profitFactor: 0,
|
||||
expectancy: 0,
|
||||
averageTradeReturn: 0,
|
||||
consecutiveWins: 0,
|
||||
consecutiveLosses: 0
|
||||
};
|
||||
}
|
||||
|
||||
const winningTrades = trades.filter(trade => trade.pnl > 0);
|
||||
const losingTrades = trades.filter(trade => trade.pnl < 0);
|
||||
|
||||
const totalWins = winningTrades.reduce((sum, trade) => sum + trade.pnl, 0);
|
||||
const totalLosses = Math.abs(losingTrades.reduce((sum, trade) => sum + trade.pnl, 0));
|
||||
|
||||
const averageWin = winningTrades.length > 0 ? totalWins / winningTrades.length : 0;
|
||||
const averageLoss = losingTrades.length > 0 ? totalLosses / losingTrades.length : 0;
|
||||
|
||||
const largestWin = winningTrades.length > 0 ? Math.max(...winningTrades.map(t => t.pnl)) : 0;
|
||||
const largestLoss = losingTrades.length > 0 ? Math.min(...losingTrades.map(t => t.pnl)) : 0;
|
||||
|
||||
const profitFactor = totalLosses > 0 ? totalWins / totalLosses : totalWins > 0 ? Infinity : 0;
|
||||
const winRate = winningTrades.length / trades.length;
|
||||
const expectancy = (winRate * averageWin) - ((1 - winRate) * averageLoss);
|
||||
|
||||
const totalPnL = trades.reduce((sum, trade) => sum + trade.pnl, 0);
|
||||
const averageTradeReturn = totalPnL / trades.length;
|
||||
|
||||
// Calculate consecutive wins/losses
|
||||
let consecutiveWins = 0;
|
||||
let consecutiveLosses = 0;
|
||||
let currentWinStreak = 0;
|
||||
let currentLossStreak = 0;
|
||||
|
||||
for (const trade of trades) {
|
||||
if (trade.pnl > 0) {
|
||||
currentWinStreak++;
|
||||
currentLossStreak = 0;
|
||||
consecutiveWins = Math.max(consecutiveWins, currentWinStreak);
|
||||
} else if (trade.pnl < 0) {
|
||||
currentLossStreak++;
|
||||
currentWinStreak = 0;
|
||||
consecutiveLosses = Math.max(consecutiveLosses, currentLossStreak);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
totalTrades: trades.length,
|
||||
winningTrades: winningTrades.length,
|
||||
losingTrades: losingTrades.length,
|
||||
winRate,
|
||||
averageWin,
|
||||
averageLoss,
|
||||
largestWin,
|
||||
largestLoss,
|
||||
profitFactor,
|
||||
expectancy,
|
||||
averageTradeReturn,
|
||||
consecutiveWins,
|
||||
consecutiveLosses
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze drawdown characteristics
|
||||
*/
|
||||
export function analyzeDrawdowns(equityCurve: Array<{ value: number; date: Date }>): DrawdownAnalysis {
|
||||
if (equityCurve.length < 2) {
|
||||
return {
|
||||
maxDrawdown: 0,
|
||||
maxDrawdownDuration: 0,
|
||||
averageDrawdown: 0,
|
||||
drawdownPeriods: []
|
||||
};
|
||||
}
|
||||
|
||||
let peak = equityCurve[0].value;
|
||||
let peakDate = equityCurve[0].date;
|
||||
let maxDrawdown = 0;
|
||||
let maxDrawdownDuration = 0;
|
||||
|
||||
const drawdownPeriods: Array<{
|
||||
start: Date;
|
||||
end: Date;
|
||||
duration: number;
|
||||
magnitude: number;
|
||||
}> = [];
|
||||
|
||||
let currentDrawdownStart: Date | null = null;
|
||||
let drawdowns: number[] = [];
|
||||
|
||||
for (let i = 1; i < equityCurve.length; i++) {
|
||||
const current = equityCurve[i];
|
||||
|
||||
if (current.value > peak) {
|
||||
// New peak - end any current drawdown
|
||||
if (currentDrawdownStart) {
|
||||
const drawdownMagnitude = (peak - equityCurve[i - 1].value) / peak;
|
||||
const duration = Math.floor((equityCurve[i - 1].date.getTime() - currentDrawdownStart.getTime()) / (1000 * 60 * 60 * 24));
|
||||
|
||||
drawdownPeriods.push({
|
||||
start: currentDrawdownStart,
|
||||
end: equityCurve[i - 1].date,
|
||||
duration,
|
||||
magnitude: drawdownMagnitude
|
||||
});
|
||||
|
||||
drawdowns.push(drawdownMagnitude);
|
||||
maxDrawdownDuration = Math.max(maxDrawdownDuration, duration);
|
||||
currentDrawdownStart = null;
|
||||
}
|
||||
|
||||
peak = current.value;
|
||||
peakDate = current.date;
|
||||
} else {
|
||||
// In drawdown
|
||||
if (!currentDrawdownStart) {
|
||||
currentDrawdownStart = peakDate;
|
||||
}
|
||||
|
||||
const drawdown = (peak - current.value) / peak;
|
||||
maxDrawdown = Math.max(maxDrawdown, drawdown);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle ongoing drawdown
|
||||
if (currentDrawdownStart) {
|
||||
const lastPoint = equityCurve[equityCurve.length - 1];
|
||||
const drawdownMagnitude = (peak - lastPoint.value) / peak;
|
||||
const duration = Math.floor((lastPoint.date.getTime() - currentDrawdownStart.getTime()) / (1000 * 60 * 60 * 24));
|
||||
|
||||
drawdownPeriods.push({
|
||||
start: currentDrawdownStart,
|
||||
end: lastPoint.date,
|
||||
duration,
|
||||
magnitude: drawdownMagnitude
|
||||
});
|
||||
|
||||
drawdowns.push(drawdownMagnitude);
|
||||
maxDrawdownDuration = Math.max(maxDrawdownDuration, duration);
|
||||
}
|
||||
|
||||
const averageDrawdown = drawdowns.length > 0 ? drawdowns.reduce((sum, dd) => sum + dd, 0) / drawdowns.length : 0;
|
||||
|
||||
return {
|
||||
maxDrawdown,
|
||||
maxDrawdownDuration,
|
||||
averageDrawdown,
|
||||
drawdownPeriods
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze return characteristics
|
||||
*/
|
||||
export function analyzeReturns(
|
||||
returns: Array<{ return: number; date: Date }>,
|
||||
periodsPerYear: number = 252
|
||||
): ReturnAnalysis {
|
||||
if (returns.length === 0) {
|
||||
return {
|
||||
totalReturn: 0,
|
||||
annualizedReturn: 0,
|
||||
compoundAnnualGrowthRate: 0,
|
||||
volatility: 0,
|
||||
annualizedVolatility: 0,
|
||||
skewness: 0,
|
||||
kurtosis: 0,
|
||||
bestMonth: 0,
|
||||
worstMonth: 0,
|
||||
positiveMonths: 0,
|
||||
negativeMonths: 0
|
||||
};
|
||||
}
|
||||
|
||||
const returnValues = returns.map(r => r.return);
|
||||
|
||||
// Calculate basic statistics
|
||||
const totalReturn = returnValues.reduce((product, ret) => product * (1 + ret), 1) - 1;
|
||||
const averageReturn = returnValues.reduce((sum, ret) => sum + ret, 0) / returnValues.length;
|
||||
const annualizedReturn = Math.pow(1 + averageReturn, periodsPerYear) - 1;
|
||||
|
||||
// Calculate CAGR
|
||||
const years = returns.length / periodsPerYear;
|
||||
const cagr = years > 0 ? Math.pow(1 + totalReturn, 1 / years) - 1 : 0;
|
||||
|
||||
// Calculate volatility
|
||||
const variance = returnValues.reduce((sum, ret) => sum + Math.pow(ret - averageReturn, 2), 0) / (returnValues.length - 1);
|
||||
const volatility = Math.sqrt(variance);
|
||||
const annualizedVolatility = volatility * Math.sqrt(periodsPerYear);
|
||||
|
||||
// Calculate skewness and kurtosis
|
||||
const skewness = calculateSkewness(returnValues);
|
||||
const kurtosis = calculateKurtosis(returnValues);
|
||||
|
||||
// Monthly analysis
|
||||
const monthlyReturns = aggregateMonthlyReturns(returns);
|
||||
const bestMonth = monthlyReturns.length > 0 ? Math.max(...monthlyReturns) : 0;
|
||||
const worstMonth = monthlyReturns.length > 0 ? Math.min(...monthlyReturns) : 0;
|
||||
const positiveMonths = monthlyReturns.filter(ret => ret > 0).length;
|
||||
const negativeMonths = monthlyReturns.filter(ret => ret < 0).length;
|
||||
|
||||
return {
|
||||
totalReturn,
|
||||
annualizedReturn,
|
||||
compoundAnnualGrowthRate: cagr,
|
||||
volatility,
|
||||
annualizedVolatility,
|
||||
skewness,
|
||||
kurtosis,
|
||||
bestMonth,
|
||||
worstMonth,
|
||||
positiveMonths,
|
||||
negativeMonths
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate rolling performance metrics
|
||||
*/
|
||||
export function calculateRollingMetrics(
|
||||
returns: number[],
|
||||
windowSize: number,
|
||||
metricType: 'sharpe' | 'volatility' | 'return' = 'sharpe'
|
||||
): number[] {
|
||||
if (returns.length < windowSize) return [];
|
||||
|
||||
const rollingMetrics: number[] = [];
|
||||
|
||||
for (let i = windowSize - 1; i < returns.length; i++) {
|
||||
const window = returns.slice(i - windowSize + 1, i + 1);
|
||||
|
||||
switch (metricType) {
|
||||
case 'sharpe':
|
||||
rollingMetrics.push(calculateSharpeRatio(window));
|
||||
break;
|
||||
case 'volatility':
|
||||
rollingMetrics.push(calculateVolatility(window));
|
||||
break;
|
||||
case 'return':
|
||||
const avgReturn = window.reduce((sum, ret) => sum + ret, 0) / window.length;
|
||||
rollingMetrics.push(avgReturn);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return rollingMetrics;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate performance attribution
|
||||
*/
|
||||
export function strategyPerformanceAttribution(
|
||||
portfolioReturns: number[],
|
||||
benchmarkReturns: number[],
|
||||
sectorWeights: number[],
|
||||
sectorReturns: number[]
|
||||
): {
|
||||
allocationEffect: number;
|
||||
selectionEffect: number;
|
||||
interactionEffect: number;
|
||||
totalActiveReturn: number;
|
||||
} {
|
||||
if (portfolioReturns.length !== benchmarkReturns.length) {
|
||||
throw new Error('Portfolio and benchmark returns must have same length');
|
||||
}
|
||||
|
||||
const portfolioReturn = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
|
||||
const benchmarkReturn = benchmarkReturns.reduce((sum, ret) => sum + ret, 0) / benchmarkReturns.length;
|
||||
|
||||
let allocationEffect = 0;
|
||||
let selectionEffect = 0;
|
||||
let interactionEffect = 0;
|
||||
|
||||
for (let i = 0; i < sectorWeights.length; i++) {
|
||||
const portfolioWeight = sectorWeights[i];
|
||||
const benchmarkWeight = 1 / sectorWeights.length; // Assuming equal benchmark weights
|
||||
const sectorReturn = sectorReturns[i];
|
||||
|
||||
// Allocation effect: (portfolio weight - benchmark weight) * (benchmark sector return - benchmark return)
|
||||
allocationEffect += (portfolioWeight - benchmarkWeight) * (sectorReturn - benchmarkReturn);
|
||||
|
||||
// Selection effect: benchmark weight * (portfolio sector return - benchmark sector return)
|
||||
selectionEffect += benchmarkWeight * (sectorReturn - sectorReturn); // Simplified
|
||||
|
||||
// Interaction effect: (portfolio weight - benchmark weight) * (portfolio sector return - benchmark sector return)
|
||||
interactionEffect += (portfolioWeight - benchmarkWeight) * (sectorReturn - sectorReturn); // Simplified
|
||||
}
|
||||
|
||||
const totalActiveReturn = portfolioReturn - benchmarkReturn;
|
||||
|
||||
return {
|
||||
allocationEffect,
|
||||
selectionEffect,
|
||||
interactionEffect,
|
||||
totalActiveReturn
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Omega ratio
|
||||
*/
|
||||
export function omegaRatio(returns: number[], threshold: number = 0): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
const gains = returns.filter(ret => ret > threshold).reduce((sum, ret) => sum + (ret - threshold), 0);
|
||||
const losses = returns.filter(ret => ret < threshold).reduce((sum, ret) => sum + Math.abs(ret - threshold), 0);
|
||||
|
||||
return losses === 0 ? Infinity : gains / losses;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate gain-to-pain ratio
|
||||
*/
|
||||
export function gainToPainRatio(returns: number[]): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
const totalGain = returns.reduce((sum, ret) => sum + ret, 0);
|
||||
const totalPain = returns.filter(ret => ret < 0).reduce((sum, ret) => sum + Math.abs(ret), 0);
|
||||
|
||||
return totalPain === 0 ? (totalGain > 0 ? Infinity : 0) : totalGain / totalPain;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Martin ratio (modified Sharpe with downside deviation)
|
||||
*/
|
||||
export function martinRatio(returns: number[], riskFreeRate: number = 0): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
const averageReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const downsideReturns = returns.filter(ret => ret < riskFreeRate);
|
||||
|
||||
if (downsideReturns.length === 0) return Infinity;
|
||||
|
||||
const downsideDeviation = Math.sqrt(
|
||||
downsideReturns.reduce((sum, ret) => sum + Math.pow(ret - riskFreeRate, 2), 0) / returns.length
|
||||
);
|
||||
|
||||
return downsideDeviation === 0 ? Infinity : (averageReturn - riskFreeRate) / downsideDeviation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate comprehensive portfolio metrics
|
||||
*/
|
||||
export function calculateStrategyMetrics(
|
||||
equityCurve: Array<{ value: number; date: Date }>,
|
||||
benchmarkReturns?: number[],
|
||||
riskFreeRate: number = 0.02
|
||||
): PortfolioMetrics {
|
||||
if (equityCurve.length < 2) {
|
||||
return {
|
||||
totalValue: 0,
|
||||
totalReturn: 0,
|
||||
totalReturnPercent: 0,
|
||||
dailyReturn: 0,
|
||||
dailyReturnPercent: 0,
|
||||
maxDrawdown: 0,
|
||||
sharpeRatio: 0,
|
||||
beta: 0,
|
||||
alpha: 0,
|
||||
volatility: 0
|
||||
};
|
||||
}
|
||||
|
||||
const returns = [];
|
||||
for (let i = 1; i < equityCurve.length; i++) {
|
||||
const ret = (equityCurve[i].value - equityCurve[i - 1].value) / equityCurve[i - 1].value;
|
||||
returns.push(ret);
|
||||
}
|
||||
|
||||
const totalValue = equityCurve[equityCurve.length - 1].value;
|
||||
const totalReturn = totalValue - equityCurve[0].value;
|
||||
const totalReturnPercent = (totalReturn / equityCurve[0].value) * 100;
|
||||
|
||||
const dailyReturn = returns[returns.length - 1];
|
||||
const dailyReturnPercent = dailyReturn * 100;
|
||||
|
||||
const maxDrawdown = analyzeDrawdowns(equityCurve).maxDrawdown;
|
||||
const sharpeRatio = calculateSharpeRatio(returns, riskFreeRate);
|
||||
const volatility = calculateVolatility(returns);
|
||||
|
||||
let beta = 0;
|
||||
let alpha = 0;
|
||||
|
||||
if (benchmarkReturns && benchmarkReturns.length === returns.length) {
|
||||
beta = calculateBeta(returns, benchmarkReturns);
|
||||
alpha = calculateAlpha(returns, benchmarkReturns, riskFreeRate);
|
||||
}
|
||||
|
||||
return {
|
||||
totalValue,
|
||||
totalReturn,
|
||||
totalReturnPercent,
|
||||
dailyReturn,
|
||||
dailyReturnPercent,
|
||||
maxDrawdown,
|
||||
sharpeRatio,
|
||||
beta,
|
||||
alpha,
|
||||
volatility
|
||||
};
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
|
||||
function calculateSharpeRatio(returns: number[], riskFreeRate: number = 0): number {
|
||||
if (returns.length < 2) return 0;
|
||||
|
||||
const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / (returns.length - 1);
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
return stdDev === 0 ? 0 : (avgReturn - riskFreeRate) / stdDev;
|
||||
}
|
||||
|
||||
function calculateVolatility(returns: number[]): number {
|
||||
if (returns.length < 2) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
|
||||
|
||||
return Math.sqrt(variance);
|
||||
}
|
||||
|
||||
function calculateBeta(portfolioReturns: number[], marketReturns: number[]): number {
|
||||
if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) return 0;
|
||||
|
||||
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
|
||||
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
|
||||
|
||||
let covariance = 0;
|
||||
let marketVariance = 0;
|
||||
|
||||
for (let i = 0; i < portfolioReturns.length; i++) {
|
||||
const portfolioDiff = portfolioReturns[i] - portfolioMean;
|
||||
const marketDiff = marketReturns[i] - marketMean;
|
||||
|
||||
covariance += portfolioDiff * marketDiff;
|
||||
marketVariance += marketDiff * marketDiff;
|
||||
}
|
||||
|
||||
covariance /= (portfolioReturns.length - 1);
|
||||
marketVariance /= (marketReturns.length - 1);
|
||||
|
||||
return marketVariance === 0 ? 0 : covariance / marketVariance;
|
||||
}
|
||||
|
||||
function calculateAlpha(
|
||||
portfolioReturns: number[],
|
||||
marketReturns: number[],
|
||||
riskFreeRate: number
|
||||
): number {
|
||||
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
|
||||
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
|
||||
const beta = calculateBeta(portfolioReturns, marketReturns);
|
||||
|
||||
return portfolioMean - (riskFreeRate + beta * (marketMean - riskFreeRate));
|
||||
}
|
||||
|
||||
function calculateSkewness(returns: number[]): number {
|
||||
if (returns.length < 3) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
if (stdDev === 0) return 0;
|
||||
|
||||
const skew = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length;
|
||||
|
||||
return skew;
|
||||
}
|
||||
|
||||
function calculateKurtosis(returns: number[]): number {
|
||||
if (returns.length < 4) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
if (stdDev === 0) return 0;
|
||||
|
||||
const kurt = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length;
|
||||
|
||||
return kurt - 3; // Excess kurtosis
|
||||
}
|
||||
|
||||
function aggregateMonthlyReturns(returns: Array<{ return: number; date: Date }>): number[] {
|
||||
const monthlyReturns: { [key: string]: number } = {};
|
||||
|
||||
for (const ret of returns) {
|
||||
const monthKey = `${ret.date.getFullYear()}-${ret.date.getMonth()}`;
|
||||
if (!monthlyReturns[monthKey]) {
|
||||
monthlyReturns[monthKey] = 1;
|
||||
}
|
||||
monthlyReturns[monthKey] *= (1 + ret.return);
|
||||
}
|
||||
|
||||
return Object.values(monthlyReturns).map(cumReturn => cumReturn - 1);
|
||||
}
|
||||
421
libs/utils/src/calculations/portfolio-analytics.ts
Normal file
421
libs/utils/src/calculations/portfolio-analytics.ts
Normal file
|
|
@ -0,0 +1,421 @@
|
|||
/**
|
||||
* Portfolio Analytics
|
||||
* Advanced portfolio analysis and optimization tools
|
||||
*/
|
||||
|
||||
import { OHLCVData, PriceData } from './index';
|
||||
|
||||
/**
 * A single holding within a portfolio.
 */
export interface PortfolioPosition {
  symbol: string;  // instrument identifier
  shares: number;  // number of shares held
  price: number;   // price per share
  value: number;   // position market value
  weight: number;  // fraction of total portfolio value
}
|
||||
|
||||
/**
 * Risk/return summary for a portfolio.
 */
export interface PortfolioAnalysis {
  totalValue: number;        // portfolio market value
  totalReturn: number;       // cumulative return
  volatility: number;        // dispersion of portfolio returns
  sharpeRatio: number;       // excess return per unit of volatility
  maxDrawdown: number;       // worst peak-to-trough decline
  var95: number;             // value at risk at the 95% confidence level
  beta: number;              // sensitivity to the benchmark/market
  alpha: number;             // return beyond the CAPM expectation
  treynorRatio: number;      // excess return per unit of beta
  informationRatio: number;  // active return relative to tracking error
  trackingError: number;     // dispersion of active returns vs the benchmark
}
|
||||
|
||||
/**
 * Target-vs-actual allocation for one asset, used for rebalancing.
 */
export interface AssetAllocation {
  symbol: string;           // instrument identifier
  targetWeight: number;     // desired portfolio weight
  currentWeight: number;    // actual portfolio weight
  difference: number;       // gap between current and target weight
  rebalanceAmount: number;  // trade size needed to reach the target
}
|
||||
|
||||
/**
 * Output of a portfolio-optimization routine.
 */
export interface PortfolioOptimizationResult {
  weights: number[];       // optimized weight per asset
  expectedReturn: number;  // portfolio expected return under those weights
  volatility: number;      // portfolio standard deviation
  sharpeRatio: number;     // (expectedReturn - riskFreeRate) / volatility
  symbols: string[];       // asset identifiers matching `weights` order
}
|
||||
|
||||
/**
|
||||
* Calculate portfolio value and weights
|
||||
*/
|
||||
export function calculatePortfolioMetrics(positions: PortfolioPosition[]): {
|
||||
totalValue: number;
|
||||
weights: number[];
|
||||
concentrationRisk: number;
|
||||
} {
|
||||
const totalValue = positions.reduce((sum, pos) => sum + pos.value, 0);
|
||||
const weights = positions.map(pos => pos.value / totalValue);
|
||||
|
||||
// Calculate Herfindahl-Hirschman Index for concentration risk
|
||||
const concentrationRisk = weights.reduce((sum, weight) => sum + weight * weight, 0);
|
||||
|
||||
return {
|
||||
totalValue,
|
||||
weights,
|
||||
concentrationRisk
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate portfolio returns from position returns
|
||||
*/
|
||||
export function calculatePortfolioReturns(
|
||||
assetReturns: number[][],
|
||||
weights: number[]
|
||||
): number[] {
|
||||
if (assetReturns.length === 0 || weights.length !== assetReturns[0].length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const portfolioReturns: number[] = [];
|
||||
|
||||
for (let i = 0; i < assetReturns.length; i++) {
|
||||
let portfolioReturn = 0;
|
||||
for (let j = 0; j < weights.length; j++) {
|
||||
portfolioReturn += weights[j] * assetReturns[i][j];
|
||||
}
|
||||
portfolioReturns.push(portfolioReturn);
|
||||
}
|
||||
|
||||
return portfolioReturns;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mean-Variance Optimization (Markowitz)
|
||||
*/
|
||||
export function markowitzOptimization(
|
||||
expectedReturns: number[],
|
||||
covarianceMatrix: number[][],
|
||||
riskFreeRate: number = 0.02,
|
||||
riskAversion: number = 1
|
||||
): PortfolioOptimizationResult {
|
||||
const n = expectedReturns.length;
|
||||
|
||||
// Simplified optimization using equal weights as baseline
|
||||
// In production, use proper quadratic programming solver
|
||||
const weights = new Array(n).fill(1 / n);
|
||||
|
||||
const expectedReturn = weights.reduce((sum, weight, i) => sum + weight * expectedReturns[i], 0);
|
||||
|
||||
// Calculate portfolio variance
|
||||
let portfolioVariance = 0;
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < n; j++) {
|
||||
portfolioVariance += weights[i] * weights[j] * covarianceMatrix[i][j];
|
||||
}
|
||||
}
|
||||
|
||||
const volatility = Math.sqrt(portfolioVariance);
|
||||
const sharpeRatio = volatility > 0 ? (expectedReturn - riskFreeRate) / volatility : 0;
|
||||
|
||||
return {
|
||||
weights,
|
||||
expectedReturn,
|
||||
volatility,
|
||||
sharpeRatio,
|
||||
symbols: [] // Would be filled with actual symbols
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Black-Litterman Model
|
||||
*/
|
||||
export function blackLittermanOptimization(
|
||||
marketCaps: number[],
|
||||
covarianceMatrix: number[][],
|
||||
views: Array<{ assets: number[]; expectedReturn: number; confidence: number }>,
|
||||
riskAversion: number = 3,
|
||||
riskFreeRate: number = 0.02
|
||||
): PortfolioOptimizationResult {
|
||||
const n = marketCaps.length;
|
||||
|
||||
// Calculate market weights
|
||||
const totalMarketCap = marketCaps.reduce((sum, cap) => sum + cap, 0);
|
||||
const marketWeights = marketCaps.map(cap => cap / totalMarketCap);
|
||||
|
||||
// Implied equilibrium returns
|
||||
const equilibriumReturns: number[] = [];
|
||||
for (let i = 0; i < n; i++) {
|
||||
let equilibriumReturn = 0;
|
||||
for (let j = 0; j < n; j++) {
|
||||
equilibriumReturn += riskAversion * covarianceMatrix[i][j] * marketWeights[j];
|
||||
}
|
||||
equilibriumReturns.push(equilibriumReturn);
|
||||
}
|
||||
|
||||
// Simplified BL implementation - in production use proper matrix operations
|
||||
const weights = [...marketWeights]; // Start with market weights
|
||||
|
||||
const expectedReturn = weights.reduce((sum, weight, i) => sum + weight * equilibriumReturns[i], 0);
|
||||
|
||||
let portfolioVariance = 0;
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < n; j++) {
|
||||
portfolioVariance += weights[i] * weights[j] * covarianceMatrix[i][j];
|
||||
}
|
||||
}
|
||||
|
||||
const volatility = Math.sqrt(portfolioVariance);
|
||||
const sharpeRatio = volatility > 0 ? (expectedReturn - riskFreeRate) / volatility : 0;
|
||||
|
||||
return {
|
||||
weights,
|
||||
expectedReturn,
|
||||
volatility,
|
||||
sharpeRatio,
|
||||
symbols: []
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Risk Parity Portfolio
 *
 * Builds weights so every asset contributes equally to total portfolio risk,
 * using a simple fixed-point iteration: each pass shrinks/grows the weight of
 * assets whose risk contribution is above/below the equal-share target, then
 * renormalizes the weights to sum to 1. expectedReturn and sharpeRatio are
 * reported as 0 because this routine does not use expected returns.
 *
 * @param covarianceMatrix Asset covariance matrix (n x n).
 */
export function riskParityOptimization(covarianceMatrix: number[][]): PortfolioOptimizationResult {
  const n = covarianceMatrix.length;

  // Start with equal weights
  let weights = new Array(n).fill(1 / n);

  // Iterative optimization for equal risk contribution
  const maxIterations = 100;
  const tolerance = 1e-8; // max acceptable deviation from the equal-risk target

  for (let iter = 0; iter < maxIterations; iter++) {
    const riskContributions = calculateRiskContributions(weights, covarianceMatrix);
    const totalRisk = Math.sqrt(calculatePortfolioVariance(weights, covarianceMatrix));
    // Each of the n assets should contribute totalRisk / n.
    const targetRiskContribution = totalRisk / n;

    let converged = true;
    const newWeights = [...weights];

    for (let i = 0; i < n; i++) {
      const diff = riskContributions[i] - targetRiskContribution;
      if (Math.abs(diff) > tolerance) {
        converged = false;
        // Simple damped adjustment (step factor 0.1) - in production use proper optimization
        newWeights[i] *= (1 - diff / totalRisk * 0.1);
      }
    }

    // Normalize weights so they sum to 1 after the adjustment
    const sum = newWeights.reduce((s, w) => s + w, 0);
    weights = newWeights.map(w => w / sum);

    if (converged) break;
  }

  const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix);
  const volatility = Math.sqrt(portfolioVariance);

  return {
    weights,
    expectedReturn: 0, // Not calculated for risk parity
    volatility,
    sharpeRatio: 0,
    symbols: []
  };
}
|
||||
|
||||
/**
|
||||
* Calculate risk contributions for each asset
|
||||
*/
|
||||
export function calculateRiskContributions(
|
||||
weights: number[],
|
||||
covarianceMatrix: number[][]
|
||||
): number[] {
|
||||
const n = weights.length;
|
||||
const riskContributions: number[] = [];
|
||||
|
||||
const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix);
|
||||
const portfolioVolatility = Math.sqrt(portfolioVariance);
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
let marginalContribution = 0;
|
||||
for (let j = 0; j < n; j++) {
|
||||
marginalContribution += weights[j] * covarianceMatrix[i][j];
|
||||
}
|
||||
|
||||
const riskContribution = (weights[i] * marginalContribution) / portfolioVolatility;
|
||||
riskContributions.push(riskContribution);
|
||||
}
|
||||
|
||||
return riskContributions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate portfolio variance
|
||||
*/
|
||||
export function calculatePortfolioVariance(
|
||||
weights: number[],
|
||||
covarianceMatrix: number[][]
|
||||
): number {
|
||||
const n = weights.length;
|
||||
let variance = 0;
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
for (let j = 0; j < n; j++) {
|
||||
variance += weights[i] * weights[j] * covarianceMatrix[i][j];
|
||||
}
|
||||
}
|
||||
|
||||
return variance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Portfolio rebalancing analysis
|
||||
*/
|
||||
export function calculateRebalancing(
|
||||
currentPositions: PortfolioPosition[],
|
||||
targetWeights: number[],
|
||||
totalValue: number
|
||||
): AssetAllocation[] {
|
||||
if (currentPositions.length !== targetWeights.length) {
|
||||
throw new Error('Number of positions must match number of target weights');
|
||||
}
|
||||
|
||||
return currentPositions.map((position, index) => {
|
||||
const currentWeight = position.value / totalValue;
|
||||
const targetWeight = targetWeights[index];
|
||||
const difference = targetWeight - currentWeight;
|
||||
const rebalanceAmount = difference * totalValue;
|
||||
|
||||
return {
|
||||
symbol: position.symbol,
|
||||
targetWeight,
|
||||
currentWeight,
|
||||
difference,
|
||||
rebalanceAmount
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Factor model analysis (Fama-French)
|
||||
*/
|
||||
export function famaFrenchAnalysis(
|
||||
portfolioReturns: number[],
|
||||
marketReturns: number[],
|
||||
smbReturns: number[], // Small minus Big
|
||||
hmlReturns: number[], // High minus Low
|
||||
riskFreeRate: number = 0.02
|
||||
): {
|
||||
alpha: number;
|
||||
marketBeta: number;
|
||||
sizeBeta: number;
|
||||
valueBeta: number;
|
||||
rSquared: number;
|
||||
} {
|
||||
const n = portfolioReturns.length;
|
||||
|
||||
// Excess returns
|
||||
const excessPortfolioReturns = portfolioReturns.map(r => r - riskFreeRate);
|
||||
const excessMarketReturns = marketReturns.map(r => r - riskFreeRate);
|
||||
|
||||
// Simple linear regression (in production, use proper multiple regression)
|
||||
const meanExcessPortfolio = excessPortfolioReturns.reduce((sum, r) => sum + r, 0) / n;
|
||||
const meanExcessMarket = excessMarketReturns.reduce((sum, r) => sum + r, 0) / n;
|
||||
const meanSMB = smbReturns.reduce((sum, r) => sum + r, 0) / n;
|
||||
const meanHML = hmlReturns.reduce((sum, r) => sum + r, 0) / n;
|
||||
|
||||
// Calculate market beta
|
||||
let covariance = 0;
|
||||
let marketVariance = 0;
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
const portfolioDiff = excessPortfolioReturns[i] - meanExcessPortfolio;
|
||||
const marketDiff = excessMarketReturns[i] - meanExcessMarket;
|
||||
|
||||
covariance += portfolioDiff * marketDiff;
|
||||
marketVariance += marketDiff * marketDiff;
|
||||
}
|
||||
|
||||
const marketBeta = marketVariance > 0 ? covariance / marketVariance : 0;
|
||||
const alpha = meanExcessPortfolio - marketBeta * meanExcessMarket;
|
||||
|
||||
return {
|
||||
alpha,
|
||||
marketBeta,
|
||||
sizeBeta: 0, // Simplified - would need proper regression
|
||||
valueBeta: 0, // Simplified - would need proper regression
|
||||
rSquared: 0 // Simplified - would need proper regression
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Portfolio performance attribution
|
||||
*/
|
||||
export function performanceAttribution(
|
||||
portfolioReturns: number[],
|
||||
benchmarkReturns: number[],
|
||||
sectorWeights: number[][],
|
||||
sectorReturns: number[][]
|
||||
): {
|
||||
totalActiveReturn: number;
|
||||
allocationEffect: number;
|
||||
selectionEffect: number;
|
||||
interactionEffect: number;
|
||||
} {
|
||||
const n = portfolioReturns.length;
|
||||
|
||||
const portfolioReturn = portfolioReturns.reduce((sum, r) => sum + r, 0) / n;
|
||||
const benchmarkReturn = benchmarkReturns.reduce((sum, r) => sum + r, 0) / n;
|
||||
const totalActiveReturn = portfolioReturn - benchmarkReturn;
|
||||
|
||||
// Simplified attribution analysis
|
||||
let allocationEffect = 0;
|
||||
let selectionEffect = 0;
|
||||
let interactionEffect = 0;
|
||||
|
||||
// This would require proper implementation with sector-level analysis
|
||||
// For now, return the total active return distributed equally
|
||||
allocationEffect = totalActiveReturn * 0.4;
|
||||
selectionEffect = totalActiveReturn * 0.4;
|
||||
interactionEffect = totalActiveReturn * 0.2;
|
||||
|
||||
return {
|
||||
totalActiveReturn,
|
||||
allocationEffect,
|
||||
selectionEffect,
|
||||
interactionEffect
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Calculate efficient frontier points.
 *
 * Sweeps target returns evenly from the lowest to the highest single-asset
 * expected return (inclusive of both endpoints) and records a portfolio
 * for each.
 *
 * NOTE(review): the inner call ignores `targetReturn` — every point reuses
 * the same unconstrained markowitzOptimization result, so volatility,
 * Sharpe and weights are identical across the whole frontier. Replace with
 * a constrained optimization per target return; confirm intended behavior.
 */
export function calculateEfficientFrontier(
  expectedReturns: number[],
  covarianceMatrix: number[][],
  numPoints: number = 100
): Array<{ return: number; volatility: number; sharpeRatio: number; weights: number[] }> {
  const minReturn = Math.min(...expectedReturns);
  const maxReturn = Math.max(...expectedReturns);
  // Step between consecutive target returns (numPoints - 1 intervals).
  const returnStep = (maxReturn - minReturn) / (numPoints - 1);

  const frontierPoints: Array<{ return: number; volatility: number; sharpeRatio: number; weights: number[] }> = [];

  for (let i = 0; i < numPoints; i++) {
    const targetReturn = minReturn + i * returnStep;

    // Simplified optimization for target return
    // In production, use proper constrained optimization
    const result = markowitzOptimization(expectedReturns, covarianceMatrix);

    frontierPoints.push({
      return: targetReturn,
      volatility: result.volatility,
      sharpeRatio: result.sharpeRatio,
      weights: result.weights
    });
  }

  return frontierPoints;
}
|
||||
346
libs/utils/src/calculations/position-sizing.ts
Normal file
346
libs/utils/src/calculations/position-sizing.ts
Normal file
|
|
@ -0,0 +1,346 @@
|
|||
/**
|
||||
* Position Sizing Calculations
|
||||
* Risk-based position sizing methods for trading strategies
|
||||
*/
|
||||
|
||||
/** Inputs for fixed-risk position sizing. */
export interface PositionSizeParams {
  accountSize: number; // total account equity, in currency units
  riskPercentage: number; // percent of account risked per trade (1 = 1%)
  entryPrice: number; // planned entry price per share/unit
  stopLoss: number; // stop-loss price per share/unit
  leverage?: number; // optional size multiplier; defaults to 1 where consumed
}
|
||||
|
||||
/** Inputs for Kelly-criterion position sizing. */
export interface KellyParams {
  winRate: number; // probability of a winning trade, in (0, 1)
  averageWin: number; // mean profit of winning trades
  averageLoss: number; // mean loss of losing trades (abs() is taken by consumers)
}
|
||||
|
||||
/** Inputs for volatility-targeted position sizing. */
export interface VolatilityParams {
  price: number; // current asset price
  volatility: number; // realized volatility of the asset
  targetVolatility: number; // desired exposure volatility
  lookbackDays: number; // NOTE(review): not read by volatilityTargetPositionSize — confirm intended use
}
|
||||
|
||||
/**
|
||||
* Calculate position size based on fixed risk percentage
|
||||
*/
|
||||
export function fixedRiskPositionSize(params: PositionSizeParams): number {
|
||||
const { accountSize, riskPercentage, entryPrice, stopLoss, leverage = 1 } = params;
|
||||
|
||||
if (entryPrice === stopLoss) return 0;
|
||||
|
||||
const riskAmount = accountSize * (riskPercentage / 100);
|
||||
const riskPerShare = Math.abs(entryPrice - stopLoss);
|
||||
const basePositionSize = riskAmount / riskPerShare;
|
||||
|
||||
return basePositionSize * leverage;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate position size using Kelly Criterion
|
||||
*/
|
||||
export function kellyPositionSize(params: KellyParams, accountSize: number): number {
|
||||
const { winRate, averageWin, averageLoss } = params;
|
||||
|
||||
if (averageLoss === 0 || winRate === 0 || winRate === 1) return 0;
|
||||
|
||||
const lossRate = 1 - winRate;
|
||||
const winLossRatio = averageWin / Math.abs(averageLoss);
|
||||
|
||||
// Kelly formula: f = (bp - q) / b
|
||||
// where: b = win/loss ratio, p = win rate, q = loss rate
|
||||
const kellyFraction = (winLossRatio * winRate - lossRate) / winLossRatio;
|
||||
|
||||
// Cap Kelly fraction to prevent over-leveraging
|
||||
const cappedKelly = Math.max(0, Math.min(kellyFraction, 0.25));
|
||||
|
||||
return accountSize * cappedKelly;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate fractional Kelly position size (more conservative)
|
||||
*/
|
||||
export function fractionalKellyPositionSize(
|
||||
params: KellyParams,
|
||||
accountSize: number,
|
||||
fraction: number = 0.25
|
||||
): number {
|
||||
const fullKelly = kellyPositionSize(params, accountSize);
|
||||
return fullKelly * fraction;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate position size based on volatility targeting
|
||||
*/
|
||||
export function volatilityTargetPositionSize(params: VolatilityParams, accountSize: number): number {
|
||||
const { price, volatility, targetVolatility } = params;
|
||||
|
||||
if (volatility === 0 || price === 0) return 0;
|
||||
|
||||
const volatilityRatio = targetVolatility / volatility;
|
||||
const basePositionValue = accountSize * volatilityRatio;
|
||||
|
||||
return basePositionValue / price;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate equal weight position size
|
||||
*/
|
||||
export function equalWeightPositionSize(
|
||||
accountSize: number,
|
||||
numberOfPositions: number,
|
||||
price: number
|
||||
): number {
|
||||
if (numberOfPositions === 0 || price === 0) return 0;
|
||||
|
||||
const positionValue = accountSize / numberOfPositions;
|
||||
return positionValue / price;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate position size based on Average True Range (ATR)
|
||||
*/
|
||||
export function atrBasedPositionSize(
|
||||
accountSize: number,
|
||||
riskPercentage: number,
|
||||
atrValue: number,
|
||||
atrMultiplier: number = 2,
|
||||
price: number
|
||||
): number {
|
||||
if (atrValue === 0 || price === 0) return 0;
|
||||
|
||||
const riskAmount = accountSize * (riskPercentage / 100);
|
||||
const stopDistance = atrValue * atrMultiplier;
|
||||
|
||||
return riskAmount / stopDistance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate position size using Van Tharp's expectancy
|
||||
*/
|
||||
export function expectancyPositionSize(
|
||||
accountSize: number,
|
||||
winRate: number,
|
||||
averageWin: number,
|
||||
averageLoss: number,
|
||||
maxRiskPercentage: number = 2
|
||||
): number {
|
||||
const expectancy = (winRate * averageWin) - ((1 - winRate) * Math.abs(averageLoss));
|
||||
|
||||
if (expectancy <= 0) return 0;
|
||||
|
||||
// Scale position size based on expectancy
|
||||
const expectancyRatio = expectancy / Math.abs(averageLoss);
|
||||
const riskPercentage = Math.min(expectancyRatio * 0.5, maxRiskPercentage);
|
||||
|
||||
return accountSize * (riskPercentage / 100);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate optimal position size using Monte Carlo simulation
|
||||
*/
|
||||
export function monteCarloPositionSize(
|
||||
accountSize: number,
|
||||
historicalReturns: number[],
|
||||
simulations: number = 1000,
|
||||
confidenceLevel: number = 0.95
|
||||
): number {
|
||||
if (historicalReturns.length === 0) return 0;
|
||||
|
||||
const outcomes: number[] = [];
|
||||
|
||||
for (let i = 0; i < simulations; i++) {
|
||||
let portfolioValue = accountSize;
|
||||
|
||||
// Simulate a series of trades
|
||||
for (let j = 0; j < 252; j++) { // One year of trading days
|
||||
const randomReturn = historicalReturns[Math.floor(Math.random() * historicalReturns.length)];
|
||||
portfolioValue *= (1 + randomReturn);
|
||||
}
|
||||
|
||||
outcomes.push(portfolioValue);
|
||||
}
|
||||
|
||||
outcomes.sort((a, b) => a - b);
|
||||
const worstCaseIndex = Math.floor((1 - confidenceLevel) * outcomes.length);
|
||||
const worstCaseValue = outcomes[worstCaseIndex];
|
||||
|
||||
// Calculate safe position size based on worst-case scenario
|
||||
const maxLoss = accountSize - worstCaseValue;
|
||||
const safePositionRatio = Math.min(0.1, accountSize / (maxLoss * 10));
|
||||
|
||||
return accountSize * safePositionRatio;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate position size based on Sharpe ratio optimization
|
||||
*/
|
||||
export function sharpeOptimizedPositionSize(
|
||||
accountSize: number,
|
||||
expectedReturn: number,
|
||||
volatility: number,
|
||||
riskFreeRate: number = 0.02,
|
||||
maxLeverage: number = 3
|
||||
): number {
|
||||
if (volatility === 0) return 0;
|
||||
|
||||
const excessReturn = expectedReturn - riskFreeRate;
|
||||
const sharpeRatio = excessReturn / volatility;
|
||||
|
||||
// Optimal leverage based on Sharpe ratio
|
||||
const optimalLeverage = Math.min(sharpeRatio / volatility, maxLeverage);
|
||||
|
||||
return accountSize * Math.max(0, optimalLeverage);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate position size with correlation adjustment
|
||||
*/
|
||||
export function correlationAdjustedPositionSize(
|
||||
basePositionSize: number,
|
||||
existingPositions: Array<{ size: number; correlation: number }>,
|
||||
maxCorrelationRisk: number = 0.3
|
||||
): number {
|
||||
if (existingPositions.length === 0) return basePositionSize;
|
||||
|
||||
// Calculate total correlation risk
|
||||
const totalCorrelationRisk = existingPositions.reduce((total, position) => {
|
||||
return total + (position.size * Math.abs(position.correlation));
|
||||
}, 0);
|
||||
|
||||
// Adjust position size based on correlation risk
|
||||
const correlationAdjustment = Math.max(0, 1 - (totalCorrelationRisk / maxCorrelationRisk));
|
||||
|
||||
return basePositionSize * correlationAdjustment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate portfolio heat (total risk across all positions)
|
||||
*/
|
||||
export function calculatePortfolioHeat(
|
||||
positions: Array<{ value: number; risk: number }>,
|
||||
accountSize: number
|
||||
): number {
|
||||
const totalRisk = positions.reduce((sum, position) => sum + position.risk, 0);
|
||||
return (totalRisk / accountSize) * 100;
|
||||
}
|
||||
|
||||
/**
|
||||
* Dynamic position sizing based on market conditions
|
||||
*/
|
||||
export function dynamicPositionSize(
|
||||
basePositionSize: number,
|
||||
marketVolatility: number,
|
||||
normalVolatility: number,
|
||||
drawdownLevel: number,
|
||||
maxDrawdownThreshold: number = 0.1
|
||||
): number {
|
||||
// Volatility adjustment
|
||||
const volatilityAdjustment = normalVolatility / Math.max(marketVolatility, 0.01);
|
||||
|
||||
// Drawdown adjustment
|
||||
const drawdownAdjustment = Math.max(0.5, 1 - (drawdownLevel / maxDrawdownThreshold));
|
||||
|
||||
return basePositionSize * volatilityAdjustment * drawdownAdjustment;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate maximum position size based on liquidity
|
||||
*/
|
||||
export function liquidityConstrainedPositionSize(
|
||||
desiredPositionSize: number,
|
||||
averageDailyVolume: number,
|
||||
maxVolumePercentage: number = 0.05,
|
||||
price: number
|
||||
): number {
|
||||
const maxShares = (averageDailyVolume * maxVolumePercentage);
|
||||
const maxPositionValue = maxShares * price;
|
||||
const desiredPositionValue = desiredPositionSize * price;
|
||||
|
||||
return Math.min(desiredPositionSize, maxPositionValue / price);
|
||||
}
|
||||
|
||||
/**
|
||||
* Multi-timeframe position sizing
|
||||
*/
|
||||
export function multiTimeframePositionSize(
|
||||
accountSize: number,
|
||||
shortTermSignal: number, // -1 to 1
|
||||
mediumTermSignal: number, // -1 to 1
|
||||
longTermSignal: number, // -1 to 1
|
||||
baseRiskPercentage: number = 1
|
||||
): number {
|
||||
// Weight the signals (long-term gets higher weight)
|
||||
const weightedSignal = (
|
||||
shortTermSignal * 0.2 +
|
||||
mediumTermSignal * 0.3 +
|
||||
longTermSignal * 0.5
|
||||
);
|
||||
|
||||
// Adjust risk based on signal strength
|
||||
const adjustedRisk = baseRiskPercentage * Math.abs(weightedSignal);
|
||||
|
||||
return accountSize * (adjustedRisk / 100);
|
||||
}
|
||||
|
||||
/**
|
||||
* Risk parity position sizing
|
||||
*/
|
||||
export function riskParityPositionSize(
|
||||
assets: Array<{ volatility: number; price: number }>,
|
||||
targetRisk: number,
|
||||
accountSize: number
|
||||
): number[] {
|
||||
const totalInverseVol = assets.reduce((sum, asset) => sum + (1 / asset.volatility), 0);
|
||||
|
||||
return assets.map(asset => {
|
||||
const weight = (1 / asset.volatility) / totalInverseVol;
|
||||
const positionValue = accountSize * weight;
|
||||
return positionValue / asset.price;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate position size against risk limits
|
||||
*/
|
||||
export function validatePositionSize(
|
||||
positionSize: number,
|
||||
price: number,
|
||||
accountSize: number,
|
||||
maxPositionPercentage: number = 10,
|
||||
maxLeverage: number = 1
|
||||
): { isValid: boolean; adjustedSize: number; violations: string[] } {
|
||||
const violations: string[] = [];
|
||||
let adjustedSize = positionSize;
|
||||
|
||||
// Check maximum position percentage
|
||||
const positionValue = positionSize * price;
|
||||
const positionPercentage = (positionValue / accountSize) * 100;
|
||||
|
||||
if (positionPercentage > maxPositionPercentage) {
|
||||
violations.push(`Position exceeds maximum ${maxPositionPercentage}% of account`);
|
||||
adjustedSize = (accountSize * maxPositionPercentage / 100) / price;
|
||||
}
|
||||
|
||||
// Check leverage limits
|
||||
const leverage = positionValue / accountSize;
|
||||
if (leverage > maxLeverage) {
|
||||
violations.push(`Position exceeds maximum leverage of ${maxLeverage}x`);
|
||||
adjustedSize = Math.min(adjustedSize, (accountSize * maxLeverage) / price);
|
||||
}
|
||||
|
||||
// Check minimum position size
|
||||
if (adjustedSize < 1 && adjustedSize > 0) {
|
||||
violations.push('Position size too small (less than 1 share)');
|
||||
adjustedSize = 0;
|
||||
}
|
||||
|
||||
return {
|
||||
isValid: violations.length === 0,
|
||||
adjustedSize: Math.max(0, adjustedSize),
|
||||
violations
|
||||
};
|
||||
}
|
||||
365
libs/utils/src/calculations/risk-metrics.ts
Normal file
365
libs/utils/src/calculations/risk-metrics.ts
Normal file
|
|
@ -0,0 +1,365 @@
|
|||
/**
|
||||
* Risk Metrics and Analysis
|
||||
* Comprehensive risk measurement tools for portfolio and trading analysis
|
||||
*/
|
||||
|
||||
import { RiskMetrics } from './index';
|
||||
|
||||
/**
|
||||
* Calculate Value at Risk (VaR) using historical simulation
|
||||
*/
|
||||
export function valueAtRisk(returns: number[], confidenceLevel: number = 0.95): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
const sortedReturns = [...returns].sort((a, b) => a - b);
|
||||
const index = Math.floor((1 - confidenceLevel) * sortedReturns.length);
|
||||
|
||||
return sortedReturns[index] || 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Conditional Value at Risk (CVaR/Expected Shortfall)
|
||||
*/
|
||||
export function conditionalValueAtRisk(returns: number[], confidenceLevel: number = 0.95): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
const sortedReturns = [...returns].sort((a, b) => a - b);
|
||||
const cutoffIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length);
|
||||
|
||||
if (cutoffIndex === 0) return sortedReturns[0];
|
||||
|
||||
const tailReturns = sortedReturns.slice(0, cutoffIndex);
|
||||
return tailReturns.reduce((sum, ret) => sum + ret, 0) / tailReturns.length;
|
||||
}
|
||||
|
||||
/**
 * Calculate parametric VaR using a normal-distribution assumption.
 *
 * VaR = V · (μ − z·σ) with the sample mean and (n-1) standard deviation of
 * the return series and the z-score for the requested confidence level.
 * The result is a return-scale quantile (typically negative for a loss),
 * scaled by portfolioValue.
 *
 * NOTE(review): a single-element series yields NaN (division by n-1 = 0) —
 * confirm whether a length guard is wanted.
 */
export function parametricVaR(
  returns: number[],
  confidenceLevel: number = 0.95,
  portfolioValue: number = 1
): number {
  if (returns.length === 0) return 0;

  // Sample mean and (n-1) sample standard deviation.
  const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
  const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
  const stdDev = Math.sqrt(variance);

  // Z-score for confidence level (normal distribution)
  const zScore = getZScore(confidenceLevel);

  return portfolioValue * (mean - zScore * stdDev);
}
|
||||
|
||||
/**
|
||||
* Calculate maximum drawdown
|
||||
*/
|
||||
export function maxDrawdown(equityCurve: number[]): number {
|
||||
if (equityCurve.length < 2) return 0;
|
||||
|
||||
let maxDD = 0;
|
||||
let peak = equityCurve[0];
|
||||
|
||||
for (let i = 1; i < equityCurve.length; i++) {
|
||||
if (equityCurve[i] > peak) {
|
||||
peak = equityCurve[i];
|
||||
} else {
|
||||
const drawdown = (peak - equityCurve[i]) / peak;
|
||||
maxDD = Math.max(maxDD, drawdown);
|
||||
}
|
||||
}
|
||||
|
||||
return maxDD;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate downside deviation
|
||||
*/
|
||||
export function downsideDeviation(returns: number[], targetReturn: number = 0): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
const downsideReturns = returns.filter(ret => ret < targetReturn);
|
||||
|
||||
if (downsideReturns.length === 0) return 0;
|
||||
|
||||
const sumSquaredDownside = downsideReturns.reduce(
|
||||
(sum, ret) => sum + Math.pow(ret - targetReturn, 2),
|
||||
0
|
||||
);
|
||||
|
||||
return Math.sqrt(sumSquaredDownside / returns.length);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Sharpe ratio
|
||||
*/
|
||||
export function sharpeRatio(returns: number[], riskFreeRate: number = 0): number {
|
||||
if (returns.length < 2) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
if (stdDev === 0) return 0;
|
||||
|
||||
return (mean - riskFreeRate) / stdDev;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Sortino ratio
|
||||
*/
|
||||
export function sortinoRatio(returns: number[], targetReturn: number = 0): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const downsideDev = downsideDeviation(returns, targetReturn);
|
||||
|
||||
if (downsideDev === 0) return 0;
|
||||
|
||||
return (mean - targetReturn) / downsideDev;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Calmar ratio
|
||||
*/
|
||||
export function calmarRatio(returns: number[], equityCurve: number[]): number {
|
||||
if (returns.length === 0 || equityCurve.length === 0) return 0;
|
||||
|
||||
const annualizedReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length * 252; // Assuming daily returns
|
||||
const maxDD = maxDrawdown(equityCurve);
|
||||
|
||||
if (maxDD === 0) return 0;
|
||||
|
||||
return annualizedReturn / maxDD;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Information Ratio
|
||||
*/
|
||||
export function informationRatio(portfolioReturns: number[], benchmarkReturns: number[]): number {
|
||||
if (portfolioReturns.length !== benchmarkReturns.length || portfolioReturns.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const activeReturns = portfolioReturns.map((ret, i) => ret - benchmarkReturns[i]);
|
||||
const mean = activeReturns.reduce((sum, ret) => sum + ret, 0) / activeReturns.length;
|
||||
const trackingError = Math.sqrt(
|
||||
activeReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (activeReturns.length - 1)
|
||||
);
|
||||
|
||||
if (trackingError === 0) return 0;
|
||||
|
||||
return mean / trackingError;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Beta (systematic risk)
|
||||
*/
|
||||
export function beta(portfolioReturns: number[], marketReturns: number[]): number {
|
||||
if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
|
||||
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
|
||||
|
||||
let covariance = 0;
|
||||
let marketVariance = 0;
|
||||
|
||||
for (let i = 0; i < portfolioReturns.length; i++) {
|
||||
const portfolioDiff = portfolioReturns[i] - portfolioMean;
|
||||
const marketDiff = marketReturns[i] - marketMean;
|
||||
|
||||
covariance += portfolioDiff * marketDiff;
|
||||
marketVariance += marketDiff * marketDiff;
|
||||
}
|
||||
|
||||
covariance /= (portfolioReturns.length - 1);
|
||||
marketVariance /= (marketReturns.length - 1);
|
||||
|
||||
if (marketVariance === 0) return 0;
|
||||
|
||||
return covariance / marketVariance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Alpha (excess return over expected return based on beta)
|
||||
*/
|
||||
export function alpha(
|
||||
portfolioReturns: number[],
|
||||
marketReturns: number[],
|
||||
riskFreeRate: number = 0
|
||||
): number {
|
||||
if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
|
||||
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
|
||||
const portfolioBeta = beta(portfolioReturns, marketReturns);
|
||||
|
||||
return portfolioMean - (riskFreeRate + portfolioBeta * (marketMean - riskFreeRate));
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Treynor ratio
|
||||
*/
|
||||
export function treynorRatio(
|
||||
portfolioReturns: number[],
|
||||
marketReturns: number[],
|
||||
riskFreeRate: number = 0
|
||||
): number {
|
||||
if (portfolioReturns.length === 0) return 0;
|
||||
|
||||
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
|
||||
const portfolioBeta = beta(portfolioReturns, marketReturns);
|
||||
|
||||
if (portfolioBeta === 0) return 0;
|
||||
|
||||
return (portfolioMean - riskFreeRate) / portfolioBeta;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate tracking error
|
||||
*/
|
||||
export function trackingError(portfolioReturns: number[], benchmarkReturns: number[]): number {
|
||||
if (portfolioReturns.length !== benchmarkReturns.length || portfolioReturns.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const activeReturns = portfolioReturns.map((ret, i) => ret - benchmarkReturns[i]);
|
||||
const mean = activeReturns.reduce((sum, ret) => sum + ret, 0) / activeReturns.length;
|
||||
|
||||
const variance = activeReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (activeReturns.length - 1);
|
||||
|
||||
return Math.sqrt(variance);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate volatility (standard deviation of returns)
|
||||
*/
|
||||
export function volatility(returns: number[]): number {
|
||||
if (returns.length < 2) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
|
||||
|
||||
return Math.sqrt(variance);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate annualized volatility
|
||||
*/
|
||||
export function annualizedVolatility(returns: number[], periodsPerYear: number = 252): number {
|
||||
return volatility(returns) * Math.sqrt(periodsPerYear);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate skewness (measure of asymmetry)
|
||||
*/
|
||||
export function skewness(returns: number[]): number {
|
||||
if (returns.length < 3) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
if (stdDev === 0) return 0;
|
||||
|
||||
const skew = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length;
|
||||
|
||||
return skew;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate kurtosis (measure of tail heaviness)
|
||||
*/
|
||||
export function kurtosis(returns: number[]): number {
|
||||
if (returns.length < 4) return 0;
|
||||
|
||||
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
|
||||
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
if (stdDev === 0) return 0;
|
||||
|
||||
const kurt = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length;
|
||||
|
||||
return kurt - 3; // Excess kurtosis (subtract 3 for normal distribution baseline)
|
||||
}
|
||||
|
||||
/**
 * Calculate comprehensive risk metrics for a return series and its equity
 * curve, aggregating the individual metric functions in this module into a
 * single RiskMetrics record.
 *
 * NOTE(review): `marketReturns` and `riskFreeRate` are accepted but never
 * used — confirm whether beta/alpha/Sharpe were meant to be included here.
 */
export function calculateRiskMetrics(
  returns: number[],
  equityCurve: number[],
  marketReturns?: number[],
  riskFreeRate: number = 0
): RiskMetrics {
  return {
    var95: valueAtRisk(returns, 0.95),
    var99: valueAtRisk(returns, 0.99),
    cvar95: conditionalValueAtRisk(returns, 0.95),
    maxDrawdown: maxDrawdown(equityCurve),
    volatility: volatility(returns),
    downside_deviation: downsideDeviation(returns),
    calmar_ratio: calmarRatio(returns, equityCurve),
    sortino_ratio: sortinoRatio(returns)
  };
}
|
||||
|
||||
/**
|
||||
* Helper function to get Z-score for confidence level
|
||||
*/
|
||||
function getZScore(confidenceLevel: number): number {
|
||||
// Approximate Z-scores for common confidence levels
|
||||
const zScores: { [key: string]: number } = {
|
||||
'0.90': 1.282,
|
||||
'0.95': 1.645,
|
||||
'0.975': 1.960,
|
||||
'0.99': 2.326,
|
||||
'0.995': 2.576
|
||||
};
|
||||
|
||||
const key = confidenceLevel.toString();
|
||||
return zScores[key] || 1.645; // Default to 95% confidence
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate portfolio risk contribution
|
||||
*/
|
||||
export function riskContribution(
|
||||
weights: number[],
|
||||
covarianceMatrix: number[][],
|
||||
portfolioVolatility: number
|
||||
): number[] {
|
||||
const n = weights.length;
|
||||
const contributions: number[] = [];
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
let marginalContribution = 0;
|
||||
|
||||
for (let j = 0; j < n; j++) {
|
||||
marginalContribution += weights[j] * covarianceMatrix[i][j];
|
||||
}
|
||||
|
||||
const contribution = (weights[i] * marginalContribution) / Math.pow(portfolioVolatility, 2);
|
||||
contributions.push(contribution);
|
||||
}
|
||||
|
||||
return contributions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate risk-adjusted return (RAR)
|
||||
*/
|
||||
export function riskAdjustedReturn(
|
||||
portfolioReturn: number,
|
||||
portfolioRisk: number,
|
||||
riskFreeRate: number = 0
|
||||
): number {
|
||||
if (portfolioRisk === 0) return 0;
|
||||
return (portfolioReturn - riskFreeRate) / portfolioRisk;
|
||||
}
|
||||
470
libs/utils/src/calculations/technical-indicators.ts
Normal file
470
libs/utils/src/calculations/technical-indicators.ts
Normal file
|
|
@ -0,0 +1,470 @@
|
|||
/**
|
||||
* Technical Indicators
|
||||
* Comprehensive set of technical analysis indicators
|
||||
*/
|
||||
|
||||
import { OHLCVData } from './index';
|
||||
|
||||
/**
|
||||
* Simple Moving Average
|
||||
*/
|
||||
export function sma(values: number[], period: number): number[] {
|
||||
if (period > values.length) return [];
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = period - 1; i < values.length; i++) {
|
||||
const sum = values.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0);
|
||||
result.push(sum / period);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Exponential Moving Average
|
||||
*/
|
||||
export function ema(values: number[], period: number): number[] {
|
||||
if (period > values.length) return [];
|
||||
|
||||
const result: number[] = [];
|
||||
const multiplier = 2 / (period + 1);
|
||||
|
||||
// Start with SMA for first value
|
||||
let ema = values.slice(0, period).reduce((a, b) => a + b, 0) / period;
|
||||
result.push(ema);
|
||||
|
||||
for (let i = period; i < values.length; i++) {
|
||||
ema = (values[i] * multiplier) + (ema * (1 - multiplier));
|
||||
result.push(ema);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Relative Strength Index (RSI)
|
||||
*/
|
||||
export function rsi(prices: number[], period: number = 14): number[] {
|
||||
if (period >= prices.length) return [];
|
||||
|
||||
const gains: number[] = [];
|
||||
const losses: number[] = [];
|
||||
|
||||
// Calculate gains and losses
|
||||
for (let i = 1; i < prices.length; i++) {
|
||||
const change = prices[i] - prices[i - 1];
|
||||
gains.push(change > 0 ? change : 0);
|
||||
losses.push(change < 0 ? Math.abs(change) : 0);
|
||||
}
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
// Calculate RSI
|
||||
for (let i = period - 1; i < gains.length; i++) {
|
||||
const avgGain = gains.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0) / period;
|
||||
const avgLoss = losses.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0) / period;
|
||||
|
||||
if (avgLoss === 0) {
|
||||
result.push(100);
|
||||
} else {
|
||||
const rs = avgGain / avgLoss;
|
||||
const rsiValue = 100 - (100 / (1 + rs));
|
||||
result.push(rsiValue);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Moving Average Convergence Divergence (MACD)
|
||||
*/
|
||||
export function macd(
|
||||
prices: number[],
|
||||
fastPeriod: number = 12,
|
||||
slowPeriod: number = 26,
|
||||
signalPeriod: number = 9
|
||||
): { macd: number[], signal: number[], histogram: number[] } {
|
||||
const fastEMA = ema(prices, fastPeriod);
|
||||
const slowEMA = ema(prices, slowPeriod);
|
||||
|
||||
const macdLine: number[] = [];
|
||||
const startIndex = slowPeriod - fastPeriod;
|
||||
|
||||
for (let i = 0; i < fastEMA.length - startIndex; i++) {
|
||||
macdLine.push(fastEMA[i + startIndex] - slowEMA[i]);
|
||||
}
|
||||
|
||||
const signalLine = ema(macdLine, signalPeriod);
|
||||
const histogram: number[] = [];
|
||||
|
||||
const signalStartIndex = signalPeriod - 1;
|
||||
for (let i = 0; i < signalLine.length; i++) {
|
||||
histogram.push(macdLine[i + signalStartIndex] - signalLine[i]);
|
||||
}
|
||||
|
||||
return {
|
||||
macd: macdLine,
|
||||
signal: signalLine,
|
||||
histogram: histogram
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Bollinger Bands
|
||||
*/
|
||||
export function bollingerBands(
|
||||
prices: number[],
|
||||
period: number = 20,
|
||||
standardDeviations: number = 2
|
||||
): { upper: number[], middle: number[], lower: number[] } {
|
||||
const middle = sma(prices, period);
|
||||
const upper: number[] = [];
|
||||
const lower: number[] = [];
|
||||
|
||||
for (let i = period - 1; i < prices.length; i++) {
|
||||
const slice = prices.slice(i - period + 1, i + 1);
|
||||
const mean = slice.reduce((a, b) => a + b, 0) / period;
|
||||
const variance = slice.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / period;
|
||||
const stdDev = Math.sqrt(variance);
|
||||
|
||||
const middleValue = middle[i - period + 1];
|
||||
upper.push(middleValue + (standardDeviations * stdDev));
|
||||
lower.push(middleValue - (standardDeviations * stdDev));
|
||||
}
|
||||
|
||||
return { upper, middle, lower };
|
||||
}
|
||||
|
||||
/**
|
||||
* Average True Range (ATR)
|
||||
*/
|
||||
export function atr(ohlcv: OHLCVData[], period: number = 14): number[] {
|
||||
if (period >= ohlcv.length) return [];
|
||||
|
||||
const trueRanges: number[] = [];
|
||||
|
||||
for (let i = 1; i < ohlcv.length; i++) {
|
||||
const high = ohlcv[i].high;
|
||||
const low = ohlcv[i].low;
|
||||
const prevClose = ohlcv[i - 1].close;
|
||||
|
||||
const tr = Math.max(
|
||||
high - low,
|
||||
Math.abs(high - prevClose),
|
||||
Math.abs(low - prevClose)
|
||||
);
|
||||
|
||||
trueRanges.push(tr);
|
||||
}
|
||||
|
||||
return sma(trueRanges, period);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stochastic Oscillator
|
||||
*/
|
||||
export function stochastic(
|
||||
ohlcv: OHLCVData[],
|
||||
kPeriod: number = 14,
|
||||
dPeriod: number = 3
|
||||
): { k: number[], d: number[] } {
|
||||
if (kPeriod >= ohlcv.length) return { k: [], d: [] };
|
||||
|
||||
const kValues: number[] = [];
|
||||
|
||||
for (let i = kPeriod - 1; i < ohlcv.length; i++) {
|
||||
const slice = ohlcv.slice(i - kPeriod + 1, i + 1);
|
||||
const highest = Math.max(...slice.map(d => d.high));
|
||||
const lowest = Math.min(...slice.map(d => d.low));
|
||||
const currentClose = ohlcv[i].close;
|
||||
|
||||
if (highest === lowest) {
|
||||
kValues.push(50); // Avoid division by zero
|
||||
} else {
|
||||
const kValue = ((currentClose - lowest) / (highest - lowest)) * 100;
|
||||
kValues.push(kValue);
|
||||
}
|
||||
}
|
||||
|
||||
const dValues = sma(kValues, dPeriod);
|
||||
|
||||
return { k: kValues, d: dValues };
|
||||
}
|
||||
|
||||
/**
|
||||
* Williams %R
|
||||
*/
|
||||
export function williamsR(ohlcv: OHLCVData[], period: number = 14): number[] {
|
||||
if (period >= ohlcv.length) return [];
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = period - 1; i < ohlcv.length; i++) {
|
||||
const slice = ohlcv.slice(i - period + 1, i + 1);
|
||||
const highest = Math.max(...slice.map(d => d.high));
|
||||
const lowest = Math.min(...slice.map(d => d.low));
|
||||
const currentClose = ohlcv[i].close;
|
||||
|
||||
if (highest === lowest) {
|
||||
result.push(-50); // Avoid division by zero
|
||||
} else {
|
||||
const wrValue = ((highest - currentClose) / (highest - lowest)) * -100;
|
||||
result.push(wrValue);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Commodity Channel Index (CCI)
|
||||
*/
|
||||
export function cci(ohlcv: OHLCVData[], period: number = 20): number[] {
|
||||
if (period >= ohlcv.length) return [];
|
||||
|
||||
const typicalPrices = ohlcv.map(d => (d.high + d.low + d.close) / 3);
|
||||
const smaTP = sma(typicalPrices, period);
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = 0; i < smaTP.length; i++) {
|
||||
const slice = typicalPrices.slice(i, i + period);
|
||||
const mean = smaTP[i];
|
||||
const meanDeviation = slice.reduce((sum, value) => sum + Math.abs(value - mean), 0) / period;
|
||||
|
||||
if (meanDeviation === 0) {
|
||||
result.push(0);
|
||||
} else {
|
||||
const cciValue = (typicalPrices[i + period - 1] - mean) / (0.015 * meanDeviation);
|
||||
result.push(cciValue);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Momentum
|
||||
*/
|
||||
export function momentum(prices: number[], period: number = 10): number[] {
|
||||
if (period >= prices.length) return [];
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = period; i < prices.length; i++) {
|
||||
result.push(prices[i] - prices[i - period]);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rate of Change (ROC)
|
||||
*/
|
||||
export function rateOfChange(prices: number[], period: number = 10): number[] {
|
||||
if (period >= prices.length) return [];
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = period; i < prices.length; i++) {
|
||||
if (prices[i - period] === 0) {
|
||||
result.push(0);
|
||||
} else {
|
||||
const rocValue = ((prices[i] - prices[i - period]) / prices[i - period]) * 100;
|
||||
result.push(rocValue);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Money Flow Index (MFI)
|
||||
*/
|
||||
export function moneyFlowIndex(ohlcv: OHLCVData[], period: number = 14): number[] {
|
||||
if (period >= ohlcv.length) return [];
|
||||
|
||||
const typicalPrices = ohlcv.map(d => (d.high + d.low + d.close) / 3);
|
||||
const rawMoneyFlows = ohlcv.map((d, i) => typicalPrices[i] * d.volume);
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = 1; i < ohlcv.length - period + 1; i++) {
|
||||
let positiveFlow = 0;
|
||||
let negativeFlow = 0;
|
||||
|
||||
for (let j = 0; j < period; j++) {
|
||||
const currentIndex = i + j;
|
||||
if (typicalPrices[currentIndex] > typicalPrices[currentIndex - 1]) {
|
||||
positiveFlow += rawMoneyFlows[currentIndex];
|
||||
} else if (typicalPrices[currentIndex] < typicalPrices[currentIndex - 1]) {
|
||||
negativeFlow += rawMoneyFlows[currentIndex];
|
||||
}
|
||||
}
|
||||
|
||||
if (negativeFlow === 0) {
|
||||
result.push(100);
|
||||
} else {
|
||||
const moneyRatio = positiveFlow / negativeFlow;
|
||||
const mfiValue = 100 - (100 / (1 + moneyRatio));
|
||||
result.push(mfiValue);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* On Balance Volume (OBV)
|
||||
*/
|
||||
export function onBalanceVolume(ohlcv: OHLCVData[]): number[] {
|
||||
if (ohlcv.length === 0) return [];
|
||||
|
||||
const result: number[] = [ohlcv[0].volume];
|
||||
|
||||
for (let i = 1; i < ohlcv.length; i++) {
|
||||
let obvValue = result[i - 1];
|
||||
|
||||
if (ohlcv[i].close > ohlcv[i - 1].close) {
|
||||
obvValue += ohlcv[i].volume;
|
||||
} else if (ohlcv[i].close < ohlcv[i - 1].close) {
|
||||
obvValue -= ohlcv[i].volume;
|
||||
}
|
||||
|
||||
result.push(obvValue);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Accumulation/Distribution Line
|
||||
*/
|
||||
export function accumulationDistribution(ohlcv: OHLCVData[]): number[] {
|
||||
if (ohlcv.length === 0) return [];
|
||||
|
||||
const result: number[] = [];
|
||||
let adLine = 0;
|
||||
|
||||
for (const candle of ohlcv) {
|
||||
if (candle.high === candle.low) {
|
||||
// Avoid division by zero
|
||||
result.push(adLine);
|
||||
continue;
|
||||
}
|
||||
|
||||
const moneyFlowMultiplier = ((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low);
|
||||
const moneyFlowVolume = moneyFlowMultiplier * candle.volume;
|
||||
adLine += moneyFlowVolume;
|
||||
result.push(adLine);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Chaikin Money Flow (CMF)
|
||||
*/
|
||||
export function chaikinMoneyFlow(ohlcv: OHLCVData[], period: number = 20): number[] {
|
||||
if (period >= ohlcv.length) return [];
|
||||
|
||||
const adValues: number[] = [];
|
||||
|
||||
for (const candle of ohlcv) {
|
||||
if (candle.high === candle.low) {
|
||||
adValues.push(0);
|
||||
} else {
|
||||
const moneyFlowMultiplier = ((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low);
|
||||
const moneyFlowVolume = moneyFlowMultiplier * candle.volume;
|
||||
adValues.push(moneyFlowVolume);
|
||||
}
|
||||
}
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = period - 1; i < ohlcv.length; i++) {
|
||||
const sumAD = adValues.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0);
|
||||
const sumVolume = ohlcv.slice(i - period + 1, i + 1).reduce((a, b) => a + b.volume, 0);
|
||||
|
||||
if (sumVolume === 0) {
|
||||
result.push(0);
|
||||
} else {
|
||||
result.push(sumAD / sumVolume);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
 * Parabolic SAR
 *
 * Trend-following stop-and-reverse indicator. Maintains a SAR value that
 * accelerates toward price via EP (the extreme point of the current trend)
 * and AF (an acceleration factor that grows by `step` for each new extreme,
 * capped at `maximum`). When price crosses the SAR, the trend flips and the
 * SAR restarts from the prior EP.
 *
 * Returns one SAR value per input bar (first entry is the seed).
 *
 * NOTE(review): on a reversal bar the pushed SAR is the prior EP without the
 * two-bar clamp applied — confirm this simplification is acceptable.
 *
 * @param ohlcv   Price bars (needs at least 2; returns [] otherwise).
 * @param step    AF increment per new extreme (default 0.02).
 * @param maximum AF cap (default 0.2).
 */
export function parabolicSAR(
  ohlcv: OHLCVData[],
  step: number = 0.02,
  maximum: number = 0.2
): number[] {
  if (ohlcv.length < 2) return [];

  const result: number[] = [];
  // Seed trend direction from the first two closes
  let isUptrend = ohlcv[1].close > ohlcv[0].close;
  // SAR starts at the first bar's extreme opposite the trend
  let sar = isUptrend ? ohlcv[0].low : ohlcv[0].high;
  // EP: most favorable price reached in the current trend so far
  let ep = isUptrend ? ohlcv[1].high : ohlcv[1].low;
  let af = step;

  result.push(sar);

  for (let i = 1; i < ohlcv.length; i++) {
    const currentHigh = ohlcv[i].high;
    const currentLow = ohlcv[i].low;
    // NOTE(review): currentClose is never read below — candidate for removal
    const currentClose = ohlcv[i].close;

    // Calculate new SAR: step toward EP by the acceleration factor
    sar = sar + af * (ep - sar);

    if (isUptrend) {
      // Uptrend logic
      if (currentLow <= sar) {
        // Trend reversal: SAR restarts at the old EP, AF resets
        isUptrend = false;
        sar = ep;
        ep = currentLow;
        af = step;
      } else {
        // Continue uptrend
        if (currentHigh > ep) {
          // New high: advance EP and accelerate (capped at `maximum`)
          ep = currentHigh;
          af = Math.min(af + step, maximum);
        }
        // Ensure SAR doesn't go above previous two lows
        if (i >= 2) {
          sar = Math.min(sar, ohlcv[i - 1].low, ohlcv[i - 2].low);
        }
      }
    } else {
      // Downtrend logic
      if (currentHigh >= sar) {
        // Trend reversal: SAR restarts at the old EP, AF resets
        isUptrend = true;
        sar = ep;
        ep = currentHigh;
        af = step;
      } else {
        // Continue downtrend
        if (currentLow < ep) {
          // New low: advance EP and accelerate (capped at `maximum`)
          ep = currentLow;
          af = Math.min(af + step, maximum);
        }
        // Ensure SAR doesn't go below previous two highs
        if (i >= 2) {
          sar = Math.max(sar, ohlcv[i - 1].high, ohlcv[i - 2].high);
        }
      }
    }

    result.push(sar);
  }

  return result;
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue