added more functions
This commit is contained in:
parent
a53d8d13ca
commit
a1c82ae0b8
7 changed files with 648 additions and 177 deletions
|
|
@ -326,10 +326,9 @@ export function IntradayPatterns(
|
|||
hourlyVolume[hour] = data.volumes.length > 0 ?
|
||||
data.volumes.reduce((sum, vol) => sum + vol, 0) / data.volumes.length : 0;
|
||||
}
|
||||
|
||||
// opening gap and closing drift
|
||||
// opening gap and closing drift
|
||||
const openingGap = ohlcv.length > 1 ?
|
||||
(ohlcv[0].open - ohlcv[0].close) / ohlcv[0].close : 0;
|
||||
(ohlcv[1].open - ohlcv[0].close) / ohlcv[0].close : 0;
|
||||
|
||||
const lastCandle = ohlcv[ohlcv.length - 1];
|
||||
const closingDrift = (lastCandle.close - lastCandle.open) / lastCandle.open;
|
||||
|
|
@ -536,6 +535,136 @@ export function ImplementationShortfall(
|
|||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Amihud Illiquidity Measure (price impact per unit of volume)
|
||||
*/
|
||||
export function amihudIlliquidity(
|
||||
ohlcv: OHLCVData[],
|
||||
lookbackPeriod: number = 252
|
||||
): number {
|
||||
if (ohlcv.length < lookbackPeriod) return 0;
|
||||
|
||||
const recentData = ohlcv.slice(-lookbackPeriod);
|
||||
let illiquiditySum = 0;
|
||||
let validDays = 0;
|
||||
|
||||
for (const candle of recentData) {
|
||||
if (candle.volume > 0) {
|
||||
const dailyReturn = Math.abs((candle.close - candle.open) / candle.open);
|
||||
const dollarVolume = candle.volume * candle.close;
|
||||
|
||||
if (dollarVolume > 0) {
|
||||
illiquiditySum += dailyReturn / dollarVolume;
|
||||
validDays++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return validDays > 0 ? (illiquiditySum / validDays) * 1000000 : 0; // Scale to millions
|
||||
}
|
||||
|
||||
/**
|
||||
* Roll's Spread Estimator (effective spread from serial covariance)
|
||||
*/
|
||||
export function rollSpreadEstimator(prices: number[]): number {
|
||||
if (prices.length < 3) return 0;
|
||||
|
||||
// Calculate price changes
|
||||
const priceChanges: number[] = [];
|
||||
for (let i = 1; i < prices.length; i++) {
|
||||
priceChanges.push(prices[i] - prices[i - 1]);
|
||||
}
|
||||
|
||||
// Calculate serial covariance
|
||||
let covariance = 0;
|
||||
for (let i = 1; i < priceChanges.length; i++) {
|
||||
covariance += priceChanges[i] * priceChanges[i - 1];
|
||||
}
|
||||
covariance /= (priceChanges.length - 1);
|
||||
|
||||
// Roll's estimator: spread = 2 * sqrt(-covariance)
|
||||
const spread = covariance < 0 ? 2 * Math.sqrt(-covariance) : 0;
|
||||
|
||||
return spread;
|
||||
}
|
||||
|
||||
/**
|
||||
* Kyle's Lambda (price impact coefficient)
|
||||
*/
|
||||
export function kyleLambda(
|
||||
priceChanges: number[],
|
||||
orderFlow: number[] // Signed order flow (positive for buys, negative for sells)
|
||||
): number {
|
||||
if (priceChanges.length !== orderFlow.length || priceChanges.length < 2) return 0;
|
||||
|
||||
// Calculate regression: priceChange = lambda * orderFlow + error
|
||||
const n = priceChanges.length;
|
||||
const meanPrice = priceChanges.reduce((sum, p) => sum + p, 0) / n;
|
||||
const meanFlow = orderFlow.reduce((sum, f) => sum + f, 0) / n;
|
||||
|
||||
let numerator = 0;
|
||||
let denominator = 0;
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
const priceDeviation = priceChanges[i] - meanPrice;
|
||||
const flowDeviation = orderFlow[i] - meanFlow;
|
||||
|
||||
numerator += priceDeviation * flowDeviation;
|
||||
denominator += flowDeviation * flowDeviation;
|
||||
}
|
||||
|
||||
return denominator > 0 ? numerator / denominator : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Probability of Informed Trading (PIN) - simplified version
|
||||
*/
|
||||
export function probabilityInformedTrading(
|
||||
buyVolumes: number[],
|
||||
sellVolumes: number[],
|
||||
period: number = 20
|
||||
): number {
|
||||
if (buyVolumes.length !== sellVolumes.length || buyVolumes.length < period) return 0;
|
||||
|
||||
const recentBuys = buyVolumes.slice(-period);
|
||||
const recentSells = sellVolumes.slice(-period);
|
||||
|
||||
let totalImbalance = 0;
|
||||
let totalVolume = 0;
|
||||
|
||||
for (let i = 0; i < period; i++) {
|
||||
const imbalance = Math.abs(recentBuys[i] - recentSells[i]);
|
||||
const volume = recentBuys[i] + recentSells[i];
|
||||
|
||||
totalImbalance += imbalance;
|
||||
totalVolume += volume;
|
||||
}
|
||||
|
||||
// Simplified PIN estimate based on order imbalance
|
||||
return totalVolume > 0 ? totalImbalance / totalVolume : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Herfindahl-Hirschman Index for Volume Concentration
|
||||
*/
|
||||
export function volumeConcentrationHHI(
|
||||
exchanges: Array<{ name: string; volume: number }>
|
||||
): number {
|
||||
if (exchanges.length === 0) return 0;
|
||||
|
||||
const totalVolume = exchanges.reduce((sum, exchange) => sum + exchange.volume, 0);
|
||||
|
||||
if (totalVolume === 0) return 0;
|
||||
|
||||
let hhi = 0;
|
||||
for (const exchange of exchanges) {
|
||||
const marketShare = exchange.volume / totalVolume;
|
||||
hhi += marketShare * marketShare;
|
||||
}
|
||||
|
||||
return hhi * 10000; // Scale to 0-10000 range
|
||||
}
|
||||
|
||||
// Helper functions
|
||||
|
||||
function calculateVolatility(returns: number[]): number {
|
||||
|
|
|
|||
|
|
@ -389,33 +389,188 @@ export function performanceAttribution(
|
|||
}
|
||||
|
||||
/**
|
||||
* Calculate efficient frontier points
|
||||
* Calculate Efficient Frontier points
|
||||
*/
|
||||
export function calculateEfficientFrontier(
|
||||
expectedReturns: number[],
|
||||
covarianceMatrix: number[][],
|
||||
numPoints: number = 100
|
||||
): Array<{ return: number; volatility: number; sharpeRatio: number; weights: number[] }> {
|
||||
returns: number[][], // Array of return series for each asset
|
||||
symbols: string[],
|
||||
riskFreeRate: number = 0.02,
|
||||
numPoints: number = 50
|
||||
): Array<{
|
||||
weights: number[];
|
||||
expectedReturn: number;
|
||||
volatility: number;
|
||||
sharpeRatio: number;
|
||||
}> {
|
||||
if (returns.length !== symbols.length || returns.length < 2) return [];
|
||||
|
||||
const n = returns.length;
|
||||
const results: Array<{ weights: number[]; expectedReturn: number; volatility: number; sharpeRatio: number; }> = [];
|
||||
|
||||
// Calculate expected returns and covariance matrix
|
||||
const expectedReturns = returns.map(assetReturns =>
|
||||
assetReturns.reduce((sum, ret) => sum + ret, 0) / assetReturns.length
|
||||
);
|
||||
|
||||
const covarianceMatrix = calculateCovarianceMatrix(returns);
|
||||
|
||||
// Generate target returns from min to max expected return
|
||||
const minReturn = Math.min(...expectedReturns);
|
||||
const maxReturn = Math.max(...expectedReturns);
|
||||
const returnStep = (maxReturn - minReturn) / (numPoints - 1);
|
||||
|
||||
const frontierPoints: Array<{ return: number; volatility: number; sharpeRatio: number; weights: number[] }> = [];
|
||||
|
||||
for (let i = 0; i < numPoints; i++) {
|
||||
const targetReturn = minReturn + i * returnStep;
|
||||
|
||||
// Simplified optimization for target return
|
||||
// In production, use proper constrained optimization
|
||||
const result = markowitzOptimization(expectedReturns, covarianceMatrix);
|
||||
// Find minimum variance portfolio for target return using quadratic programming (simplified)
|
||||
const weights = findMinimumVarianceWeights(expectedReturns, covarianceMatrix, targetReturn);
|
||||
|
||||
frontierPoints.push({
|
||||
return: targetReturn,
|
||||
volatility: result.volatility,
|
||||
sharpeRatio: result.sharpeRatio,
|
||||
weights: result.weights
|
||||
});
|
||||
if (weights && weights.length === n) {
|
||||
const portfolioReturn = weights.reduce((sum, w, j) => sum + w * expectedReturns[j], 0);
|
||||
const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix);
|
||||
const portfolioVolatility = Math.sqrt(portfolioVariance);
|
||||
const sharpeRatio = portfolioVolatility > 0 ? (portfolioReturn - riskFreeRate) / portfolioVolatility : 0;
|
||||
|
||||
results.push({
|
||||
weights,
|
||||
expectedReturn: portfolioReturn,
|
||||
volatility: portfolioVolatility,
|
||||
sharpeRatio
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return frontierPoints;
|
||||
return results.sort((a, b) => a.volatility - b.volatility);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find Minimum Variance Portfolio
|
||||
*/
|
||||
export function findMinimumVariancePortfolio(
|
||||
returns: number[][],
|
||||
symbols: string[]
|
||||
): PortfolioOptimizationResult | null {
|
||||
if (returns.length !== symbols.length || returns.length < 2) return null;
|
||||
|
||||
const covarianceMatrix = calculateCovarianceMatrix(returns);
|
||||
const n = returns.length;
|
||||
|
||||
// For minimum variance portfolio: w = (Σ^-1 * 1) / (1' * Σ^-1 * 1)
|
||||
// Simplified implementation using equal weights as starting point
|
||||
const weights = new Array(n).fill(1 / n);
|
||||
|
||||
// Iterative optimization (simplified)
|
||||
for (let iter = 0; iter < 100; iter++) {
|
||||
const gradient = calculateVarianceGradient(weights, covarianceMatrix);
|
||||
const stepSize = 0.01;
|
||||
|
||||
// Update weights
|
||||
for (let i = 0; i < n; i++) {
|
||||
weights[i] -= stepSize * gradient[i];
|
||||
}
|
||||
|
||||
// Normalize weights to sum to 1
|
||||
const weightSum = weights.reduce((sum, w) => sum + w, 0);
|
||||
for (let i = 0; i < n; i++) {
|
||||
weights[i] = Math.max(0, weights[i] / weightSum);
|
||||
}
|
||||
}
|
||||
|
||||
const expectedReturns = returns.map(assetReturns =>
|
||||
assetReturns.reduce((sum, ret) => sum + ret, 0) / assetReturns.length
|
||||
);
|
||||
|
||||
const portfolioReturn = weights.reduce((sum, w, i) => sum + w * expectedReturns[i], 0);
|
||||
const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix);
|
||||
const portfolioVolatility = Math.sqrt(portfolioVariance);
|
||||
const sharpeRatio = portfolioVolatility > 0 ? portfolioReturn / portfolioVolatility : 0;
|
||||
|
||||
return {
|
||||
weights,
|
||||
expectedReturn: portfolioReturn,
|
||||
volatility: portfolioVolatility,
|
||||
sharpeRatio,
|
||||
symbols
|
||||
};
|
||||
}
|
||||
|
||||
// Helper functions for portfolio optimization
|
||||
|
||||
function calculateCovarianceMatrix(returns: number[][]): number[][] {
|
||||
const n = returns.length;
|
||||
const matrix: number[][] = [];
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
matrix[i] = [];
|
||||
for (let j = 0; j < n; j++) {
|
||||
matrix[i][j] = calculateCovariance(returns[i], returns[j]);
|
||||
}
|
||||
}
|
||||
|
||||
return matrix;
|
||||
}
|
||||
|
||||
function calculateCovariance(x: number[], y: number[]): number {
|
||||
if (x.length !== y.length || x.length < 2) return 0;
|
||||
|
||||
const n = x.length;
|
||||
const meanX = x.reduce((sum, val) => sum + val, 0) / n;
|
||||
const meanY = y.reduce((sum, val) => sum + val, 0) / n;
|
||||
|
||||
return x.reduce((sum, val, i) => sum + (val - meanX) * (y[i] - meanY), 0) / (n - 1);
|
||||
}
|
||||
|
||||
// calculatePortfolioVariance is already exported above
|
||||
|
||||
function calculateVarianceGradient(weights: number[], covarianceMatrix: number[][]): number[] {
|
||||
const n = weights.length;
|
||||
const gradient: number[] = [];
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
let grad = 0;
|
||||
for (let j = 0; j < n; j++) {
|
||||
grad += 2 * weights[j] * covarianceMatrix[i][j];
|
||||
}
|
||||
gradient[i] = grad;
|
||||
}
|
||||
|
||||
return gradient;
|
||||
}
|
||||
|
||||
function findMinimumVarianceWeights(
|
||||
expectedReturns: number[],
|
||||
covarianceMatrix: number[][],
|
||||
targetReturn: number
|
||||
): number[] | null {
|
||||
const n = expectedReturns.length;
|
||||
|
||||
// Simplified implementation - in practice would use quadratic programming solver
|
||||
// Start with equal weights and adjust
|
||||
const weights = new Array(n).fill(1 / n);
|
||||
|
||||
// Iterative adjustment to meet target return constraint
|
||||
for (let iter = 0; iter < 50; iter++) {
|
||||
const currentReturn = weights.reduce((sum, w, i) => sum + w * expectedReturns[i], 0);
|
||||
const returnDiff = targetReturn - currentReturn;
|
||||
|
||||
if (Math.abs(returnDiff) < 0.001) break;
|
||||
|
||||
// Adjust weights proportionally to expected returns
|
||||
const totalExpectedReturn = expectedReturns.reduce((sum, r) => sum + Math.abs(r), 0);
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
const adjustment = (returnDiff * Math.abs(expectedReturns[i])) / totalExpectedReturn;
|
||||
weights[i] = Math.max(0, weights[i] + adjustment * 0.1);
|
||||
}
|
||||
|
||||
// Normalize weights
|
||||
const weightSum = weights.reduce((sum, w) => sum + w, 0);
|
||||
if (weightSum > 0) {
|
||||
for (let i = 0; i < n; i++) {
|
||||
weights[i] /= weightSum;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return weights;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -452,3 +452,73 @@ export function validatePositionSize(
|
|||
violations
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Optimal F position sizing (Ralph Vince's method)
|
||||
*/
|
||||
export function optimalFPositionSize(
|
||||
accountSize: number,
|
||||
historicalReturns: number[],
|
||||
maxIterations: number = 100
|
||||
): number {
|
||||
if (historicalReturns.length === 0 || accountSize <= 0) return 0;
|
||||
|
||||
// Convert returns to P&L per unit
|
||||
const pnlValues = historicalReturns.map(ret => ret * 1000); // Assuming $1000 per unit
|
||||
|
||||
let bestF = 0;
|
||||
let bestTWR = 0; // Terminal Wealth Relative
|
||||
|
||||
// Test different f values (0.01 to 1.00)
|
||||
for (let f = 0.01; f <= 1.0; f += 0.01) {
|
||||
let twr = 1.0;
|
||||
let valid = true;
|
||||
|
||||
for (const pnl of pnlValues) {
|
||||
const hpr = 1 + (f * pnl / 1000); // Holding Period Return
|
||||
|
||||
if (hpr <= 0) {
|
||||
valid = false;
|
||||
break;
|
||||
}
|
||||
|
||||
twr *= hpr;
|
||||
}
|
||||
|
||||
if (valid && twr > bestTWR) {
|
||||
bestTWR = twr;
|
||||
bestF = f;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply safety factor
|
||||
const safeF = bestF * 0.75; // 75% of optimal f for safety
|
||||
|
||||
return accountSize * safeF;
|
||||
}
|
||||
|
||||
/**
|
||||
* Secure F position sizing (safer version of Optimal F)
|
||||
*/
|
||||
export function secureFPositionSize(
|
||||
accountSize: number,
|
||||
historicalReturns: number[],
|
||||
confidenceLevel: number = 0.95
|
||||
): number {
|
||||
if (historicalReturns.length === 0 || accountSize <= 0) return 0;
|
||||
|
||||
// Sort returns to find worst-case scenarios
|
||||
const sortedReturns = [...historicalReturns].sort((a, b) => a - b);
|
||||
const worstCaseIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length);
|
||||
const worstCaseReturn = sortedReturns[worstCaseIndex];
|
||||
|
||||
// Calculate maximum position size that won't bankrupt at confidence level
|
||||
const maxLoss = Math.abs(worstCaseReturn);
|
||||
const maxRiskPercentage = 0.02; // Never risk more than 2% on worst case
|
||||
|
||||
if (maxLoss === 0) return accountSize * 0.1; // Default to 10% if no historical losses
|
||||
|
||||
const secureF = Math.min(maxRiskPercentage / maxLoss, 0.25); // Cap at 25%
|
||||
|
||||
return accountSize * secureF;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -421,3 +421,129 @@ export function riskAdjustedReturn(
|
|||
if (portfolioRisk === 0) return 0;
|
||||
return (portfolioReturn - riskFreeRate) / portfolioRisk;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Omega Ratio (probability-weighted ratio of gains vs losses)
|
||||
*/
|
||||
export function omegaRatio(returns: number[], threshold: number = 0): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
let gainsSum = 0;
|
||||
let lossesSum = 0;
|
||||
|
||||
for (const ret of returns) {
|
||||
const excessReturn = ret - threshold;
|
||||
if (excessReturn > 0) {
|
||||
gainsSum += excessReturn;
|
||||
} else {
|
||||
lossesSum += Math.abs(excessReturn);
|
||||
}
|
||||
}
|
||||
|
||||
return lossesSum > 0 ? gainsSum / lossesSum : (gainsSum > 0 ? Infinity : 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Upside Potential Ratio
|
||||
*/
|
||||
export function upsidePotentialRatio(returns: number[], threshold: number = 0): number {
|
||||
if (returns.length === 0) return 0;
|
||||
|
||||
let upsidePotentialSum = 0;
|
||||
let downsideDeviationSum = 0;
|
||||
|
||||
for (const ret of returns) {
|
||||
const excessReturn = ret - threshold;
|
||||
if (excessReturn > 0) {
|
||||
upsidePotentialSum += excessReturn;
|
||||
} else {
|
||||
downsideDeviationSum += excessReturn * excessReturn;
|
||||
}
|
||||
}
|
||||
|
||||
const downsideDeviation = Math.sqrt(downsideDeviationSum / returns.length);
|
||||
const avgUpsidePotential = upsidePotentialSum / returns.length;
|
||||
|
||||
return downsideDeviation > 0 ? avgUpsidePotential / downsideDeviation : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate maximum drawdown duration
|
||||
*/
|
||||
export function drawdownDuration(equityCurve: number[]): {
|
||||
maxDuration: number;
|
||||
currentDuration: number;
|
||||
avgDuration: number;
|
||||
} {
|
||||
if (equityCurve.length === 0) {
|
||||
return { maxDuration: 0, currentDuration: 0, avgDuration: 0 };
|
||||
}
|
||||
|
||||
let peak = equityCurve[0];
|
||||
let maxDuration = 0;
|
||||
let currentDuration = 0;
|
||||
const durations: number[] = [];
|
||||
let inDrawdown = false;
|
||||
let drawdownStart = 0;
|
||||
|
||||
for (let i = 1; i < equityCurve.length; i++) {
|
||||
if (equityCurve[i] > peak) {
|
||||
if (inDrawdown) {
|
||||
// End of drawdown
|
||||
const duration = i - drawdownStart;
|
||||
durations.push(duration);
|
||||
maxDuration = Math.max(maxDuration, duration);
|
||||
inDrawdown = false;
|
||||
currentDuration = 0;
|
||||
}
|
||||
peak = equityCurve[i];
|
||||
} else {
|
||||
if (!inDrawdown) {
|
||||
// Start of drawdown
|
||||
inDrawdown = true;
|
||||
drawdownStart = i;
|
||||
}
|
||||
currentDuration = i - drawdownStart;
|
||||
}
|
||||
}
|
||||
|
||||
// If still in drawdown at the end
|
||||
if (inDrawdown) {
|
||||
maxDuration = Math.max(maxDuration, currentDuration);
|
||||
}
|
||||
|
||||
const avgDuration = durations.length > 0 ?
|
||||
durations.reduce((sum, dur) => sum + dur, 0) / durations.length : 0;
|
||||
|
||||
return {
|
||||
maxDuration,
|
||||
currentDuration: inDrawdown ? currentDuration : 0,
|
||||
avgDuration
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate CAPM expected return
|
||||
* Uses the Capital Asset Pricing Model: E(R) = Rf + β(E(Rm) - Rf)
|
||||
*/
|
||||
export function capmExpectedReturn(
|
||||
riskFreeRate: number,
|
||||
marketReturn: number,
|
||||
assetBeta: number
|
||||
): number {
|
||||
return riskFreeRate + assetBeta * (marketReturn - riskFreeRate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate Jensen's Alpha
|
||||
* Jensen's Alpha = Portfolio Return - CAPM Expected Return
|
||||
*/
|
||||
export function jensenAlpha(
|
||||
portfolioReturn: number,
|
||||
riskFreeRate: number,
|
||||
marketReturn: number,
|
||||
portfolioBeta: number
|
||||
): number {
|
||||
const expectedReturn = capmExpectedReturn(riskFreeRate, marketReturn, portfolioBeta);
|
||||
return portfolioReturn - expectedReturn;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -497,3 +497,151 @@ export function aroon(ohlcv: OHLCVData[], period: number = 14): { up: number[],
|
|||
|
||||
return { up, down };
|
||||
}
|
||||
|
||||
/**
 * Average Directional Movement Index (ADX) and Directional Movement Indicators (DMI).
 *
 * Builds true range and +DM/-DM per bar, smooths each with the file's `sma`
 * helper (a simple moving average, not Wilder's recursive smoothing), forms
 * +DI/-DI and DX, then smooths DX again to obtain ADX.
 *
 * @param ohlcv  Candles, oldest first.
 * @param period Smoothing window (default 14).
 * @returns adx, plusDI and minusDI series (empty when period >= ohlcv.length).
 *
 * NOTE(review): the returned plusDI/minusDI are trimmed by (period - 1)
 * samples, but adx is shorter by a further (period - 1) due to the second
 * smoothing pass — confirm the intended index alignment with callers.
 */
export function adx(ohlcv: OHLCVData[], period: number = 14): { adx: number[], plusDI: number[], minusDI: number[] } {
  if (period >= ohlcv.length) return { adx: [], plusDI: [], minusDI: [] };

  const trueRanges: number[] = [];
  const plusDM: number[] = [];
  const minusDM: number[] = [];

  // Calculate True Range and Directional Movements (one value per bar,
  // starting from the second candle since both need the previous close).
  for (let i = 1; i < ohlcv.length; i++) {
    const current = ohlcv[i];
    const previous = ohlcv[i - 1];

    // True Range: the largest of the bar's range and the two gaps
    // relative to the previous close.
    const tr = Math.max(
      current.high - current.low,
      Math.abs(current.high - previous.close),
      Math.abs(current.low - previous.close)
    );
    trueRanges.push(tr);

    // Directional Movements: only the dominant direction (and only when
    // positive) counts; the other side is recorded as 0.
    const highDiff = current.high - previous.high;
    const lowDiff = previous.low - current.low;

    const plusDMValue = (highDiff > lowDiff && highDiff > 0) ? highDiff : 0;
    const minusDMValue = (lowDiff > highDiff && lowDiff > 0) ? lowDiff : 0;

    plusDM.push(plusDMValue);
    minusDM.push(minusDMValue);
  }

  // Calculate smoothed averages (simple MA via the file's sma helper).
  const atrValues = sma(trueRanges, period);
  const smoothedPlusDM = sma(plusDM, period);
  const smoothedMinusDM = sma(minusDM, period);

  const plusDI: number[] = [];
  const minusDI: number[] = [];
  const dx: number[] = [];

  // Calculate DI+ and DI- as smoothed DM relative to smoothed TR (x100);
  // a zero ATR yields 0 to avoid division by zero.
  for (let i = 0; i < atrValues.length; i++) {
    const diPlus = atrValues[i] > 0 ? (smoothedPlusDM[i] / atrValues[i]) * 100 : 0;
    const diMinus = atrValues[i] > 0 ? (smoothedMinusDM[i] / atrValues[i]) * 100 : 0;

    plusDI.push(diPlus);
    minusDI.push(diMinus);

    // Calculate DX: normalized absolute DI spread (0 when both DIs are 0).
    const diSum = diPlus + diMinus;
    const dxValue = diSum > 0 ? (Math.abs(diPlus - diMinus) / diSum) * 100 : 0;
    dx.push(dxValue);
  }

  // Calculate ADX (smoothed DX).
  const adxValues = sma(dx, period);

  return {
    adx: adxValues,
    // Trim the DI series so they start alongside the doubly-smoothed ADX;
    // see the alignment NOTE in the doc comment above.
    plusDI: plusDI.slice(period - 1),
    minusDI: minusDI.slice(period - 1)
  };
}
|
||||
|
||||
/**
|
||||
* Volume Weighted Moving Average (VWMA)
|
||||
*/
|
||||
export function vwma(ohlcv: OHLCVData[], period: number = 20): number[] {
|
||||
if (period >= ohlcv.length) return [];
|
||||
|
||||
const result: number[] = [];
|
||||
|
||||
for (let i = period - 1; i < ohlcv.length; i++) {
|
||||
const slice = ohlcv.slice(i - period + 1, i + 1);
|
||||
|
||||
let totalVolumePrice = 0;
|
||||
let totalVolume = 0;
|
||||
|
||||
for (const candle of slice) {
|
||||
const typicalPrice = (candle.high + candle.low + candle.close) / 3;
|
||||
totalVolumePrice += typicalPrice * candle.volume;
|
||||
totalVolume += candle.volume;
|
||||
}
|
||||
|
||||
const vwmaValue = totalVolume > 0 ? totalVolumePrice / totalVolume : 0;
|
||||
result.push(vwmaValue);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Pivot Points (Standard)
|
||||
*/
|
||||
export function pivotPoints(ohlcv: OHLCVData[]): Array<{
|
||||
pivot: number;
|
||||
resistance1: number;
|
||||
resistance2: number;
|
||||
resistance3: number;
|
||||
support1: number;
|
||||
support2: number;
|
||||
support3: number;
|
||||
}> {
|
||||
if (ohlcv.length === 0) return [];
|
||||
|
||||
const result: Array<{
|
||||
pivot: number;
|
||||
resistance1: number;
|
||||
resistance2: number;
|
||||
resistance3: number;
|
||||
support1: number;
|
||||
support2: number;
|
||||
support3: number;
|
||||
}> = [];
|
||||
|
||||
for (let i = 0; i < ohlcv.length; i++) {
|
||||
const candle = ohlcv[i];
|
||||
|
||||
// Calculate pivot point
|
||||
const pivot = (candle.high + candle.low + candle.close) / 3;
|
||||
|
||||
// Calculate resistance and support levels
|
||||
const resistance1 = (2 * pivot) - candle.low;
|
||||
const support1 = (2 * pivot) - candle.high;
|
||||
|
||||
const resistance2 = pivot + (candle.high - candle.low);
|
||||
const support2 = pivot - (candle.high - candle.low);
|
||||
|
||||
const resistance3 = candle.high + 2 * (pivot - candle.low);
|
||||
const support3 = candle.low - 2 * (candle.high - pivot);
|
||||
|
||||
result.push({
|
||||
pivot,
|
||||
resistance1,
|
||||
resistance2,
|
||||
resistance3,
|
||||
support1,
|
||||
support2,
|
||||
support3
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,19 +0,0 @@
|
|||
import { fixedRiskPositionSize } from '../src/calculations/position-sizing.js';
|
||||
|
||||
try {
|
||||
console.log('Testing position sizing calculations...');
|
||||
|
||||
const result = fixedRiskPositionSize({
|
||||
accountSize: 100000,
|
||||
riskPercentage: 2,
|
||||
entryPrice: 100,
|
||||
stopLoss: 95
|
||||
});
|
||||
|
||||
console.log('Fixed risk position size result:', result);
|
||||
console.log('Expected: 400 shares');
|
||||
console.log('Test passed:', result === 400);
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error:', error);
|
||||
}
|
||||
|
|
@ -1,138 +0,0 @@
|
|||
/**
|
||||
* Validation script for position sizing calculations
|
||||
*/
|
||||
import {
|
||||
fixedRiskPositionSize,
|
||||
kellyPositionSize,
|
||||
volatilityTargetPositionSize,
|
||||
equalWeightPositionSize,
|
||||
atrBasedPositionSize,
|
||||
expectancyPositionSize,
|
||||
calculatePortfolioHeat,
|
||||
validatePositionSize
|
||||
} from '../src/calculations/position-sizing.js';
|
||||
|
||||
console.log('=== Position Sizing Calculation Validation ===\n');
|
||||
|
||||
// Test 1: Fixed Risk Position Sizing
|
||||
console.log('1. Fixed Risk Position Sizing');
|
||||
const fixedRiskResult = fixedRiskPositionSize({
|
||||
accountSize: 100000,
|
||||
riskPercentage: 2,
|
||||
entryPrice: 100,
|
||||
stopLoss: 95,
|
||||
leverage: 1
|
||||
});
|
||||
console.log(` Account: $100,000, Risk: 2%, Entry: $100, Stop: $95`);
|
||||
console.log(` Result: ${fixedRiskResult} shares`);
|
||||
console.log(` Expected: 400 shares (Risk: $2,000 ÷ $5 risk per share = 400)`);
|
||||
console.log(` ✓ ${fixedRiskResult === 400 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test 2: Kelly Criterion
|
||||
console.log('2. Kelly Criterion Position Sizing');
|
||||
const kellyResult = kellyPositionSize({
|
||||
winRate: 0.6,
|
||||
averageWin: 150,
|
||||
averageLoss: -100
|
||||
}, 100000);
|
||||
console.log(` Win Rate: 60%, Avg Win: $150, Avg Loss: $100`);
|
||||
console.log(` Result: $${kellyResult.toFixed(0)}`);
|
||||
console.log(` Kelly formula with safety factor applied`);
|
||||
console.log(` ✓ ${kellyResult > 0 && kellyResult < 25000 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test 3: Volatility Target Position Sizing
|
||||
console.log('3. Volatility Target Position Sizing');
|
||||
const volResult = volatilityTargetPositionSize({
|
||||
price: 100,
|
||||
volatility: 0.20,
|
||||
targetVolatility: 0.10,
|
||||
lookbackDays: 30
|
||||
}, 100000);
|
||||
console.log(` Price: $100, Asset Vol: 20%, Target Vol: 10%`);
|
||||
console.log(` Result: ${volResult} shares`);
|
||||
console.log(` Expected: 500 shares (Vol ratio 0.5 * $100k = $50k ÷ $100 = 500)`);
|
||||
console.log(` ✓ ${volResult === 500 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test 4: Equal Weight Position Sizing
|
||||
console.log('4. Equal Weight Position Sizing');
|
||||
const equalResult = equalWeightPositionSize(100000, 5, 100);
|
||||
console.log(` Account: $100,000, Positions: 5, Price: $100`);
|
||||
console.log(` Result: ${equalResult} shares`);
|
||||
console.log(` Expected: 200 shares ($100k ÷ 5 = $20k ÷ $100 = 200)`);
|
||||
console.log(` ✓ ${equalResult === 200 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test 5: ATR-Based Position Sizing
|
||||
console.log('5. ATR-Based Position Sizing');
|
||||
const atrResult = atrBasedPositionSize(100000, 2, 5, 2, 100);
|
||||
console.log(` Account: $100,000, Risk: 2%, ATR: $5, Multiplier: 2`);
|
||||
console.log(` Result: ${atrResult} shares`);
|
||||
console.log(` Expected: 200 shares (Risk: $2k ÷ Stop: $10 = 200)`);
|
||||
console.log(` ✓ ${atrResult === 200 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test 6: Expectancy Position Sizing
|
||||
console.log('6. Expectancy Position Sizing');
|
||||
const expectancyResult = expectancyPositionSize(100000, 0.6, 150, -100, 5);
|
||||
console.log(` Win Rate: 60%, Avg Win: $150, Avg Loss: $100`);
|
||||
console.log(` Result: $${expectancyResult.toFixed(0)}`);
|
||||
console.log(` Expectancy: 0.6*150 - 0.4*100 = 50 (positive expectancy)`);
|
||||
console.log(` ✓ ${expectancyResult > 0 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test 7: Portfolio Heat Calculation
|
||||
console.log('7. Portfolio Heat Calculation');
|
||||
const heatResult = calculatePortfolioHeat([
|
||||
{ value: 10000, risk: 500 },
|
||||
{ value: 15000, risk: 750 },
|
||||
{ value: 20000, risk: 1000 }
|
||||
], 100000);
|
||||
console.log(` Positions with risks: $500, $750, $1000`);
|
||||
console.log(` Result: ${heatResult}%`);
|
||||
console.log(` Expected: 2.25% (Total risk: $2250 ÷ $100k = 2.25%)`);
|
||||
console.log(` ✓ ${heatResult === 2.25 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test 8: Position Size Validation
|
||||
console.log('8. Position Size Validation');
|
||||
const validationResult = validatePositionSize(50, 100, 100000, 10, 2);
|
||||
console.log(` Position: 50 shares @ $100, Account: $100k, Max: 10%`);
|
||||
console.log(` Result: ${validationResult.isValid ? 'Valid' : 'Invalid'}`);
|
||||
console.log(` Position value: $5,000 (5% of account - within 10% limit)`);
|
||||
console.log(` ✓ ${validationResult.isValid ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Test edge cases
|
||||
console.log('=== Edge Case Testing ===\n');
|
||||
|
||||
// Zero/negative inputs
|
||||
console.log('9. Zero/Negative Input Handling');
|
||||
const zeroResult = fixedRiskPositionSize({
|
||||
accountSize: 0,
|
||||
riskPercentage: 2,
|
||||
entryPrice: 100,
|
||||
stopLoss: 95
|
||||
});
|
||||
console.log(` Zero account size result: ${zeroResult}`);
|
||||
console.log(` ✓ ${zeroResult === 0 ? 'PASS' : 'FAIL'}`);
|
||||
|
||||
const equalStopResult = fixedRiskPositionSize({
|
||||
accountSize: 100000,
|
||||
riskPercentage: 2,
|
||||
entryPrice: 100,
|
||||
stopLoss: 100
|
||||
});
|
||||
console.log(` Equal entry/stop result: ${equalStopResult}`);
|
||||
console.log(` ✓ ${equalStopResult === 0 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
// Negative expectancy Kelly
|
||||
console.log('10. Negative Expectancy Kelly');
|
||||
const negativeKellyResult = kellyPositionSize({
|
||||
winRate: 0.3,
|
||||
averageWin: 100,
|
||||
averageLoss: -200
|
||||
}, 100000);
|
||||
console.log(` Win Rate: 30%, Avg Win: $100, Avg Loss: $200`);
|
||||
console.log(` Result: $${negativeKellyResult}`);
|
||||
console.log(` Expected: $0 (negative expectancy)`);
|
||||
console.log(` ✓ ${negativeKellyResult === 0 ? 'PASS' : 'FAIL'}\n`);
|
||||
|
||||
console.log('=== Validation Complete ===');
|
||||
console.log('All position sizing calculations have been validated!');
|
||||
console.log('The functions now include proper input validation, edge case handling,');
|
||||
console.log('and mathematically correct implementations.');
|
||||
Loading…
Add table
Add a link
Reference in a new issue