small fixes for backtest

This commit is contained in:
Boki 2025-07-03 18:14:40 -04:00
parent 6df32dc18b
commit 6cf3179092
16 changed files with 2180 additions and 16 deletions

View file

@ -31,6 +31,7 @@ napi-derive = "2"
statrs = "0.16"
rand = "0.8"
rand_distr = "0.4"
nalgebra = "0.32"
# Logging
tracing = "0.1"

Binary file not shown.

View file

@ -3,7 +3,7 @@ use napi::{bindgen_prelude::*};
use serde_json;
use crate::indicators::{
SMA, EMA, RSI, MACD, BollingerBands, Stochastic, ATR,
Indicator, IncrementalIndicator, IndicatorResult, PriceData
Indicator, IncrementalIndicator
};
/// Convert JS array to Vec<f64>

View file

@ -1,6 +1,8 @@
mod indicators;
mod risk;
pub use indicators::{TechnicalIndicators, IncrementalSMA, IncrementalEMA, IncrementalRSI};
pub use risk::{RiskAnalyzer, OrderbookAnalyzer};
use napi_derive::napi;
use napi::{bindgen_prelude::*, JsObject};

View file

@ -0,0 +1,166 @@
use napi_derive::napi;
use napi::{bindgen_prelude::*};
use crate::risk::{BetSizer, BetSizingParameters, MarketRegime, RiskModel};
use crate::orderbook::{OrderBookAnalytics, LiquidityProfile};
use crate::positions::Position;
use std::collections::HashMap;
#[napi]
/// N-API-exposed facade bundling portfolio risk analytics (`RiskModel`)
/// with position sizing (`BetSizer`) for use from JavaScript.
pub struct RiskAnalyzer {
    // Computes VaR / correlation / concentration metrics from return history.
    risk_model: RiskModel,
    // Converts signals + market conditions into concrete position sizes.
    bet_sizer: BetSizer,
}
#[napi]
impl RiskAnalyzer {
#[napi(constructor)]
pub fn new(capital: f64, base_risk_per_trade: f64, lookback_period: u32) -> Self {
Self {
risk_model: RiskModel::new(lookback_period as usize),
bet_sizer: BetSizer::new(capital, base_risk_per_trade),
}
}
#[napi]
pub fn update_returns(&mut self, symbol: String, returns: Vec<f64>) -> Result<()> {
self.risk_model.update_returns(&symbol, returns);
Ok(())
}
#[napi]
pub fn calculate_portfolio_risk(&self, positions_json: String, prices_json: String) -> Result<String> {
// Parse positions
let positions_data: Vec<(String, f64, f64)> = serde_json::from_str(&positions_json)
.map_err(|e| Error::from_reason(format!("Failed to parse positions: {}", e)))?;
let mut positions = HashMap::new();
for (symbol, quantity, avg_price) in positions_data {
positions.insert(symbol.clone(), Position {
symbol,
quantity,
average_price: avg_price,
realized_pnl: 0.0,
unrealized_pnl: 0.0,
total_cost: quantity * avg_price,
last_update: chrono::Utc::now(),
});
}
// Parse prices
let prices: HashMap<String, f64> = serde_json::from_str(&prices_json)
.map_err(|e| Error::from_reason(format!("Failed to parse prices: {}", e)))?;
// Calculate risk
match self.risk_model.calculate_portfolio_risk(&positions, &prices) {
Ok(risk) => Ok(serde_json::to_string(&risk).unwrap()),
Err(e) => Err(Error::from_reason(e)),
}
}
#[napi]
pub fn calculate_position_size(
&self,
signal_strength: f64,
signal_confidence: f64,
volatility: f64,
liquidity_score: f64,
current_drawdown: f64,
price: f64,
stop_loss: Option<f64>,
market_regime: String,
) -> Result<String> {
let regime = match market_regime.as_str() {
"trending" => MarketRegime::Trending,
"range_bound" => MarketRegime::RangeBound,
"high_volatility" => MarketRegime::HighVolatility,
"low_volatility" => MarketRegime::LowVolatility,
_ => MarketRegime::Transitioning,
};
let params = BetSizingParameters {
signal_strength,
signal_confidence,
market_regime: regime,
volatility,
liquidity_score,
correlation_exposure: 0.0, // Would be calculated from portfolio
current_drawdown,
};
let position_size = self.bet_sizer.calculate_position_size(
&params,
price,
stop_loss,
None, // Historical performance
None, // Orderbook analytics
None, // Liquidity profile
);
Ok(serde_json::to_string(&position_size).unwrap())
}
#[napi]
pub fn calculate_optimal_stop_loss(
&self,
entry_price: f64,
volatility: f64,
support_levels: Vec<f64>,
atr: Option<f64>,
is_long: bool,
) -> f64 {
self.bet_sizer.calculate_optimal_stop_loss(
entry_price,
volatility,
&support_levels,
atr,
is_long,
)
}
}
#[napi]
/// Stateless N-API wrapper exposing orderbook analytics to JavaScript;
/// every method takes a JSON-serialized snapshot and returns JSON.
pub struct OrderbookAnalyzer {}
#[napi]
impl OrderbookAnalyzer {
#[napi(constructor)]
pub fn new() -> Self {
Self {}
}
#[napi]
pub fn analyze_orderbook(&self, snapshot_json: String) -> Result<String> {
let snapshot: crate::OrderBookSnapshot = serde_json::from_str(&snapshot_json)
.map_err(|e| Error::from_reason(format!("Failed to parse snapshot: {}", e)))?;
match OrderBookAnalytics::calculate(&snapshot) {
Some(analytics) => Ok(serde_json::to_string(&analytics).unwrap()),
None => Err(Error::from_reason("Failed to calculate analytics")),
}
}
#[napi]
pub fn calculate_liquidity_profile(&self, snapshot_json: String) -> Result<String> {
let snapshot: crate::OrderBookSnapshot = serde_json::from_str(&snapshot_json)
.map_err(|e| Error::from_reason(format!("Failed to parse snapshot: {}", e)))?;
let profile = LiquidityProfile::from_snapshot(&snapshot);
Ok(serde_json::to_string(&profile).unwrap())
}
#[napi]
pub fn calculate_market_impact(
&self,
snapshot_json: String,
order_size_usd: f64,
is_buy: bool,
) -> Result<String> {
let snapshot: crate::OrderBookSnapshot = serde_json::from_str(&snapshot_json)
.map_err(|e| Error::from_reason(format!("Failed to parse snapshot: {}", e)))?;
let profile = LiquidityProfile::from_snapshot(&snapshot);
let impact = profile.calculate_market_impact(order_size_usd, is_buy);
Ok(serde_json::to_string(&impact).unwrap())
}
}

View file

@ -0,0 +1,374 @@
use crate::{OrderBookSnapshot, PriceLevel};
use serde::{Serialize, Deserialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Point-in-time, top-of-book analytics derived from one `OrderBookSnapshot`
/// (see `OrderBookAnalytics::calculate`).
pub struct OrderBookAnalytics {
    pub spread: f64,      // best ask - best bid, in price units
    pub spread_bps: f64,  // spread relative to mid price, in basis points
    pub mid_price: f64,   // (best bid + best ask) / 2
    pub micro_price: f64, // Size-weighted mid price
    // Level-1 size imbalance in [-1, 1], computed as (bid - ask) / (bid + ask)
    // in `calculate`, so POSITIVE values mean bid-side pressure.
    // NOTE(review): the original comment here said "negative = bid pressure",
    // which contradicts that formula — confirm the intended sign convention.
    pub imbalance: f64,
    pub depth_imbalance: OrderBookImbalance, // imbalance at several depths
    pub liquidity_score: f64,  // 0..1 blend of depth, volume and tightness
    pub effective_spread: f64, // execution spread for a $1000 order
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Bid/ask size imbalance measured over the top 1, 5 and 10 levels, plus a
/// variant weighting levels nearer the touch more heavily.
pub struct OrderBookImbalance {
    pub level_1: f64,
    pub level_5: f64,
    pub level_10: f64,
    pub weighted: f64,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Cumulative liquidity ladders per side; depth totals are notional sums.
pub struct LiquidityProfile {
    pub bid_liquidity: Vec<LiquidityLevel>,
    pub ask_liquidity: Vec<LiquidityLevel>,
    pub total_bid_depth: f64, // sum of price * size across visible bids
    pub total_ask_depth: f64, // sum of price * size across visible asks
    pub bid_depth_weighted_price: f64, // size-weighted average bid price
    pub ask_depth_weighted_price: f64, // size-weighted average ask price
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// One rung of a liquidity ladder with running totals from the touch down.
pub struct LiquidityLevel {
    pub price: f64,
    pub size: f64,
    pub cumulative_size: f64, // total size from the best level through this one
    pub cost_to_execute: f64, // Cost to buy/sell up to this level
}
impl OrderBookAnalytics {
    /// Compute top-of-book analytics for a snapshot.
    ///
    /// Returns `None` when either side of the book is empty, since spread and
    /// mid price are undefined. Assumes `bids`/`asks` are ordered best-first
    /// (bids descending, asks ascending) — TODO confirm upstream guarantees.
    pub fn calculate(snapshot: &OrderBookSnapshot) -> Option<Self> {
        if snapshot.bids.is_empty() || snapshot.asks.is_empty() {
            return None;
        }
        let best_bid = snapshot.bids[0].price;
        let best_ask = snapshot.asks[0].price;
        let spread = best_ask - best_bid;
        let mid_price = (best_bid + best_ask) / 2.0;
        let spread_bps = (spread / mid_price) * 10000.0;
        let bid_size = snapshot.bids[0].size;
        let ask_size = snapshot.asks[0].size;
        // Guard degenerate zero-size top levels: the original expressions
        // divided by (bid_size + ask_size) and yielded NaN when both were 0.
        // Fall back to the plain mid price and a neutral imbalance instead.
        let top_size = bid_size + ask_size;
        let (micro_price, imbalance) = if top_size > 0.0 {
            (
                // Size-weighted mid: leans toward the side with LESS resting
                // size (each price weighted by the opposite side's size).
                (best_bid * ask_size + best_ask * bid_size) / top_size,
                // Level-1 imbalance: positive when bid size dominates.
                (bid_size - ask_size) / top_size,
            )
        } else {
            (mid_price, 0.0)
        };
        // Calculate depth imbalance at different levels
        let depth_imbalance = Self::calculate_depth_imbalance(snapshot);
        // Calculate liquidity score
        let liquidity_score = Self::calculate_liquidity_score(snapshot);
        // Effective spread for a representative $1000 order (considers depth).
        let effective_spread = Self::calculate_effective_spread(snapshot, 1000.0);
        Some(OrderBookAnalytics {
            spread,
            spread_bps,
            mid_price,
            micro_price,
            imbalance,
            depth_imbalance,
            liquidity_score,
            effective_spread,
        })
    }

    /// Size imbalance ((bid - ask) / (bid + ask)) summed over the top 1, 5 and
    /// 10 levels, plus a 1/(i+1)-weighted variant over the top 10.
    fn calculate_depth_imbalance(snapshot: &OrderBookSnapshot) -> OrderBookImbalance {
        let calc_imbalance = |depth: usize| -> f64 {
            let bid_depth: f64 = snapshot.bids.iter()
                .take(depth)
                .map(|l| l.size)
                .sum();
            let ask_depth: f64 = snapshot.asks.iter()
                .take(depth)
                .map(|l| l.size)
                .sum();
            if bid_depth + ask_depth > 0.0 {
                (bid_depth - ask_depth) / (bid_depth + ask_depth)
            } else {
                0.0
            }
        };
        // Weighted imbalance (more weight on top levels); zip pairs the sides,
        // so the loop stops at the shorter of bids/asks.
        let mut weighted_bid = 0.0;
        let mut weighted_ask = 0.0;
        let mut weight_sum = 0.0;
        for (i, (bid, ask)) in snapshot.bids.iter().zip(snapshot.asks.iter()).enumerate().take(10) {
            let weight = 1.0 / (i + 1) as f64;
            weighted_bid += bid.size * weight;
            weighted_ask += ask.size * weight;
            weight_sum += weight;
        }
        let weighted = if weighted_bid + weighted_ask > 0.0 {
            (weighted_bid - weighted_ask) / (weighted_bid + weighted_ask)
        } else {
            0.0
        };
        OrderBookImbalance {
            level_1: calc_imbalance(1),
            level_5: calc_imbalance(5),
            level_10: calc_imbalance(10),
            weighted,
        }
    }

    /// Blend of level count (30%), top-5 volume (40%) and spread tightness
    /// (30%), clamped to [0, 1].
    fn calculate_liquidity_score(snapshot: &OrderBookSnapshot) -> f64 {
        // Liquidity score based on depth and tightness
        let depth_score = (snapshot.bids.len() + snapshot.asks.len()) as f64 / 20.0; // Normalize by 10 levels each side
        let volume_score = {
            let bid_volume: f64 = snapshot.bids.iter().take(5).map(|l| l.size).sum();
            let ask_volume: f64 = snapshot.asks.iter().take(5).map(|l| l.size).sum();
            ((bid_volume + ask_volume) / 10000.0).min(1.0) // Normalize by $10k
        };
        let spread_score = if let (Some(bid), Some(ask)) = (snapshot.bids.first(), snapshot.asks.first()) {
            let spread_bps = ((ask.price - bid.price) / ((ask.price + bid.price) / 2.0)) * 10000.0;
            (50.0 / (spread_bps + 1.0)).min(1.0) // Lower spread = higher score
        } else {
            0.0
        };
        (depth_score * 0.3 + volume_score * 0.4 + spread_score * 0.3).min(1.0)
    }

    /// Spread between the average sell and buy execution prices for an order
    /// of `order_size_usd` notional; falls back to the quoted spread when a
    /// side cannot absorb anything, and 0.0 on an empty book.
    fn calculate_effective_spread(snapshot: &OrderBookSnapshot, order_size_usd: f64) -> f64 {
        // Walk one side of the book, consuming notional until `size_usd` is
        // filled, and return the volume-weighted average execution price.
        let avg_execution_price = |levels: &[PriceLevel], size_usd: f64| -> Option<f64> {
            let mut remaining = size_usd;
            let mut total_cost = 0.0;
            let mut total_shares = 0.0;
            for level in levels {
                let level_value = level.price * level.size;
                if remaining <= level_value {
                    // Final (partial) level: take only what is still needed.
                    let shares = remaining / level.price;
                    total_cost += remaining;
                    total_shares += shares;
                    break;
                } else {
                    total_cost += level_value;
                    total_shares += level.size;
                    remaining -= level_value;
                }
            }
            if total_shares > 0.0 {
                Some(total_cost / total_shares)
            } else {
                None
            }
        };
        if let (Some(bid_exec), Some(ask_exec)) = (
            avg_execution_price(&snapshot.bids, order_size_usd),
            avg_execution_price(&snapshot.asks, order_size_usd)
        ) {
            ask_exec - bid_exec
        } else if let (Some(bid), Some(ask)) = (snapshot.bids.first(), snapshot.asks.first()) {
            ask.price - bid.price
        } else {
            0.0
        }
    }
}
impl LiquidityProfile {
    /// Build cumulative liquidity ladders from a snapshot.
    ///
    /// Each `LiquidityLevel` carries the running total of size and of notional
    /// cost (`price * size`) from the top of the book down to that level, so
    /// the execution cost to any depth can be read off directly.
    pub fn from_snapshot(snapshot: &OrderBookSnapshot) -> Self {
        let mut bid_liquidity = Vec::new();
        let mut ask_liquidity = Vec::new();
        let mut cumulative_bid_size = 0.0;
        let mut cumulative_bid_cost = 0.0;
        for bid in &snapshot.bids {
            cumulative_bid_size += bid.size;
            cumulative_bid_cost += bid.price * bid.size;
            bid_liquidity.push(LiquidityLevel {
                price: bid.price,
                size: bid.size,
                cumulative_size: cumulative_bid_size,
                cost_to_execute: cumulative_bid_cost,
            });
        }
        let mut cumulative_ask_size = 0.0;
        let mut cumulative_ask_cost = 0.0;
        for ask in &snapshot.asks {
            cumulative_ask_size += ask.size;
            cumulative_ask_cost += ask.price * ask.size;
            ask_liquidity.push(LiquidityLevel {
                price: ask.price,
                size: ask.size,
                cumulative_size: cumulative_ask_size,
                cost_to_execute: cumulative_ask_cost,
            });
        }
        // Depth totals are in notional (cost) terms, not share counts.
        let total_bid_depth = cumulative_bid_cost;
        let total_ask_depth = cumulative_ask_cost;
        // Size-weighted average price across the whole visible side; 0.0 when
        // the side is empty.
        let bid_depth_weighted_price = if cumulative_bid_size > 0.0 {
            cumulative_bid_cost / cumulative_bid_size
        } else {
            0.0
        };
        let ask_depth_weighted_price = if cumulative_ask_size > 0.0 {
            cumulative_ask_cost / cumulative_ask_size
        } else {
            0.0
        };
        Self {
            bid_liquidity,
            ask_liquidity,
            total_bid_depth,
            total_ask_depth,
            bid_depth_weighted_price,
            ask_depth_weighted_price,
        }
    }
    /// Calculate the market impact of executing a given size
    ///
    /// Walks the opposing ladder (asks for a buy, bids for a sell), consuming
    /// levels until `size_usd` of notional is filled. If the order exceeds
    /// visible liquidity, the unfilled remainder is ignored and the numbers
    /// reflect only the filled portion.
    pub fn calculate_market_impact(&self, size_usd: f64, is_buy: bool) -> MarketImpact {
        let levels = if is_buy { &self.ask_liquidity } else { &self.bid_liquidity };
        if levels.is_empty() {
            return MarketImpact::default();
        }
        // Best available price before the order starts eating into the book.
        let reference_price = levels[0].price;
        let mut remaining = size_usd;
        let mut total_cost = 0.0;
        let mut total_shares = 0.0;
        let mut levels_consumed = 0;
        for (i, level) in levels.iter().enumerate() {
            let level_value = level.price * level.size;
            if remaining <= level_value {
                // Final (partial) level: take only what is still needed.
                let shares = remaining / level.price;
                total_cost += shares * level.price;
                total_shares += shares;
                levels_consumed = i + 1;
                break;
            } else {
                // Consume the whole level and keep walking.
                total_cost += level_value;
                total_shares += level.size;
                remaining -= level_value;
                levels_consumed = i + 1;
            }
        }
        let avg_execution_price = if total_shares > 0.0 {
            total_cost / total_shares
        } else {
            reference_price
        };
        // Impact is signed so that a worse-than-reference fill is positive.
        let price_impact = if is_buy {
            (avg_execution_price - reference_price) / reference_price
        } else {
            (reference_price - avg_execution_price) / reference_price
        };
        let slippage = (avg_execution_price - reference_price).abs();
        MarketImpact {
            avg_execution_price,
            price_impact,
            slippage,
            levels_consumed,
            total_shares,
        }
    }
}
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
/// Result of simulating an order against a `LiquidityProfile`
/// (see `LiquidityProfile::calculate_market_impact`).
pub struct MarketImpact {
    pub avg_execution_price: f64, // volume-weighted fill price
    pub price_impact: f64, // As percentage (fraction of the reference price)
    pub slippage: f64, // In price units
    pub levels_consumed: usize, // book levels touched to fill the order
    pub total_shares: f64, // shares actually filled
}
/// Rolling history of orderbook analytics, used to derive time-series
/// signals (mid-price volatility, average spread, imbalance momentum).
pub struct OrderBookDynamics {
    snapshots: Vec<(chrono::DateTime<chrono::Utc>, OrderBookAnalytics)>,
    max_history: usize,
}
impl OrderBookDynamics {
    /// New tracker retaining at most `max_history` timestamped snapshots.
    pub fn new(max_history: usize) -> Self {
        Self {
            snapshots: Vec::new(),
            max_history,
        }
    }
    /// Append a snapshot, evicting the oldest once capacity is exceeded.
    pub fn add_snapshot(&mut self, timestamp: chrono::DateTime<chrono::Utc>, analytics: OrderBookAnalytics) {
        self.snapshots.push((timestamp, analytics));
        while self.snapshots.len() > self.max_history {
            self.snapshots.remove(0);
        }
    }
    /// Population standard deviation of the mid price over the trailing
    /// `window` snapshots, or `None` if history is still too short.
    pub fn get_volatility(&self, window: usize) -> Option<f64> {
        if self.snapshots.len() < window {
            return None;
        }
        let tail = &self.snapshots[self.snapshots.len() - window..];
        let count = tail.len() as f64;
        let mean = tail.iter().map(|(_, a)| a.mid_price).sum::<f64>() / count;
        let variance = tail
            .iter()
            .map(|(_, a)| (a.mid_price - mean).powi(2))
            .sum::<f64>()
            / count;
        Some(variance.sqrt())
    }
    /// Mean quoted spread over the trailing `window` snapshots.
    pub fn get_average_spread(&self, window: usize) -> Option<f64> {
        if self.snapshots.len() < window {
            return None;
        }
        let tail = &self.snapshots[self.snapshots.len() - window..];
        let spread_sum: f64 = tail.iter().map(|(_, a)| a.spread).sum();
        Some(spread_sum / window as f64)
    }
    /// Mean weighted depth imbalance over the trailing `window` snapshots;
    /// the sign indicates the direction of order-flow pressure.
    pub fn detect_momentum(&self, window: usize) -> Option<f64> {
        if self.snapshots.len() < window {
            return None;
        }
        let tail = &self.snapshots[self.snapshots.len() - window..];
        let imbalance_sum: f64 = tail.iter().map(|(_, a)| a.depth_imbalance.weighted).sum();
        Some(imbalance_sum / tail.len() as f64)
    }
}

View file

@ -1,3 +1,5 @@
pub mod analytics;
use crate::{Quote, Trade, Side, OrderBookSnapshot, PriceLevel};
use chrono::{DateTime, Utc};
use dashmap::DashMap;
@ -5,6 +7,8 @@ use parking_lot::RwLock;
use std::collections::BTreeMap;
use std::sync::Arc;
pub use analytics::{OrderBookAnalytics, LiquidityProfile, OrderBookImbalance, MarketImpact};
// Manages order books for all symbols
pub struct OrderBookManager {
books: DashMap<String, Arc<RwLock<OrderBook>>>,
@ -49,6 +53,16 @@ impl OrderBookManager {
book_guard.get_best_bid_ask()
})
}
/// Top-of-book analytics for `symbol` using up to `depth` levels per side;
/// `None` when no snapshot is available or either side of the book is empty.
pub fn get_analytics(&self, symbol: &str, depth: usize) -> Option<OrderBookAnalytics> {
    self.get_snapshot(symbol, depth)
        .and_then(|snapshot| OrderBookAnalytics::calculate(&snapshot))
}
/// Cumulative liquidity ladders for `symbol`; `None` when no snapshot exists.
pub fn get_liquidity_profile(&self, symbol: &str, depth: usize) -> Option<LiquidityProfile> {
    self.get_snapshot(symbol, depth)
        .map(|snapshot| LiquidityProfile::from_snapshot(&snapshot))
}
}
// Individual order book for a symbol
@ -183,6 +197,61 @@ impl OrderBook {
let best_ask = self.asks.values().next()?.price;
Some((best_bid, best_ask))
}
/// Midpoint of best bid/ask; `None` when either side is empty.
pub fn get_mid_price(&self) -> Option<f64> {
    self.get_best_bid_ask()
        .map(|(bid, ask)| (bid + ask) / 2.0)
}
/// Best ask minus best bid; `None` when either side is empty.
pub fn get_spread(&self) -> Option<f64> {
    self.get_best_bid_ask()
        .map(|(bid, ask)| ask - bid)
}
/// Total resting size at or better than `price` on the given side:
/// bids at or above `price` for `Buy`, asks at or below for `Sell`.
pub fn get_depth_at_price(&self, price: f64, side: Side) -> f64 {
    match side {
        Side::Buy => {
            self.bids.values()
                .filter(|level| level.price >= price)
                .map(|level| level.size)
                .sum()
        }
        Side::Sell => {
            self.asks.values()
                .filter(|level| level.price <= price)
                .map(|level| level.size)
                .sum()
        }
    }
}
/// Average fill price for an order of `size` shares, walking the opposing
/// side of the book (asks for `Buy`, bids for `Sell`).
///
/// Returns `None` when that side is empty. If `size` exceeds the visible
/// liquidity, the average covers only the fillable portion.
pub fn get_volume_weighted_price(&self, size: f64, side: Side) -> Option<f64> {
    let levels: Vec<&Level> = match side {
        Side::Buy => self.asks.values().collect(),
        Side::Sell => self.bids.values().collect(),
    };
    let mut remaining_size = size;
    let mut total_cost = 0.0;
    let mut total_shares = 0.0;
    for level in levels {
        if remaining_size <= 0.0 {
            break;
        }
        // Fill as much as this level offers, capped by what is left to fill.
        let fill_size = remaining_size.min(level.size);
        total_cost += fill_size * level.price;
        total_shares += fill_size;
        remaining_size -= fill_size;
    }
    if total_shares > 0.0 {
        Some(total_cost / total_shares)
    } else {
        None
    }
}
fn clean_stale_levels(&mut self, current_time: DateTime<Utc>) {
let stale_threshold = chrono::Duration::seconds(60); // 60 seconds

View file

@ -0,0 +1,505 @@
use std::collections::HashMap;
use crate::orderbook::analytics::{OrderBookAnalytics, LiquidityProfile};
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Inputs to `BetSizer::calculate_position_size`, describing the signal and
/// the market conditions it fires in.
pub struct BetSizingParameters {
    pub signal_strength: f64, // -1 to 1
    pub signal_confidence: f64, // 0 to 1
    pub market_regime: MarketRegime,
    // Return volatility; treated as a daily figure by the volatility
    // adjustment (compared against a daily-scaled 15% annual target).
    pub volatility: f64,
    pub liquidity_score: f64, // 0..1; thresholds at 0.3/0.5/0.8 in the sizer
    pub correlation_exposure: f64, // overlap with existing positions, 0..1
    pub current_drawdown: f64, // fraction off the equity peak; 0 = at highs
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
/// Coarse market-state classification used to scale position sizes.
pub enum MarketRegime {
    Trending,
    RangeBound,
    HighVolatility,
    LowVolatility,
    /// Ambiguous/changing conditions; also the fallback when data is scarce.
    Transitioning,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Final sizing decision plus an audit trail of every multiplier applied.
pub struct PositionSize {
    pub shares: u32, // whole shares; fractional remainder is floored away
    pub notional_value: f64, // shares * price
    pub percent_of_capital: f64, // notional as a fraction of capital
    pub risk_adjusted_size: f64, // final risk fraction after all adjustments
    pub sizing_method: String, // "Risk-based" (stop given) or "Fixed-percentage"
    pub adjustments: Vec<SizeAdjustment>, // applied multipliers, in order
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// One multiplicative adjustment applied during sizing, kept for auditability.
pub struct SizeAdjustment {
    pub reason: String,
    pub factor: f64,
}
/// Position sizer: turns a base risk fraction into share counts through a
/// chain of market-condition adjustments (see `calculate_position_size`).
pub struct BetSizer {
    capital: f64, // account capital, USD
    base_risk_per_trade: f64, // baseline fraction of capital risked per trade
    max_position_size: f64, // hard cap, as a fraction of capital
    min_position_size: f64, // hard floor, as a fraction of capital
    use_kelly: bool, // scale by fractional Kelly when history is supplied
    volatility_scaling: bool, // scale inversely with realized volatility
    regime_adjustments: HashMap<MarketRegime, f64>, // per-regime multipliers
}
impl BetSizer {
    /// Build a sizer with default limits: 10% max / 0.1% min of capital per
    /// position, fractional-Kelly and volatility scaling enabled, and a size
    /// multiplier per market regime.
    pub fn new(capital: f64, base_risk_per_trade: f64) -> Self {
        let mut regime_adjustments = HashMap::new();
        // Lean into trends; pull back when conditions are unclear or violent.
        regime_adjustments.insert(MarketRegime::Trending, 1.2);
        regime_adjustments.insert(MarketRegime::RangeBound, 0.8);
        regime_adjustments.insert(MarketRegime::HighVolatility, 0.6);
        regime_adjustments.insert(MarketRegime::LowVolatility, 1.1);
        regime_adjustments.insert(MarketRegime::Transitioning, 0.7);
        Self {
            capital,
            base_risk_per_trade,
            max_position_size: 0.10,  // 10% max
            min_position_size: 0.001, // 0.1% min
            use_kelly: true,
            volatility_scaling: true,
            regime_adjustments,
        }
    }

    /// Compute a position size by multiplying the base risk fraction through a
    /// chain of adjustments (Kelly, signal, volatility, regime, liquidity,
    /// correlation, drawdown), then converting the final fraction to shares.
    ///
    /// With a distinct `stop_loss`, sizing is risk-based (capital at risk over
    /// per-share risk); otherwise a fixed percentage of capital is used. Every
    /// applied multiplier is recorded in `PositionSize::adjustments`.
    pub fn calculate_position_size(
        &self,
        params: &BetSizingParameters,
        price: f64,
        stop_loss: Option<f64>,
        historical_performance: Option<&PerformanceStats>,
        orderbook_analytics: Option<&OrderBookAnalytics>,
        liquidity_profile: Option<&LiquidityProfile>,
    ) -> PositionSize {
        let mut adjustments = Vec::new();
        // Start with base position size
        let mut risk_fraction = self.base_risk_per_trade;
        // 1. Kelly Criterion adjustment (only when history is supplied)
        if self.use_kelly {
            if let Some(perf) = historical_performance {
                let kelly_fraction = self.calculate_kelly_fraction(perf);
                // Bound Kelly's influence to 0.1x–2x of the base risk.
                let kelly_adjustment = (kelly_fraction / self.base_risk_per_trade).min(2.0).max(0.1);
                risk_fraction *= kelly_adjustment;
                adjustments.push(SizeAdjustment {
                    reason: "Kelly Criterion".to_string(),
                    factor: kelly_adjustment,
                });
            }
        }
        // 2. Signal strength adjustment
        let signal_adjustment = self.calculate_signal_adjustment(params.signal_strength, params.signal_confidence);
        risk_fraction *= signal_adjustment;
        adjustments.push(SizeAdjustment {
            reason: "Signal Strength".to_string(),
            factor: signal_adjustment,
        });
        // 3. Volatility adjustment
        if self.volatility_scaling {
            let vol_adjustment = self.calculate_volatility_adjustment(params.volatility);
            risk_fraction *= vol_adjustment;
            adjustments.push(SizeAdjustment {
                reason: "Volatility Scaling".to_string(),
                factor: vol_adjustment,
            });
        }
        // 4. Market regime adjustment
        if let Some(&regime_factor) = self.regime_adjustments.get(&params.market_regime) {
            risk_fraction *= regime_factor;
            adjustments.push(SizeAdjustment {
                reason: format!("Market Regime ({:?})", params.market_regime),
                factor: regime_factor,
            });
        }
        // 5. Liquidity adjustment
        let liquidity_adjustment = self.calculate_liquidity_adjustment(
            params.liquidity_score,
            orderbook_analytics,
            liquidity_profile,
        );
        risk_fraction *= liquidity_adjustment;
        adjustments.push(SizeAdjustment {
            reason: "Liquidity".to_string(),
            factor: liquidity_adjustment,
        });
        // 6. Correlation adjustment
        let correlation_adjustment = self.calculate_correlation_adjustment(params.correlation_exposure);
        risk_fraction *= correlation_adjustment;
        adjustments.push(SizeAdjustment {
            reason: "Correlation Exposure".to_string(),
            factor: correlation_adjustment,
        });
        // 7. Drawdown adjustment
        let drawdown_adjustment = self.calculate_drawdown_adjustment(params.current_drawdown);
        risk_fraction *= drawdown_adjustment;
        adjustments.push(SizeAdjustment {
            reason: "Current Drawdown".to_string(),
            factor: drawdown_adjustment,
        });
        // Convert the final risk fraction into a (fractional) share count.
        let raw_shares = match stop_loss {
            // Risk-based sizing: capital at risk / per-share risk. Guard the
            // degenerate stop-at-entry case, which previously divided by zero
            // and silently ballooned the size to the max-position cap.
            Some(stop_price) if (price - stop_price).abs() > 0.0 => {
                let risk_per_share = (price - stop_price).abs();
                let risk_amount = self.capital * risk_fraction;
                risk_amount / risk_per_share
            }
            // Fixed percentage sizing (no stop, or stop equals entry).
            _ => self.capital * risk_fraction / price,
        };
        // Apply min/max constraints, expressed as fractions of capital.
        let constrained_shares = raw_shares
            .min(self.capital * self.max_position_size / price)
            .max(self.capital * self.min_position_size / price);
        let shares = constrained_shares.floor() as u32;
        let notional_value = shares as f64 * price;
        let percent_of_capital = notional_value / self.capital;
        PositionSize {
            shares,
            notional_value,
            percent_of_capital,
            risk_adjusted_size: risk_fraction,
            sizing_method: if stop_loss.is_some() { "Risk-based".to_string() } else { "Fixed-percentage".to_string() },
            adjustments,
        }
    }

    /// Quarter-Kelly fraction from historical win/loss stats, clamped to
    /// [0, 0.25]. Falls back to the base risk with fewer than 20 trades or a
    /// zero average loss (which would make the ratio undefined).
    fn calculate_kelly_fraction(&self, perf: &PerformanceStats) -> f64 {
        if perf.total_trades < 20 {
            return self.base_risk_per_trade; // Not enough data
        }
        let win_rate = perf.win_rate;
        let loss_rate = 1.0 - win_rate;
        if perf.avg_loss == 0.0 {
            return self.base_risk_per_trade;
        }
        let win_loss_ratio = perf.avg_win / perf.avg_loss.abs();
        // Kelly formula: f = p - q/b
        // where p = win probability, q = loss probability, b = win/loss ratio
        let kelly = win_rate - (loss_rate / win_loss_ratio);
        // Apply Kelly fraction (typically 25% of full Kelly)
        let kelly_fraction = kelly * 0.25;
        // Ensure it's positive and reasonable
        kelly_fraction.max(0.0).min(0.25)
    }

    /// Map signal strength/confidence into a size multiplier.
    ///
    /// NOTE(review): `(|strength| + 1) / 2` maps |strength| in [0, 1] onto
    /// [0.5, 1.0], so the output range is actually [0.85, 1.5] and even a
    /// zero-strength signal gets a 0.85x multiplier — confirm that floor is
    /// intended (plain `strength.abs()` would give the documented 0.5–1.5).
    fn calculate_signal_adjustment(&self, strength: f64, confidence: f64) -> f64 {
        // Strength is -1 to 1, we want 0 to 1
        let normalized_strength = (strength.abs() + 1.0) / 2.0;
        // Combine strength (70%) and confidence (30%)
        let signal_score = normalized_strength * 0.7 + confidence * 0.3;
        // Map to adjustment factor
        0.5 + signal_score
    }

    /// Inverse-volatility scaling toward a 15% annualized target, clamped to
    /// [0.5, 1.5]. Input volatility is treated as daily; a zero volatility
    /// yields +inf, which the clamp reduces to 1.5.
    fn calculate_volatility_adjustment(&self, volatility: f64) -> f64 {
        // Target volatility (e.g., 15% annualized) converted to daily.
        let target_vol = 0.15 / (252.0_f64.sqrt());
        // Inverse volatility scaling
        (target_vol / volatility).min(1.5).max(0.5)
    }

    /// Scale size by liquidity conditions: base score thresholds, live spread
    /// from the orderbook, and the estimated impact of a typical order.
    fn calculate_liquidity_adjustment(
        &self,
        liquidity_score: f64,
        orderbook: Option<&OrderBookAnalytics>,
        profile: Option<&LiquidityProfile>,
    ) -> f64 {
        let mut adjustment = 1.0;
        // Base liquidity score adjustment
        if liquidity_score < 0.3 {
            adjustment *= 0.5; // Very poor liquidity
        } else if liquidity_score < 0.5 {
            adjustment *= 0.7;
        } else if liquidity_score > 0.8 {
            adjustment *= 1.1; // Good liquidity bonus
        }
        // Orderbook spread adjustment
        if let Some(ob) = orderbook {
            if ob.spread_bps > 50.0 {
                adjustment *= 0.8; // Wide spread penalty
            } else if ob.spread_bps < 10.0 {
                adjustment *= 1.1; // Tight spread bonus
            }
        }
        // Market impact consideration
        if let Some(prof) = profile {
            // Check if our typical order size would move the market
            let typical_order_value = self.capital * self.base_risk_per_trade;
            let impact = prof.calculate_market_impact(typical_order_value, true);
            if impact.price_impact > 0.001 { // More than 10 bps impact
                adjustment *= (1.0 - impact.price_impact * 10.0).max(0.5);
            }
        }
        adjustment
    }

    /// Step size down as overlap with existing positions grows.
    fn calculate_correlation_adjustment(&self, correlation_exposure: f64) -> f64 {
        if correlation_exposure > 0.7 {
            0.5
        } else if correlation_exposure > 0.5 {
            0.7
        } else if correlation_exposure > 0.3 {
            0.9
        } else {
            1.0
        }
    }

    /// Cut size progressively while the account is in drawdown.
    fn calculate_drawdown_adjustment(&self, current_drawdown: f64) -> f64 {
        if current_drawdown > 0.20 {
            0.5 // 50% reduction if in 20%+ drawdown
        } else if current_drawdown > 0.10 {
            0.7
        } else if current_drawdown > 0.05 {
            0.85
        } else {
            1.0
        }
    }

    /// Choose a stop from volatility-, ATR-, technical- and max-loss-based
    /// candidates, taking the most conservative one (closest to entry): the
    /// highest candidate for longs, the lowest for shorts.
    pub fn calculate_optimal_stop_loss(
        &self,
        entry_price: f64,
        volatility: f64,
        support_levels: &[f64],
        atr: Option<f64>,
        is_long: bool,
    ) -> f64 {
        let mut stop_candidates = Vec::new();
        // 1. Volatility-based stop: two volatilities away from entry.
        let vol_stop = if is_long {
            entry_price * (1.0 - 2.0 * volatility)
        } else {
            entry_price * (1.0 + 2.0 * volatility)
        };
        stop_candidates.push(("Volatility", vol_stop));
        // 2. ATR-based stop: two ATRs away from entry.
        if let Some(atr_value) = atr {
            let atr_stop = if is_long {
                entry_price - 2.0 * atr_value
            } else {
                entry_price + 2.0 * atr_value
            };
            stop_candidates.push(("ATR", atr_stop));
        }
        // 3. Support/Resistance based stop
        if !support_levels.is_empty() {
            let technical_stop = if is_long {
                // Find nearest support below entry
                support_levels.iter()
                    .filter(|&&level| level < entry_price)
                    .max_by(|a, b| a.partial_cmp(b).unwrap())
                    .map(|&level| level * 0.995) // Just below support
            } else {
                // Find nearest resistance above entry
                support_levels.iter()
                    .filter(|&&level| level > entry_price)
                    .min_by(|a, b| a.partial_cmp(b).unwrap())
                    .map(|&level| level * 1.005) // Just above resistance
            };
            if let Some(stop) = technical_stop {
                stop_candidates.push(("Technical", stop));
            }
        }
        // 4. Maximum loss stop: never risk more than 5% from entry.
        let max_loss_stop = if is_long {
            entry_price * 0.95
        } else {
            entry_price * 1.05
        };
        stop_candidates.push(("MaxLoss", max_loss_stop));
        // Choose the most conservative stop (closest to entry)
        if is_long {
            stop_candidates.iter()
                .map(|(_, stop)| *stop)
                .max_by(|a, b| a.partial_cmp(b).unwrap())
                .unwrap_or(vol_stop)
        } else {
            stop_candidates.iter()
                .map(|(_, stop)| *stop)
                .min_by(|a, b| a.partial_cmp(b).unwrap())
                .unwrap_or(vol_stop)
        }
    }
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Aggregate trading statistics consumed by Kelly-based sizing.
pub struct PerformanceStats {
    pub total_trades: u32, // Kelly sizing requires at least 20
    pub win_rate: f64, // fraction of winning trades, 0..1
    pub avg_win: f64, // average P&L of winning trades
    pub avg_loss: f64, // average P&L of losers; sign not assumed (abs() taken)
    pub sharpe_ratio: f64,
    pub max_consecutive_losses: u32,
}
/// Dynamic position sizing based on market conditions
pub struct DynamicSizer {
    base_sizer: BetSizer, // underlying sizing engine
    regime_detector: MarketRegimeDetector, // rolling market-state classifier
    performance_tracker: PerformanceTracker, // equity curve / drawdown state
}
impl DynamicSizer {
    /// Wrap a `BetSizer` with regime detection and performance tracking.
    pub fn new(capital: f64, base_risk: f64) -> Self {
        Self {
            base_sizer: BetSizer::new(capital, base_risk),
            regime_detector: MarketRegimeDetector::new(),
            performance_tracker: PerformanceTracker::new(),
        }
    }
    /// Feed one observation of market state into the regime detector.
    pub fn update_market_data(&mut self, volatility: f64, volume: f64, trend_strength: f64) {
        self.regime_detector.update(volatility, volume, trend_strength);
    }
    /// Record a closed trade's P&L; updates the drawdown tracker.
    pub fn record_trade(&mut self, pnl: f64, holding_period: i64) {
        self.performance_tracker.record_trade(pnl, holding_period);
    }
    /// Produce partially-filled sizing parameters: regime, volatility and
    /// drawdown come from internal state; the zeroed signal/liquidity/
    /// correlation fields must be filled in by the caller before sizing.
    pub fn get_sizing_parameters(&self) -> BetSizingParameters {
        BetSizingParameters {
            signal_strength: 0.0, // To be filled by strategy
            signal_confidence: 0.0, // To be filled by strategy
            market_regime: self.regime_detector.current_regime(),
            volatility: self.regime_detector.current_volatility(),
            liquidity_score: 0.0, // To be filled from orderbook
            correlation_exposure: 0.0, // To be filled from portfolio
            current_drawdown: self.performance_tracker.current_drawdown(),
        }
    }
}
/// Rolling-window market-regime classifier fed volatility, volume and
/// trend-strength observations.
struct MarketRegimeDetector {
    volatility_history: Vec<f64>,
    volume_history: Vec<f64>, // collected but not yet used by classification
    trend_history: Vec<f64>,
    window_size: usize, // maximum samples retained per series
}
impl MarketRegimeDetector {
    /// Detector with a 20-observation rolling window.
    fn new() -> Self {
        Self {
            volatility_history: Vec::new(),
            volume_history: Vec::new(),
            trend_history: Vec::new(),
            window_size: 20,
        }
    }
    /// Push one observation per series, trimming each to the window size.
    fn update(&mut self, volatility: f64, volume: f64, trend_strength: f64) {
        let cap = self.window_size;
        // Append, then drop the oldest sample once the window overflows.
        let push_trim = |series: &mut Vec<f64>, value: f64| {
            series.push(value);
            if series.len() > cap {
                series.remove(0);
            }
        };
        push_trim(&mut self.volatility_history, volatility);
        push_trim(&mut self.volume_history, volume);
        push_trim(&mut self.trend_history, trend_strength);
    }
    /// Classify the regime; volatility extremes take precedence over trend.
    fn current_regime(&self) -> MarketRegime {
        // With fewer than five samples, refuse to commit to a regime.
        if self.volatility_history.len() < 5 {
            return MarketRegime::Transitioning;
        }
        let mean = |series: &[f64]| series.iter().sum::<f64>() / series.len() as f64;
        let avg_vol = mean(&self.volatility_history);
        let avg_trend = mean(&self.trend_history);
        if avg_vol > 0.02 {
            MarketRegime::HighVolatility
        } else if avg_vol < 0.01 {
            MarketRegime::LowVolatility
        } else if avg_trend.abs() > 0.7 {
            MarketRegime::Trending
        } else if avg_trend.abs() < 0.3 {
            MarketRegime::RangeBound
        } else {
            MarketRegime::Transitioning
        }
    }
    /// Mean of the volatility window; defaults to 1.5% daily vol with no data.
    fn current_volatility(&self) -> f64 {
        if self.volatility_history.is_empty() {
            0.015
        } else {
            self.volatility_history.iter().sum::<f64>() / self.volatility_history.len() as f64
        }
    }
}
/// Tracks per-trade P&L and the equity high-water mark needed to compute
/// the current drawdown.
struct PerformanceTracker {
    trades: Vec<(f64, i64)>, // (pnl, holding_period)
    peak_capital: f64,
    current_capital: f64,
}
impl PerformanceTracker {
    /// Start with a nominal $100k equity curve and no trade history.
    fn new() -> Self {
        let starting_capital = 100000.0;
        Self {
            trades: Vec::new(),
            peak_capital: starting_capital,
            current_capital: starting_capital,
        }
    }
    /// Log one trade and roll its P&L into the equity curve, ratcheting the
    /// peak so drawdown is always measured from the high-water mark.
    fn record_trade(&mut self, pnl: f64, holding_period: i64) {
        self.trades.push((pnl, holding_period));
        self.current_capital += pnl;
        self.peak_capital = self.peak_capital.max(self.current_capital);
    }
    /// Fraction of equity lost from the peak; 0.0 at (or above) the highs.
    fn current_drawdown(&self) -> f64 {
        match self.peak_capital {
            peak if peak > 0.0 => (peak - self.current_capital) / peak,
            _ => 0.0,
        }
    }
}

View file

@ -1,9 +1,15 @@
pub mod portfolio;
pub mod bet_sizing;
use crate::{Order, Side};
use dashmap::DashMap;
use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
pub use portfolio::{PortfolioRisk, RiskModel, CorrelationMatrix, ConcentrationMetrics};
pub use bet_sizing::{BetSizer, BetSizingParameters, PositionSize, MarketRegime, DynamicSizer};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RiskLimits {
pub max_position_size: f64,

View file

@ -0,0 +1,533 @@
use std::collections::HashMap;
use nalgebra::{DMatrix, DVector};
use crate::positions::Position;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Portfolio-level risk report produced by `RiskModel::calculate_portfolio_risk`.
pub struct PortfolioRisk {
    pub total_var_95: f64, // parametric VaR at 95% (value * vol * 1.645)
    pub total_var_99: f64, // parametric VaR at 99% (value * vol * 2.326)
    pub total_cvar_95: f64, // conditional VaR at 95%
    pub marginal_var: HashMap<String, f64>, // per-symbol marginal VaR
    pub component_var: HashMap<String, f64>, // per-symbol VaR contribution
    pub correlation_matrix: CorrelationMatrix,
    pub concentration_risk: ConcentrationMetrics,
    pub stress_test_results: HashMap<String, f64>, // scenario name -> impact
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Pairwise return correlations plus summary statistics.
pub struct CorrelationMatrix {
    pub symbols: Vec<String>, // row/column ordering of `matrix`
    pub matrix: Vec<Vec<f64>>,
    pub average_correlation: f64,
    pub max_correlation: (String, String, f64), // most-correlated symbol pair
    pub clustering_score: f64, // How clustered the portfolio is
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// How concentrated the portfolio is across positions and sectors.
pub struct ConcentrationMetrics {
    pub herfindahl_index: f64, // presumably sum of squared weights — computed elsewhere, confirm
    pub effective_number_of_positions: f64,
    pub top_5_concentration: f64,
    pub sector_concentration: HashMap<String, f64>,
}
#[derive(Debug, Clone)]
/// Per-symbol return store plus the portfolio risk calculator built on it.
pub struct RiskModel {
    returns_history: HashMap<String, Vec<f64>>, // symbol -> return series
    // Intended number of return observations per symbol.
    // NOTE(review): not enforced by `update_returns`, which stores whatever
    // series it is handed — confirm truncation happens elsewhere.
    lookback_period: usize,
    confidence_levels: Vec<f64>, // VaR confidence levels (0.95, 0.99)
}
impl RiskModel {
/// Create a risk model intended to keep `lookback_period` return
/// observations per symbol, reporting VaR at 95% and 99% confidence.
pub fn new(lookback_period: usize) -> Self {
    Self {
        returns_history: HashMap::new(),
        lookback_period,
        confidence_levels: vec![0.95, 0.99],
    }
}
/// Replace (not append to) the stored return series for `symbol`.
pub fn update_returns(&mut self, symbol: &str, returns: Vec<f64>) {
    self.returns_history.insert(symbol.to_string(), returns);
}
/// Compute the full portfolio risk report.
///
/// Positions lacking an entry in `current_prices` are ignored by the weight
/// calculation. Errors from the helper stages (e.g. a zero-value portfolio
/// in `get_portfolio_weights`) are propagated to the caller.
pub fn calculate_portfolio_risk(
    &self,
    positions: &HashMap<String, Position>,
    current_prices: &HashMap<String, f64>,
) -> Result<PortfolioRisk, String> {
    // Get active symbols and their weights
    let (symbols, weights) = self.get_portfolio_weights(positions, current_prices)?;
    // Calculate covariance matrix
    let cov_matrix = self.calculate_covariance_matrix(&symbols)?;
    // Portfolio variance
    let portfolio_variance = self.calculate_portfolio_variance(&weights, &cov_matrix);
    let portfolio_vol = portfolio_variance.sqrt();
    // Parametric (normal) VaR: portfolio value * volatility * z-score.
    let total_portfolio_value = self.calculate_total_value(positions, current_prices);
    let total_var_95 = total_portfolio_value * portfolio_vol * 1.645; // 95% confidence
    let total_var_99 = total_portfolio_value * portfolio_vol * 2.326; // 99% confidence
    // CVaR (Conditional VaR)
    let total_cvar_95 = self.calculate_cvar(&symbols, &weights, 0.95)?;
    // Marginal and Component VaR
    let (marginal_var, component_var) = self.calculate_var_decomposition(
        &symbols,
        &weights,
        &cov_matrix,
        portfolio_vol,
        total_portfolio_value,
    )?;
    // Correlation analysis
    let correlation_matrix = self.calculate_correlation_matrix(&symbols)?;
    // Concentration metrics
    let concentration_risk = self.calculate_concentration_metrics(positions, current_prices);
    // Stress tests
    let stress_test_results = self.run_stress_tests(positions, current_prices, &cov_matrix)?;
    Ok(PortfolioRisk {
        total_var_95,
        total_var_99,
        total_cvar_95,
        marginal_var,
        component_var,
        correlation_matrix,
        concentration_risk,
        stress_test_results,
    })
}
fn get_portfolio_weights(
&self,
positions: &HashMap<String, Position>,
current_prices: &HashMap<String, f64>,
) -> Result<(Vec<String>, DVector<f64>), String> {
let mut symbols = Vec::new();
let mut values = Vec::new();
let mut total_value = 0.0;
for (symbol, position) in positions {
if let Some(&price) = current_prices.get(symbol) {
let position_value = position.quantity * price;
symbols.push(symbol.clone());
values.push(position_value.abs());
total_value += position_value.abs();
}
}
if total_value == 0.0 {
return Err("Portfolio has zero value".to_string());
}
let weights = DVector::from_vec(values.iter().map(|v| v / total_value).collect());
Ok((symbols, weights))
}
fn calculate_covariance_matrix(&self, symbols: &[String]) -> Result<DMatrix<f64>, String> {
let n = symbols.len();
let mut matrix = DMatrix::zeros(n, n);
for (i, symbol_i) in symbols.iter().enumerate() {
for (j, symbol_j) in symbols.iter().enumerate() {
if i <= j {
let cov = self.calculate_covariance(symbol_i, symbol_j)?;
matrix[(i, j)] = cov;
matrix[(j, i)] = cov; // Symmetric
}
}
}
Ok(matrix)
}
fn calculate_covariance(&self, symbol1: &str, symbol2: &str) -> Result<f64, String> {
let returns1 = self.returns_history.get(symbol1)
.ok_or_else(|| format!("No returns data for {}", symbol1))?;
let returns2 = self.returns_history.get(symbol2)
.ok_or_else(|| format!("No returns data for {}", symbol2))?;
if returns1.len() != returns2.len() {
return Err("Mismatched returns length".to_string());
}
let n = returns1.len() as f64;
let mean1 = returns1.iter().sum::<f64>() / n;
let mean2 = returns2.iter().sum::<f64>() / n;
let covariance = returns1.iter()
.zip(returns2.iter())
.map(|(r1, r2)| (r1 - mean1) * (r2 - mean2))
.sum::<f64>() / (n - 1.0);
Ok(covariance)
}
fn calculate_portfolio_variance(&self, weights: &DVector<f64>, cov_matrix: &DMatrix<f64>) -> f64 {
let variance = weights.transpose() * cov_matrix * weights;
variance[(0, 0)]
}
fn calculate_var_decomposition(
&self,
symbols: &[String],
weights: &DVector<f64>,
cov_matrix: &DMatrix<f64>,
portfolio_vol: f64,
total_value: f64,
) -> Result<(HashMap<String, f64>, HashMap<String, f64>), String> {
let mut marginal_var = HashMap::new();
let mut component_var = HashMap::new();
// Marginal VaR = ∂VaR/∂w_i
let cov_weights = cov_matrix * weights;
for (i, symbol) in symbols.iter().enumerate() {
let marginal = (cov_weights[i] / portfolio_vol) * 1.645 * total_value;
let component = marginal * weights[i];
marginal_var.insert(symbol.clone(), marginal);
component_var.insert(symbol.clone(), component);
}
Ok((marginal_var, component_var))
}
fn calculate_cvar(&self, symbols: &[String], weights: &DVector<f64>, confidence: f64) -> Result<f64, String> {
// Simulate portfolio returns
let portfolio_returns = self.simulate_portfolio_returns(symbols, weights, 10000)?;
// Sort returns
let mut sorted_returns = portfolio_returns.clone();
sorted_returns.sort_by(|a, b| a.partial_cmp(b).unwrap());
// Find VaR threshold
let var_index = ((1.0 - confidence) * sorted_returns.len() as f64) as usize;
let var_threshold = sorted_returns[var_index];
// Calculate expected loss beyond VaR
let tail_losses: Vec<f64> = sorted_returns.iter()
.take(var_index)
.cloned()
.collect();
if tail_losses.is_empty() {
return Ok(0.0);
}
let cvar = -tail_losses.iter().sum::<f64>() / tail_losses.len() as f64;
Ok(cvar)
}
fn simulate_portfolio_returns(
&self,
symbols: &[String],
weights: &DVector<f64>,
num_simulations: usize,
) -> Result<Vec<f64>, String> {
let mut portfolio_returns = Vec::with_capacity(num_simulations);
// Use historical simulation
let min_length = symbols.iter()
.map(|s| self.returns_history.get(s).map(|r| r.len()).unwrap_or(0))
.min()
.unwrap_or(0);
if min_length == 0 {
return Err("No returns data available".to_string());
}
// Bootstrap from historical returns
use rand::prelude::*;
let mut rng = thread_rng();
for _ in 0..num_simulations {
let idx = rng.gen_range(0..min_length);
let mut portfolio_return = 0.0;
for (i, symbol) in symbols.iter().enumerate() {
if let Some(returns) = self.returns_history.get(symbol) {
portfolio_return += weights[i] * returns[idx];
}
}
portfolio_returns.push(portfolio_return);
}
Ok(portfolio_returns)
}
fn calculate_correlation_matrix(&self, symbols: &[String]) -> Result<CorrelationMatrix, String> {
let n = symbols.len();
let mut matrix = vec![vec![0.0; n]; n];
let mut sum_correlation = 0.0;
let mut count = 0;
let mut max_correlation = ("".to_string(), "".to_string(), 0.0);
for (i, symbol_i) in symbols.iter().enumerate() {
for (j, symbol_j) in symbols.iter().enumerate() {
if i <= j {
let corr = if i == j {
1.0
} else {
self.calculate_correlation(symbol_i, symbol_j)?
};
matrix[i][j] = corr;
matrix[j][i] = corr;
if i != j {
sum_correlation += corr.abs();
count += 1;
if corr.abs() > max_correlation.2 {
max_correlation = (symbol_i.clone(), symbol_j.clone(), corr);
}
}
}
}
}
let average_correlation = if count > 0 {
sum_correlation / count as f64
} else {
0.0
};
// Calculate clustering score (higher = more clustered)
let clustering_score = self.calculate_clustering_score(&matrix);
Ok(CorrelationMatrix {
symbols: symbols.to_vec(),
matrix,
average_correlation,
max_correlation,
clustering_score,
})
}
fn calculate_correlation(&self, symbol1: &str, symbol2: &str) -> Result<f64, String> {
let returns1 = self.returns_history.get(symbol1)
.ok_or_else(|| format!("No returns data for {}", symbol1))?;
let returns2 = self.returns_history.get(symbol2)
.ok_or_else(|| format!("No returns data for {}", symbol2))?;
let cov = self.calculate_covariance(symbol1, symbol2)?;
let std1 = self.calculate_std_dev(returns1);
let std2 = self.calculate_std_dev(returns2);
if std1 == 0.0 || std2 == 0.0 {
return Ok(0.0);
}
Ok(cov / (std1 * std2))
}
fn calculate_std_dev(&self, returns: &[f64]) -> f64 {
let n = returns.len() as f64;
let mean = returns.iter().sum::<f64>() / n;
let variance = returns.iter()
.map(|r| (r - mean).powi(2))
.sum::<f64>() / (n - 1.0);
variance.sqrt()
}
fn calculate_clustering_score(&self, correlation_matrix: &[Vec<f64>]) -> f64 {
// Use average linkage clustering metric
let n = correlation_matrix.len();
if n < 2 {
return 0.0;
}
let mut cluster_sum = 0.0;
let mut cluster_count = 0;
// Look for groups of highly correlated assets
for i in 0..n {
for j in i+1..n {
for k in j+1..n {
let corr_ij = correlation_matrix[i][j].abs();
let corr_ik = correlation_matrix[i][k].abs();
let corr_jk = correlation_matrix[j][k].abs();
// If all three are highly correlated, they form a cluster
let min_corr = corr_ij.min(corr_ik).min(corr_jk);
if min_corr > 0.5 {
cluster_sum += min_corr;
cluster_count += 1;
}
}
}
}
if cluster_count > 0 {
cluster_sum / cluster_count as f64
} else {
0.0
}
}
fn calculate_concentration_metrics(
&self,
positions: &HashMap<String, Position>,
current_prices: &HashMap<String, f64>,
) -> ConcentrationMetrics {
let mut position_values: Vec<(String, f64)> = Vec::new();
let mut total_value = 0.0;
for (symbol, position) in positions {
if let Some(&price) = current_prices.get(symbol) {
let value = (position.quantity * price).abs();
position_values.push((symbol.clone(), value));
total_value += value;
}
}
// Sort by value descending
position_values.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());
// Herfindahl Index
let herfindahl_index = position_values.iter()
.map(|(_, value)| {
let weight = value / total_value;
weight * weight
})
.sum();
// Effective number of positions
let effective_number_of_positions = if herfindahl_index > 0.0 {
1.0 / herfindahl_index
} else {
0.0
};
// Top 5 concentration
let top_5_value: f64 = position_values.iter()
.take(5)
.map(|(_, value)| value)
.sum();
let top_5_concentration = top_5_value / total_value;
// Sector concentration (simplified - would need sector mapping)
let sector_concentration = HashMap::new(); // TODO: Implement with sector data
ConcentrationMetrics {
herfindahl_index,
effective_number_of_positions,
top_5_concentration,
sector_concentration,
}
}
fn calculate_total_value(
&self,
positions: &HashMap<String, Position>,
current_prices: &HashMap<String, f64>,
) -> f64 {
positions.iter()
.filter_map(|(symbol, position)| {
current_prices.get(symbol).map(|&price| (position.quantity * price).abs())
})
.sum()
}
fn run_stress_tests(
&self,
positions: &HashMap<String, Position>,
current_prices: &HashMap<String, f64>,
cov_matrix: &DMatrix<f64>,
) -> Result<HashMap<String, f64>, String> {
let mut results = HashMap::new();
// Market crash scenario
let market_crash_loss = self.calculate_scenario_loss(
positions,
current_prices,
-0.20, // 20% market drop
);
results.insert("market_crash_20pct".to_string(), market_crash_loss);
// Flight to quality
let flight_to_quality = self.calculate_correlation_stress(
positions,
current_prices,
cov_matrix,
0.9, // High correlation scenario
)?;
results.insert("flight_to_quality".to_string(), flight_to_quality);
// Volatility spike
let vol_spike = self.calculate_volatility_stress(
positions,
current_prices,
cov_matrix,
2.0, // Double volatility
)?;
results.insert("volatility_spike_2x".to_string(), vol_spike);
Ok(results)
}
fn calculate_scenario_loss(
&self,
positions: &HashMap<String, Position>,
current_prices: &HashMap<String, f64>,
shock: f64,
) -> f64 {
let current_value = self.calculate_total_value(positions, current_prices);
current_value * shock.abs()
}
fn calculate_correlation_stress(
&self,
positions: &HashMap<String, Position>,
current_prices: &HashMap<String, f64>,
cov_matrix: &DMatrix<f64>,
target_correlation: f64,
) -> Result<f64, String> {
// Adjust correlation matrix to stress scenario
let n = cov_matrix.nrows();
let mut stressed_cov = cov_matrix.clone();
for i in 0..n {
for j in 0..n {
if i != j {
let current_corr = cov_matrix[(i, j)] / (cov_matrix[(i, i)].sqrt() * cov_matrix[(j, j)].sqrt());
let stress_factor = target_correlation / current_corr.abs().max(0.1);
stressed_cov[(i, j)] *= stress_factor;
}
}
}
let (_, weights) = self.get_portfolio_weights(positions, current_prices)?;
let stressed_variance = self.calculate_portfolio_variance(&weights, &stressed_cov);
let stressed_vol = stressed_variance.sqrt();
Ok(self.calculate_total_value(positions, current_prices) * stressed_vol * 2.326) // 99% VaR
}
fn calculate_volatility_stress(
&self,
positions: &HashMap<String, Position>,
current_prices: &HashMap<String, f64>,
cov_matrix: &DMatrix<f64>,
vol_multiplier: f64,
) -> Result<f64, String> {
let stressed_cov = cov_matrix * vol_multiplier.powi(2);
let (_, weights) = self.get_portfolio_weights(positions, current_prices)?;
let stressed_variance = self.calculate_portfolio_variance(&weights, &stressed_cov);
let stressed_vol = stressed_variance.sqrt();
Ok(self.calculate_total_value(positions, current_prices) * stressed_vol * 2.326) // 99% VaR
}
}