small fixes for backtest

This commit is contained in:
Boki 2025-07-03 18:14:40 -04:00
parent 6df32dc18b
commit 6cf3179092
16 changed files with 2180 additions and 16 deletions

47
Cargo.lock generated
View file

@ -160,6 +160,7 @@ dependencies = [
"chrono",
"crossbeam",
"dashmap",
"nalgebra 0.32.6",
"napi",
"napi-build",
"napi-derive",
@ -411,13 +412,29 @@ checksum = "d506eb7e08d6329505faa8a3a00a5dcc6de9f76e0c77e4b75763ae3c770831ff"
dependencies = [
"approx",
"matrixmultiply",
"nalgebra-macros",
"nalgebra-macros 0.1.0",
"num-complex",
"num-rational",
"num-traits",
"rand",
"rand_distr",
"simba",
"simba 0.6.0",
"typenum",
]
[[package]]
name = "nalgebra"
version = "0.32.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b5c17de023a86f59ed79891b2e5d5a94c705dbe904a5b5c9c952ea6221b03e4"
dependencies = [
"approx",
"matrixmultiply",
"nalgebra-macros 0.2.2",
"num-complex",
"num-rational",
"num-traits",
"simba 0.8.1",
"typenum",
]
@ -432,6 +449,17 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "nalgebra-macros"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "254a5372af8fc138e36684761d3c0cdb758a4410e938babcff1c860ce14ddbfc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.104",
]
[[package]]
name = "napi"
version = "2.16.17"
@ -816,6 +844,19 @@ dependencies = [
"wide",
]
[[package]]
name = "simba"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "061507c94fc6ab4ba1c9a0305018408e312e17c041eb63bef8aa726fa33aceae"
dependencies = [
"approx",
"num-complex",
"num-traits",
"paste",
"wide",
]
[[package]]
name = "smallvec"
version = "1.15.1"
@ -840,7 +881,7 @@ checksum = "b35a062dbadac17a42e0fc64c27f419b25d6fae98572eb43c8814c9e873d7721"
dependencies = [
"approx",
"lazy_static",
"nalgebra",
"nalgebra 0.29.0",
"num-traits",
"rand",
]

View file

@ -31,6 +31,7 @@ napi-derive = "2"
statrs = "0.16"
rand = "0.8"
rand_distr = "0.4"
nalgebra = "0.32"
# Logging
tracing = "0.1"

Binary file not shown.

View file

@ -3,7 +3,7 @@ use napi::{bindgen_prelude::*};
use serde_json;
use crate::indicators::{
SMA, EMA, RSI, MACD, BollingerBands, Stochastic, ATR,
Indicator, IncrementalIndicator, IndicatorResult, PriceData
Indicator, IncrementalIndicator
};
/// Convert JS array to Vec<f64>

View file

@ -1,6 +1,8 @@
mod indicators;
mod risk;
pub use indicators::{TechnicalIndicators, IncrementalSMA, IncrementalEMA, IncrementalRSI};
pub use risk::{RiskAnalyzer, OrderbookAnalyzer};
use napi_derive::napi;
use napi::{bindgen_prelude::*, JsObject};

View file

@ -0,0 +1,166 @@
use napi_derive::napi;
use napi::{bindgen_prelude::*};
use crate::risk::{BetSizer, BetSizingParameters, MarketRegime, RiskModel};
use crate::orderbook::{OrderBookAnalytics, LiquidityProfile};
use crate::positions::Position;
use std::collections::HashMap;
/// N-API-exposed facade combining the historical-returns risk model with
/// the bet-sizing engine, so JS callers get one risk entry point.
#[napi]
pub struct RiskAnalyzer {
    // Portfolio VaR model driven by per-symbol return histories.
    risk_model: RiskModel,
    // Position-sizing engine (Kelly / volatility / regime adjustments).
    bet_sizer: BetSizer,
}
#[napi]
impl RiskAnalyzer {
    /// Build an analyzer for `capital` dollars, risking `base_risk_per_trade`
    /// (a fraction of capital) per trade, with a `lookback_period`-sample
    /// window for the risk model.
    #[napi(constructor)]
    pub fn new(capital: f64, base_risk_per_trade: f64, lookback_period: u32) -> Self {
        Self {
            risk_model: RiskModel::new(lookback_period as usize),
            bet_sizer: BetSizer::new(capital, base_risk_per_trade),
        }
    }

    /// Replace the stored return series for `symbol` in the risk model.
    #[napi]
    pub fn update_returns(&mut self, symbol: String, returns: Vec<f64>) -> Result<()> {
        self.risk_model.update_returns(&symbol, returns);
        Ok(())
    }

    /// Compute portfolio risk from JSON inputs and return the serialized report.
    ///
    /// `positions_json` is an array of `[symbol, quantity, avg_price]` tuples;
    /// `prices_json` is a map of symbol -> current price. Errors are surfaced
    /// as napi errors for parse failures or risk-model failures.
    #[napi]
    pub fn calculate_portfolio_risk(&self, positions_json: String, prices_json: String) -> Result<String> {
        // Parse positions
        let positions_data: Vec<(String, f64, f64)> = serde_json::from_str(&positions_json)
            .map_err(|e| Error::from_reason(format!("Failed to parse positions: {}", e)))?;
        let mut positions = HashMap::new();
        for (symbol, quantity, avg_price) in positions_data {
            positions.insert(symbol.clone(), Position {
                symbol,
                quantity,
                average_price: avg_price,
                // PnL fields are zeroed here: only current exposure feeds the
                // risk calculation, not trade history.
                realized_pnl: 0.0,
                unrealized_pnl: 0.0,
                total_cost: quantity * avg_price,
                last_update: chrono::Utc::now(),
            });
        }
        // Parse prices
        let prices: HashMap<String, f64> = serde_json::from_str(&prices_json)
            .map_err(|e| Error::from_reason(format!("Failed to parse prices: {}", e)))?;
        // Calculate risk
        match self.risk_model.calculate_portfolio_risk(&positions, &prices) {
            Ok(risk) => Ok(serde_json::to_string(&risk).unwrap()),
            Err(e) => Err(Error::from_reason(e)),
        }
    }

    /// Size a position from signal/market inputs; returns the serialized
    /// `PositionSize`. Unrecognized `market_regime` strings fall back to
    /// `Transitioning` (the most conservative regime multiplier).
    #[napi]
    pub fn calculate_position_size(
        &self,
        signal_strength: f64,
        signal_confidence: f64,
        volatility: f64,
        liquidity_score: f64,
        current_drawdown: f64,
        price: f64,
        stop_loss: Option<f64>,
        market_regime: String,
    ) -> Result<String> {
        let regime = match market_regime.as_str() {
            "trending" => MarketRegime::Trending,
            "range_bound" => MarketRegime::RangeBound,
            "high_volatility" => MarketRegime::HighVolatility,
            "low_volatility" => MarketRegime::LowVolatility,
            _ => MarketRegime::Transitioning,
        };
        let params = BetSizingParameters {
            signal_strength,
            signal_confidence,
            market_regime: regime,
            volatility,
            liquidity_score,
            correlation_exposure: 0.0, // Would be calculated from portfolio
            current_drawdown,
        };
        let position_size = self.bet_sizer.calculate_position_size(
            &params,
            price,
            stop_loss,
            None, // Historical performance
            None, // Orderbook analytics
            None, // Liquidity profile
        );
        Ok(serde_json::to_string(&position_size).unwrap())
    }

    /// Thin passthrough to `BetSizer::calculate_optimal_stop_loss`.
    #[napi]
    pub fn calculate_optimal_stop_loss(
        &self,
        entry_price: f64,
        volatility: f64,
        support_levels: Vec<f64>,
        atr: Option<f64>,
        is_long: bool,
    ) -> f64 {
        self.bet_sizer.calculate_optimal_stop_loss(
            entry_price,
            volatility,
            &support_levels,
            atr,
            is_long,
        )
    }
}
/// Stateless N-API wrapper exposing order-book analytics helpers to JS.
#[napi]
pub struct OrderbookAnalyzer {}
#[napi]
impl OrderbookAnalyzer {
    /// No state to initialize; all methods take the snapshot as JSON.
    #[napi(constructor)]
    pub fn new() -> Self {
        Self {}
    }

    /// Parse a JSON `OrderBookSnapshot` and return serialized
    /// `OrderBookAnalytics`; errors if parsing fails or either book side
    /// is empty (analytics calculation returns `None`).
    #[napi]
    pub fn analyze_orderbook(&self, snapshot_json: String) -> Result<String> {
        let snapshot: crate::OrderBookSnapshot = serde_json::from_str(&snapshot_json)
            .map_err(|e| Error::from_reason(format!("Failed to parse snapshot: {}", e)))?;
        match OrderBookAnalytics::calculate(&snapshot) {
            Some(analytics) => Ok(serde_json::to_string(&analytics).unwrap()),
            None => Err(Error::from_reason("Failed to calculate analytics")),
        }
    }

    /// Parse a JSON snapshot and return its serialized cumulative
    /// `LiquidityProfile`.
    #[napi]
    pub fn calculate_liquidity_profile(&self, snapshot_json: String) -> Result<String> {
        let snapshot: crate::OrderBookSnapshot = serde_json::from_str(&snapshot_json)
            .map_err(|e| Error::from_reason(format!("Failed to parse snapshot: {}", e)))?;
        let profile = LiquidityProfile::from_snapshot(&snapshot);
        Ok(serde_json::to_string(&profile).unwrap())
    }

    /// Simulate walking `order_size_usd` through the book (asks when
    /// `is_buy`, bids otherwise) and return the serialized `MarketImpact`.
    #[napi]
    pub fn calculate_market_impact(
        &self,
        snapshot_json: String,
        order_size_usd: f64,
        is_buy: bool,
    ) -> Result<String> {
        let snapshot: crate::OrderBookSnapshot = serde_json::from_str(&snapshot_json)
            .map_err(|e| Error::from_reason(format!("Failed to parse snapshot: {}", e)))?;
        let profile = LiquidityProfile::from_snapshot(&snapshot);
        let impact = profile.calculate_market_impact(order_size_usd, is_buy);
        Ok(serde_json::to_string(&impact).unwrap())
    }
}

View file

@ -0,0 +1,374 @@
use crate::{OrderBookSnapshot, PriceLevel};
use serde::{Serialize, Deserialize};
/// Point-in-time analytics derived from a single order-book snapshot.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OrderBookAnalytics {
    pub spread: f64,
    pub spread_bps: f64,
    pub mid_price: f64,
    pub micro_price: f64, // Size-weighted mid price
    // -1..1, computed as (bid_size - ask_size)/(bid_size + ask_size):
    // positive = bid-side size dominates, negative = ask-side dominates.
    pub imbalance: f64,
    pub depth_imbalance: OrderBookImbalance,
    pub liquidity_score: f64,
    pub effective_spread: f64,
}
/// Bid/ask size imbalance sampled at several book depths.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OrderBookImbalance {
    pub level_1: f64,
    pub level_5: f64,
    pub level_10: f64,
    // Top-10 imbalance with 1/(i+1) weights (top levels count most).
    pub weighted: f64,
}
/// Cumulative per-level liquidity on each side of the book.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LiquidityProfile {
    pub bid_liquidity: Vec<LiquidityLevel>,
    pub ask_liquidity: Vec<LiquidityLevel>,
    // Total notional (price * size summed) resting on each side.
    pub total_bid_depth: f64,
    pub total_ask_depth: f64,
    // Size-weighted average price over all levels on each side.
    pub bid_depth_weighted_price: f64,
    pub ask_depth_weighted_price: f64,
}
/// One price level plus running totals up to and including it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LiquidityLevel {
    pub price: f64,
    pub size: f64,
    pub cumulative_size: f64,
    pub cost_to_execute: f64, // Cost to buy/sell up to this level
}
impl OrderBookAnalytics {
/// Compute top-of-book analytics for a snapshot.
///
/// Returns `None` when either side of the book is empty. Fix: the
/// original divided by `bid_size + ask_size` unguarded, so zero-size
/// top levels produced NaN `micro_price`/`imbalance`; we now fall back
/// to the plain mid price and a neutral imbalance in that case.
pub fn calculate(snapshot: &OrderBookSnapshot) -> Option<Self> {
    if snapshot.bids.is_empty() || snapshot.asks.is_empty() {
        return None;
    }
    let best_bid = snapshot.bids[0].price;
    let best_ask = snapshot.asks[0].price;
    let spread = best_ask - best_bid;
    let mid_price = (best_bid + best_ask) / 2.0;
    let spread_bps = (spread / mid_price) * 10000.0;
    let bid_size = snapshot.bids[0].size;
    let ask_size = snapshot.asks[0].size;
    let total_top_size = bid_size + ask_size;
    let (micro_price, imbalance) = if total_top_size > 0.0 {
        (
            // Size-weighted mid: leans toward the side with less resting size.
            (best_bid * ask_size + best_ask * bid_size) / total_top_size,
            // -1..1; positive means bid-side size dominates at the top.
            (bid_size - ask_size) / total_top_size,
        )
    } else {
        // Degenerate book (both top sizes zero): neutral values.
        (mid_price, 0.0)
    };
    // Imbalance sampled at multiple depths.
    let depth_imbalance = Self::calculate_depth_imbalance(snapshot);
    // Composite 0..1 liquidity score.
    let liquidity_score = Self::calculate_liquidity_score(snapshot);
    // Effective spread for a notional $1000 order walked through the book.
    let effective_spread = Self::calculate_effective_spread(snapshot, 1000.0);
    Some(OrderBookAnalytics {
        spread,
        spread_bps,
        mid_price,
        micro_price,
        imbalance,
        depth_imbalance,
        liquidity_score,
        effective_spread,
    })
}
/// Bid/ask size imbalance at depths 1, 5, and 10, plus a 1/(i+1)-weighted
/// top-10 imbalance. All values are in -1..1; empty depth yields 0.
fn calculate_depth_imbalance(snapshot: &OrderBookSnapshot) -> OrderBookImbalance {
    let calc_imbalance = |depth: usize| -> f64 {
        let bid_depth: f64 = snapshot.bids.iter()
            .take(depth)
            .map(|l| l.size)
            .sum();
        let ask_depth: f64 = snapshot.asks.iter()
            .take(depth)
            .map(|l| l.size)
            .sum();
        if bid_depth + ask_depth > 0.0 {
            (bid_depth - ask_depth) / (bid_depth + ask_depth)
        } else {
            0.0
        }
    };
    // Weighted imbalance (more weight on top levels)
    let mut weighted_bid = 0.0;
    let mut weighted_ask = 0.0;
    let mut weight_sum = 0.0;
    // zip() stops at the shorter side, so uneven books only compare
    // levels present on both sides.
    for (i, (bid, ask)) in snapshot.bids.iter().zip(snapshot.asks.iter()).enumerate().take(10) {
        let weight = 1.0 / (i + 1) as f64;
        weighted_bid += bid.size * weight;
        weighted_ask += ask.size * weight;
        weight_sum += weight;
    }
    let weighted = if weighted_bid + weighted_ask > 0.0 {
        (weighted_bid - weighted_ask) / (weighted_bid + weighted_ask)
    } else {
        0.0
    };
    OrderBookImbalance {
        level_1: calc_imbalance(1),
        level_5: calc_imbalance(5),
        level_10: calc_imbalance(10),
        weighted,
    }
}
/// Composite 0..1 score: 30% level count, 40% top-5 volume, 30% spread
/// tightness.
fn calculate_liquidity_score(snapshot: &OrderBookSnapshot) -> f64 {
    // Liquidity score based on depth and tightness
    let depth_score = (snapshot.bids.len() + snapshot.asks.len()) as f64 / 20.0; // Normalize by 10 levels each side
    let volume_score = {
        let bid_volume: f64 = snapshot.bids.iter().take(5).map(|l| l.size).sum();
        let ask_volume: f64 = snapshot.asks.iter().take(5).map(|l| l.size).sum();
        // NOTE(review): sums raw level sizes, so the 10000 normalizer is in
        // size units, not dollars as the comment suggests — confirm intent.
        ((bid_volume + ask_volume) / 10000.0).min(1.0) // Normalize by $10k
    };
    let spread_score = if let (Some(bid), Some(ask)) = (snapshot.bids.first(), snapshot.asks.first()) {
        let spread_bps = ((ask.price - bid.price) / ((ask.price + bid.price) / 2.0)) * 10000.0;
        (50.0 / (spread_bps + 1.0)).min(1.0) // Lower spread = higher score
    } else {
        0.0
    };
    (depth_score * 0.3 + volume_score * 0.4 + spread_score * 0.3).min(1.0)
}
/// Difference between the average buy and sell execution price for a
/// notional `order_size_usd` order walked through each side; falls back
/// to the raw top-of-book spread if either walk yields no fill.
fn calculate_effective_spread(snapshot: &OrderBookSnapshot, order_size_usd: f64) -> f64 {
    // NOTE(review): `is_buy` is accepted but unused inside the closure —
    // both sides are walked identically.
    let avg_execution_price = |levels: &[PriceLevel], size_usd: f64, is_buy: bool| -> Option<f64> {
        let mut remaining = size_usd;
        let mut total_cost = 0.0;
        let mut total_shares = 0.0;
        for level in levels {
            let level_value = level.price * level.size;
            if remaining <= level_value {
                // Final partial fill at this level.
                let shares = remaining / level.price;
                total_cost += remaining;
                total_shares += shares;
                break;
            } else {
                // Consume the whole level and keep walking.
                total_cost += level_value;
                total_shares += level.size;
                remaining -= level_value;
            }
        }
        if total_shares > 0.0 {
            Some(total_cost / total_shares)
        } else {
            None
        }
    };
    if let (Some(bid_exec), Some(ask_exec)) = (
        avg_execution_price(&snapshot.bids, order_size_usd, false),
        avg_execution_price(&snapshot.asks, order_size_usd, true)
    ) {
        ask_exec - bid_exec
    } else if let (Some(bid), Some(ask)) = (snapshot.bids.first(), snapshot.asks.first()) {
        ask.price - bid.price
    } else {
        0.0
    }
}
}
impl LiquidityProfile {
    /// Build cumulative per-level liquidity from a snapshot: running size
    /// and running notional cost for each side, plus totals and
    /// size-weighted average prices.
    pub fn from_snapshot(snapshot: &OrderBookSnapshot) -> Self {
        let mut bid_liquidity = Vec::new();
        let mut ask_liquidity = Vec::new();
        let mut cumulative_bid_size = 0.0;
        let mut cumulative_bid_cost = 0.0;
        for bid in &snapshot.bids {
            cumulative_bid_size += bid.size;
            cumulative_bid_cost += bid.price * bid.size;
            bid_liquidity.push(LiquidityLevel {
                price: bid.price,
                size: bid.size,
                cumulative_size: cumulative_bid_size,
                cost_to_execute: cumulative_bid_cost,
            });
        }
        let mut cumulative_ask_size = 0.0;
        let mut cumulative_ask_cost = 0.0;
        for ask in &snapshot.asks {
            cumulative_ask_size += ask.size;
            cumulative_ask_cost += ask.price * ask.size;
            ask_liquidity.push(LiquidityLevel {
                price: ask.price,
                size: ask.size,
                cumulative_size: cumulative_ask_size,
                cost_to_execute: cumulative_ask_cost,
            });
        }
        // Depth totals are the full-book notional on each side.
        let total_bid_depth = cumulative_bid_cost;
        let total_ask_depth = cumulative_ask_cost;
        let bid_depth_weighted_price = if cumulative_bid_size > 0.0 {
            cumulative_bid_cost / cumulative_bid_size
        } else {
            0.0
        };
        let ask_depth_weighted_price = if cumulative_ask_size > 0.0 {
            cumulative_ask_cost / cumulative_ask_size
        } else {
            0.0
        };
        Self {
            bid_liquidity,
            ask_liquidity,
            total_bid_depth,
            total_ask_depth,
            bid_depth_weighted_price,
            ask_depth_weighted_price,
        }
    }
    /// Calculate the market impact of executing a given size
    ///
    /// Walks `size_usd` notional through the appropriate side (asks when
    /// buying, bids when selling) and reports average fill price, impact
    /// vs. the top level, slippage, and levels consumed. If the book is
    /// too shallow the result reflects a partial fill of whatever depth
    /// exists. Empty side returns `MarketImpact::default()`.
    pub fn calculate_market_impact(&self, size_usd: f64, is_buy: bool) -> MarketImpact {
        let levels = if is_buy { &self.ask_liquidity } else { &self.bid_liquidity };
        if levels.is_empty() {
            return MarketImpact::default();
        }
        // Best price on the chosen side is the impact reference.
        let reference_price = levels[0].price;
        let mut remaining = size_usd;
        let mut total_cost = 0.0;
        let mut total_shares = 0.0;
        let mut levels_consumed = 0;
        for (i, level) in levels.iter().enumerate() {
            let level_value = level.price * level.size;
            if remaining <= level_value {
                // Final partial fill at this level.
                let shares = remaining / level.price;
                total_cost += shares * level.price;
                total_shares += shares;
                levels_consumed = i + 1;
                break;
            } else {
                // Consume the whole level and keep walking.
                total_cost += level_value;
                total_shares += level.size;
                remaining -= level_value;
                levels_consumed = i + 1;
            }
        }
        let avg_execution_price = if total_shares > 0.0 {
            total_cost / total_shares
        } else {
            reference_price
        };
        // Signed so that worse-than-reference execution is positive.
        let price_impact = if is_buy {
            (avg_execution_price - reference_price) / reference_price
        } else {
            (reference_price - avg_execution_price) / reference_price
        };
        let slippage = (avg_execution_price - reference_price).abs();
        MarketImpact {
            avg_execution_price,
            price_impact,
            slippage,
            levels_consumed,
            total_shares,
        }
    }
}
/// Result of simulating an order walked through one side of the book.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct MarketImpact {
    pub avg_execution_price: f64,
    pub price_impact: f64, // As percentage
    pub slippage: f64, // In price units
    pub levels_consumed: usize,
    pub total_shares: f64,
}
/// Track orderbook dynamics over time
pub struct OrderBookDynamics {
    // Rolling history of (timestamp, analytics), oldest first.
    snapshots: Vec<(chrono::DateTime<chrono::Utc>, OrderBookAnalytics)>,
    // Cap on history length; oldest entries are evicted beyond this.
    max_history: usize,
}
impl OrderBookDynamics {
    /// Empty history with a fixed capacity of `max_history` snapshots.
    pub fn new(max_history: usize) -> Self {
        Self {
            snapshots: Vec::new(),
            max_history,
        }
    }
    /// Append a snapshot, evicting the oldest entry once over capacity.
    pub fn add_snapshot(&mut self, timestamp: chrono::DateTime<chrono::Utc>, analytics: OrderBookAnalytics) {
        self.snapshots.push((timestamp, analytics));
        if self.snapshots.len() > self.max_history {
            // O(n) shift; acceptable for the small histories used here.
            self.snapshots.remove(0);
        }
    }
    /// Population standard deviation of mid prices over the last `window`
    /// snapshots; `None` until enough history has accumulated.
    pub fn get_volatility(&self, window: usize) -> Option<f64> {
        if self.snapshots.len() < window {
            return None;
        }
        let recent = &self.snapshots[self.snapshots.len() - window..];
        let mid_prices: Vec<f64> = recent.iter().map(|(_, a)| a.mid_price).collect();
        let mean = mid_prices.iter().sum::<f64>() / mid_prices.len() as f64;
        let variance = mid_prices.iter()
            .map(|p| (p - mean).powi(2))
            .sum::<f64>() / mid_prices.len() as f64;
        Some(variance.sqrt())
    }
    /// Mean spread over the last `window` snapshots, or `None` if short.
    pub fn get_average_spread(&self, window: usize) -> Option<f64> {
        if self.snapshots.len() < window {
            return None;
        }
        let recent = &self.snapshots[self.snapshots.len() - window..];
        let total_spread: f64 = recent.iter().map(|(_, a)| a.spread).sum();
        Some(total_spread / window as f64)
    }
    /// Mean weighted depth-imbalance over the last `window` snapshots;
    /// the sign indicates momentum direction.
    pub fn detect_momentum(&self, window: usize) -> Option<f64> {
        if self.snapshots.len() < window {
            return None;
        }
        let recent = &self.snapshots[self.snapshots.len() - window..];
        let imbalances: Vec<f64> = recent.iter()
            .map(|(_, a)| a.depth_imbalance.weighted)
            .collect();
        // Average imbalance indicates momentum direction
        Some(imbalances.iter().sum::<f64>() / imbalances.len() as f64)
    }
}

View file

@ -1,3 +1,5 @@
pub mod analytics;
use crate::{Quote, Trade, Side, OrderBookSnapshot, PriceLevel};
use chrono::{DateTime, Utc};
use dashmap::DashMap;
@ -5,6 +7,8 @@ use parking_lot::RwLock;
use std::collections::BTreeMap;
use std::sync::Arc;
pub use analytics::{OrderBookAnalytics, LiquidityProfile, OrderBookImbalance, MarketImpact};
// Manages order books for all symbols
pub struct OrderBookManager {
books: DashMap<String, Arc<RwLock<OrderBook>>>,
@ -49,6 +53,16 @@ impl OrderBookManager {
book_guard.get_best_bid_ask()
})
}
/// Snapshot the book for `symbol` to `depth` levels and compute
/// top-of-book analytics; `None` if the symbol is unknown or either
/// side of the book is empty.
pub fn get_analytics(&self, symbol: &str, depth: usize) -> Option<OrderBookAnalytics> {
    self.get_snapshot(symbol, depth)
        .and_then(|snapshot| OrderBookAnalytics::calculate(&snapshot))
}
/// Snapshot the book for `symbol` and build its cumulative liquidity
/// profile; `None` only if no snapshot is available.
pub fn get_liquidity_profile(&self, symbol: &str, depth: usize) -> Option<LiquidityProfile> {
    self.get_snapshot(symbol, depth)
        .map(|snapshot| LiquidityProfile::from_snapshot(&snapshot))
}
}
// Individual order book for a symbol
@ -183,6 +197,61 @@ impl OrderBook {
let best_ask = self.asks.values().next()?.price;
Some((best_bid, best_ask))
}
/// Midpoint of best bid/ask, or `None` if either side is empty.
pub fn get_mid_price(&self) -> Option<f64> {
    self.get_best_bid_ask()
        .map(|(bid, ask)| (bid + ask) / 2.0)
}
/// Best-ask minus best-bid, or `None` if either side is empty.
pub fn get_spread(&self) -> Option<f64> {
    self.get_best_bid_ask()
        .map(|(bid, ask)| ask - bid)
}
/// Total resting size at or better than `price` on the given side:
/// bids at or above `price` for `Buy`, asks at or below for `Sell`.
pub fn get_depth_at_price(&self, price: f64, side: Side) -> f64 {
    match side {
        Side::Buy => {
            self.bids.values()
                .filter(|level| level.price >= price)
                .map(|level| level.size)
                .sum()
        }
        Side::Sell => {
            self.asks.values()
                .filter(|level| level.price <= price)
                .map(|level| level.size)
                .sum()
        }
    }
}
/// Volume-weighted fill price for an order of `size` shares: buys walk
/// the asks, sells walk the bids. Returns `None` when the opposing side
/// is empty. NOTE(review): if the book holds less than `size`, the VWAP
/// of the partial fill is returned — confirm callers expect that.
pub fn get_volume_weighted_price(&self, size: f64, side: Side) -> Option<f64> {
    let levels: Vec<&Level> = match side {
        Side::Buy => self.asks.values().collect(),
        Side::Sell => self.bids.values().collect(),
    };
    let mut remaining_size = size;
    let mut total_cost = 0.0;
    let mut total_shares = 0.0;
    for level in levels {
        if remaining_size <= 0.0 {
            break;
        }
        let fill_size = remaining_size.min(level.size);
        total_cost += fill_size * level.price;
        total_shares += fill_size;
        remaining_size -= fill_size;
    }
    if total_shares > 0.0 {
        Some(total_cost / total_shares)
    } else {
        None
    }
}
fn clean_stale_levels(&mut self, current_time: DateTime<Utc>) {
let stale_threshold = chrono::Duration::seconds(60); // 60 seconds

View file

@ -0,0 +1,505 @@
use std::collections::HashMap;
use crate::orderbook::analytics::{OrderBookAnalytics, LiquidityProfile};
use serde::{Deserialize, Serialize};
/// Inputs to one position-sizing decision.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BetSizingParameters {
    pub signal_strength: f64, // -1 to 1
    pub signal_confidence: f64, // 0 to 1
    pub market_regime: MarketRegime,
    pub volatility: f64,
    pub liquidity_score: f64,
    pub correlation_exposure: f64,
    pub current_drawdown: f64,
}
/// Coarse market-state classification used to scale risk.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum MarketRegime {
    Trending,
    RangeBound,
    HighVolatility,
    LowVolatility,
    Transitioning,
}
/// Final sizing decision plus the audit trail of adjustments applied.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PositionSize {
    pub shares: u32,
    pub notional_value: f64,
    pub percent_of_capital: f64,
    // Final risk fraction after all multiplicative adjustments.
    pub risk_adjusted_size: f64,
    pub sizing_method: String,
    pub adjustments: Vec<SizeAdjustment>,
}
/// One multiplicative adjustment applied during sizing, for auditing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SizeAdjustment {
    pub reason: String,
    pub factor: f64,
}
/// Position-sizing engine: starts from a base risk fraction and applies
/// Kelly, signal, volatility, regime, liquidity, correlation and
/// drawdown multipliers.
pub struct BetSizer {
    capital: f64,
    base_risk_per_trade: f64,
    max_position_size: f64,
    min_position_size: f64,
    use_kelly: bool,
    volatility_scaling: bool,
    // Per-regime risk multipliers (see `new` for the defaults).
    regime_adjustments: HashMap<MarketRegime, f64>,
}
impl BetSizer {
/// Build a sizer with default limits (0.1%–10% of capital), Kelly and
/// volatility scaling enabled, and fixed per-regime multipliers.
pub fn new(capital: f64, base_risk_per_trade: f64) -> Self {
    let mut regime_adjustments = HashMap::new();
    regime_adjustments.insert(MarketRegime::Trending, 1.2);
    regime_adjustments.insert(MarketRegime::RangeBound, 0.8);
    regime_adjustments.insert(MarketRegime::HighVolatility, 0.6);
    regime_adjustments.insert(MarketRegime::LowVolatility, 1.1);
    regime_adjustments.insert(MarketRegime::Transitioning, 0.7);
    Self {
        capital,
        base_risk_per_trade,
        max_position_size: 0.10, // 10% max
        min_position_size: 0.001, // 0.1% min
        use_kelly: true,
        volatility_scaling: true,
        regime_adjustments,
    }
}
/// Size a position by multiplying the base risk fraction through up to
/// seven adjustments (Kelly, signal, volatility, regime, liquidity,
/// correlation, drawdown), then converting the resulting fraction into
/// shares — risk-based when a stop is supplied, fixed-percentage
/// otherwise — clamped to the min/max position limits.
pub fn calculate_position_size(
    &self,
    params: &BetSizingParameters,
    price: f64,
    stop_loss: Option<f64>,
    historical_performance: Option<&PerformanceStats>,
    orderbook_analytics: Option<&OrderBookAnalytics>,
    liquidity_profile: Option<&LiquidityProfile>,
) -> PositionSize {
    let mut adjustments = Vec::new();
    // Start with base position size
    let mut risk_fraction = self.base_risk_per_trade;
    // 1. Kelly Criterion adjustment
    if self.use_kelly {
        if let Some(perf) = historical_performance {
            let kelly_fraction = self.calculate_kelly_fraction(perf);
            // Cap the Kelly influence to 0.1x–2x of base risk.
            let kelly_adjustment = (kelly_fraction / self.base_risk_per_trade).min(2.0).max(0.1);
            risk_fraction *= kelly_adjustment;
            adjustments.push(SizeAdjustment {
                reason: "Kelly Criterion".to_string(),
                factor: kelly_adjustment,
            });
        }
    }
    // 2. Signal strength adjustment
    let signal_adjustment = self.calculate_signal_adjustment(params.signal_strength, params.signal_confidence);
    risk_fraction *= signal_adjustment;
    adjustments.push(SizeAdjustment {
        reason: "Signal Strength".to_string(),
        factor: signal_adjustment,
    });
    // 3. Volatility adjustment
    if self.volatility_scaling {
        let vol_adjustment = self.calculate_volatility_adjustment(params.volatility);
        risk_fraction *= vol_adjustment;
        adjustments.push(SizeAdjustment {
            reason: "Volatility Scaling".to_string(),
            factor: vol_adjustment,
        });
    }
    // 4. Market regime adjustment
    if let Some(&regime_factor) = self.regime_adjustments.get(&params.market_regime) {
        risk_fraction *= regime_factor;
        adjustments.push(SizeAdjustment {
            reason: format!("Market Regime ({:?})", params.market_regime),
            factor: regime_factor,
        });
    }
    // 5. Liquidity adjustment
    let liquidity_adjustment = self.calculate_liquidity_adjustment(
        params.liquidity_score,
        orderbook_analytics,
        liquidity_profile,
    );
    risk_fraction *= liquidity_adjustment;
    adjustments.push(SizeAdjustment {
        reason: "Liquidity".to_string(),
        factor: liquidity_adjustment,
    });
    // 6. Correlation adjustment
    let correlation_adjustment = self.calculate_correlation_adjustment(params.correlation_exposure);
    risk_fraction *= correlation_adjustment;
    adjustments.push(SizeAdjustment {
        reason: "Correlation Exposure".to_string(),
        factor: correlation_adjustment,
    });
    // 7. Drawdown adjustment
    let drawdown_adjustment = self.calculate_drawdown_adjustment(params.current_drawdown);
    risk_fraction *= drawdown_adjustment;
    adjustments.push(SizeAdjustment {
        reason: "Current Drawdown".to_string(),
        factor: drawdown_adjustment,
    });
    // Calculate final position size (in shares).
    let position_value = if let Some(stop_price) = stop_loss {
        // Risk-based sizing
        // NOTE(review): if stop_price == price this divides by zero,
        // yielding infinite shares before the max clamp — confirm callers
        // never pass a stop equal to entry.
        let risk_per_share = (price - stop_price).abs();
        let risk_amount = self.capital * risk_fraction;
        risk_amount / risk_per_share
    } else {
        // Fixed percentage sizing
        self.capital * risk_fraction / price
    };
    // Apply min/max constraints
    let constrained_value = position_value
        .min(self.capital * self.max_position_size / price)
        .max(self.capital * self.min_position_size / price);
    let shares = constrained_value.floor() as u32;
    let notional_value = shares as f64 * price;
    let percent_of_capital = notional_value / self.capital;
    PositionSize {
        shares,
        notional_value,
        percent_of_capital,
        risk_adjusted_size: risk_fraction,
        sizing_method: if stop_loss.is_some() { "Risk-based".to_string() } else { "Fixed-percentage".to_string() },
        adjustments,
    }
}
/// Quarter-Kelly fraction from historical win rate and win/loss ratio,
/// clamped to [0, 0.25]; falls back to the base risk when there are
/// fewer than 20 trades or no loss data.
fn calculate_kelly_fraction(&self, perf: &PerformanceStats) -> f64 {
    if perf.total_trades < 20 {
        return self.base_risk_per_trade; // Not enough data
    }
    let win_rate = perf.win_rate;
    let loss_rate = 1.0 - win_rate;
    if perf.avg_loss == 0.0 {
        return self.base_risk_per_trade;
    }
    let win_loss_ratio = perf.avg_win / perf.avg_loss.abs();
    // Kelly formula: f = p - q/b
    // where p = win probability, q = loss probability, b = win/loss ratio
    let kelly = win_rate - (loss_rate / win_loss_ratio);
    // Apply Kelly fraction (typically 25% of full Kelly)
    let kelly_fraction = kelly * 0.25;
    // Ensure it's positive and reasonable
    kelly_fraction.max(0.0).min(0.25)
}
/// Map signal magnitude and confidence into a 0.5..1.5 size multiplier.
///
/// Fix: the original computed `(strength.abs() + 1.0) / 2.0`, which maps
/// |strength| in [0, 1] onto [0.5, 1] instead of the [0, 1] range its own
/// comment states — a zero-strength signal still received a floor
/// adjustment of 0.85. Using the magnitude directly (clamped to 1.0 for
/// out-of-range inputs) restores the documented 0.5..1.5 output range.
fn calculate_signal_adjustment(&self, strength: f64, confidence: f64) -> f64 {
    // Strength is -1 to 1; its magnitude is already the 0..1 score.
    let normalized_strength = strength.abs().min(1.0);
    // Combine strength (70%) and confidence (30%).
    let signal_score = normalized_strength * 0.7 + confidence * 0.3;
    // Map to adjustment factor (0.5 to 1.5)
    0.5 + signal_score
}
/// Inverse-volatility size multiplier: scale toward a ~15% annualized
/// (daily-equivalent) volatility target, bounded to [0.5, 1.5].
fn calculate_volatility_adjustment(&self, volatility: f64) -> f64 {
    // Convert the 15% annual target to daily terms (√252 trading days).
    let daily_target_vol = 0.15 / (252.0_f64.sqrt());
    // Higher realized vol -> smaller size, and vice versa, clamped.
    (daily_target_vol / volatility).min(1.5).max(0.5)
}
/// Multiplicative liquidity factor combining the raw liquidity score,
/// the current spread (when orderbook analytics are supplied), and the
/// simulated market impact of a typical order (when a profile is
/// supplied). Penalties stack multiplicatively.
fn calculate_liquidity_adjustment(
    &self,
    liquidity_score: f64,
    orderbook: Option<&OrderBookAnalytics>,
    profile: Option<&LiquidityProfile>,
) -> f64 {
    let mut adjustment = 1.0;
    // Base liquidity score adjustment
    if liquidity_score < 0.3 {
        adjustment *= 0.5; // Very poor liquidity
    } else if liquidity_score < 0.5 {
        adjustment *= 0.7;
    } else if liquidity_score > 0.8 {
        adjustment *= 1.1; // Good liquidity bonus
    }
    // Orderbook spread adjustment
    if let Some(ob) = orderbook {
        if ob.spread_bps > 50.0 {
            adjustment *= 0.8; // Wide spread penalty
        } else if ob.spread_bps < 10.0 {
            adjustment *= 1.1; // Tight spread bonus
        }
    }
    // Market impact consideration
    if let Some(prof) = profile {
        // Check if our typical order size would move the market
        let typical_order_value = self.capital * self.base_risk_per_trade;
        // Simulated as a buy; impact is assumed roughly symmetric here.
        let impact = prof.calculate_market_impact(typical_order_value, true);
        if impact.price_impact > 0.001 { // More than 10 bps impact
            // Scale down linearly with impact, floored at a 50% cut.
            adjustment *= (1.0 - impact.price_impact * 10.0).max(0.5);
        }
    }
    adjustment
}
/// Shrink size as correlation with existing positions grows: tiered
/// multipliers from 1.0 (low overlap) down to 0.5 (heavy overlap).
fn calculate_correlation_adjustment(&self, correlation_exposure: f64) -> f64 {
    match correlation_exposure {
        c if c > 0.7 => 0.5,
        c if c > 0.5 => 0.7,
        c if c > 0.3 => 0.9,
        _ => 1.0,
    }
}
/// Throttle risk while the account is in a drawdown: deeper drawdowns
/// get progressively smaller multipliers, down to 0.5 past 20%.
fn calculate_drawdown_adjustment(&self, current_drawdown: f64) -> f64 {
    match current_drawdown {
        d if d > 0.20 => 0.5, // 50% reduction in a 20%+ drawdown
        d if d > 0.10 => 0.7,
        d if d > 0.05 => 0.85,
        _ => 1.0,
    }
}
/// Choose a stop price from up to four candidates — volatility-based,
/// ATR-based, nearest support/resistance, and a 5% max-loss cap — then
/// take the most conservative one (closest to entry): the highest
/// candidate for longs, the lowest for shorts.
pub fn calculate_optimal_stop_loss(
    &self,
    entry_price: f64,
    volatility: f64,
    support_levels: &[f64],
    atr: Option<f64>,
    is_long: bool,
) -> f64 {
    let mut stop_candidates = Vec::new();
    // 1. Volatility-based stop: 2x volatility away from entry.
    let vol_stop = if is_long {
        entry_price * (1.0 - 2.0 * volatility)
    } else {
        entry_price * (1.0 + 2.0 * volatility)
    };
    stop_candidates.push(("Volatility", vol_stop));
    // 2. ATR-based stop: 2 ATRs away from entry.
    if let Some(atr_value) = atr {
        let atr_stop = if is_long {
            entry_price - 2.0 * atr_value
        } else {
            entry_price + 2.0 * atr_value
        };
        stop_candidates.push(("ATR", atr_stop));
    }
    // 3. Support/Resistance based stop
    if !support_levels.is_empty() {
        let technical_stop = if is_long {
            // Find nearest support below entry
            support_levels.iter()
                .filter(|&&level| level < entry_price)
                .max_by(|a, b| a.partial_cmp(b).unwrap())
                .map(|&level| level * 0.995) // Just below support
        } else {
            // Find nearest resistance above entry
            support_levels.iter()
                .filter(|&&level| level > entry_price)
                .min_by(|a, b| a.partial_cmp(b).unwrap())
                .map(|&level| level * 1.005) // Just above resistance
        };
        if let Some(stop) = technical_stop {
            stop_candidates.push(("Technical", stop));
        }
    }
    // 4. Maximum loss stop (e.g., 5% from entry)
    let max_loss_stop = if is_long {
        entry_price * 0.95
    } else {
        entry_price * 1.05
    };
    stop_candidates.push(("MaxLoss", max_loss_stop));
    // Choose the most conservative stop (closest to entry)
    let optimal_stop = if is_long {
        stop_candidates.iter()
            .map(|(_, stop)| *stop)
            .max_by(|a, b| a.partial_cmp(b).unwrap())
            .unwrap_or(vol_stop)
    } else {
        stop_candidates.iter()
            .map(|(_, stop)| *stop)
            .min_by(|a, b| a.partial_cmp(b).unwrap())
            .unwrap_or(vol_stop)
    };
    optimal_stop
}
}
/// Summary trade statistics used to drive Kelly sizing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceStats {
    pub total_trades: u32,
    pub win_rate: f64,
    pub avg_win: f64,
    pub avg_loss: f64,
    pub sharpe_ratio: f64,
    pub max_consecutive_losses: u32,
}
/// Dynamic position sizing based on market conditions
pub struct DynamicSizer {
    // Underlying sizing engine.
    base_sizer: BetSizer,
    // Classifies the current market regime from rolling stats.
    regime_detector: MarketRegimeDetector,
    // Tracks realized PnL and drawdown across recorded trades.
    performance_tracker: PerformanceTracker,
}
impl DynamicSizer {
    /// Compose a sizer, regime detector, and performance tracker for the
    /// given capital and base risk per trade.
    pub fn new(capital: f64, base_risk: f64) -> Self {
        Self {
            base_sizer: BetSizer::new(capital, base_risk),
            regime_detector: MarketRegimeDetector::new(),
            performance_tracker: PerformanceTracker::new(),
        }
    }
    /// Feed one observation of market state into the regime detector.
    pub fn update_market_data(&mut self, volatility: f64, volume: f64, trend_strength: f64) {
        self.regime_detector.update(volatility, volume, trend_strength);
    }
    /// Record a closed trade's PnL (capital and drawdown are updated).
    pub fn record_trade(&mut self, pnl: f64, holding_period: i64) {
        self.performance_tracker.record_trade(pnl, holding_period);
    }
    /// Pre-fill sizing parameters from internal state; signal, liquidity
    /// and correlation fields are left zeroed for the caller to supply.
    pub fn get_sizing_parameters(&self) -> BetSizingParameters {
        BetSizingParameters {
            signal_strength: 0.0, // To be filled by strategy
            signal_confidence: 0.0, // To be filled by strategy
            market_regime: self.regime_detector.current_regime(),
            volatility: self.regime_detector.current_volatility(),
            liquidity_score: 0.0, // To be filled from orderbook
            correlation_exposure: 0.0, // To be filled from portfolio
            current_drawdown: self.performance_tracker.current_drawdown(),
        }
    }
}
/// Rolling-window regime classifier fed by volatility, volume, and
/// trend-strength observations.
struct MarketRegimeDetector {
    volatility_history: Vec<f64>,
    volume_history: Vec<f64>,
    trend_history: Vec<f64>,
    // Max samples retained per series (oldest evicted first).
    window_size: usize,
}
impl MarketRegimeDetector {
    /// Empty histories with a fixed 20-sample window.
    fn new() -> Self {
        Self {
            volatility_history: Vec::new(),
            volume_history: Vec::new(),
            trend_history: Vec::new(),
            window_size: 20,
        }
    }
    /// Append one observation to each series, evicting the oldest sample
    /// once past the window size. The three series stay in lockstep.
    fn update(&mut self, volatility: f64, volume: f64, trend_strength: f64) {
        self.volatility_history.push(volatility);
        self.volume_history.push(volume);
        self.trend_history.push(trend_strength);
        // Keep only recent history
        if self.volatility_history.len() > self.window_size {
            self.volatility_history.remove(0);
            self.volume_history.remove(0);
            self.trend_history.remove(0);
        }
    }
    /// Classify the regime from average volatility and |trend|.
    /// Volatility thresholds take priority over trend; fewer than 5
    /// samples always yields `Transitioning`.
    fn current_regime(&self) -> MarketRegime {
        if self.volatility_history.len() < 5 {
            return MarketRegime::Transitioning;
        }
        let avg_vol = self.volatility_history.iter().sum::<f64>() / self.volatility_history.len() as f64;
        let avg_trend = self.trend_history.iter().sum::<f64>() / self.trend_history.len() as f64;
        if avg_vol > 0.02 {
            MarketRegime::HighVolatility
        } else if avg_vol < 0.01 {
            MarketRegime::LowVolatility
        } else if avg_trend.abs() > 0.7 {
            MarketRegime::Trending
        } else if avg_trend.abs() < 0.3 {
            MarketRegime::RangeBound
        } else {
            MarketRegime::Transitioning
        }
    }
    /// Mean of the volatility window, or a 1.5% daily default when empty.
    fn current_volatility(&self) -> f64 {
        if self.volatility_history.is_empty() {
            return 0.015; // Default 1.5% daily vol
        }
        self.volatility_history.iter().sum::<f64>() / self.volatility_history.len() as f64
    }
}
/// Running PnL ledger used to derive the current drawdown.
struct PerformanceTracker {
    trades: Vec<(f64, i64)>, // (pnl, holding_period)
    // High-water mark of capital; drawdown is measured against this.
    peak_capital: f64,
    current_capital: f64,
}
impl PerformanceTracker {
    /// Start with a hard-coded $100k capital baseline.
    /// NOTE(review): this baseline is independent of BetSizer's capital —
    /// confirm whether they should be kept in sync.
    fn new() -> Self {
        Self {
            trades: Vec::new(),
            peak_capital: 100000.0,
            current_capital: 100000.0,
        }
    }
    /// Record a closed trade, update capital, and ratchet the high-water
    /// mark upward.
    fn record_trade(&mut self, pnl: f64, holding_period: i64) {
        self.trades.push((pnl, holding_period));
        self.current_capital += pnl;
        if self.current_capital > self.peak_capital {
            self.peak_capital = self.current_capital;
        }
    }
    /// Fractional drawdown from the high-water mark (0 when at the peak).
    fn current_drawdown(&self) -> f64 {
        if self.peak_capital > 0.0 {
            (self.peak_capital - self.current_capital) / self.peak_capital
        } else {
            0.0
        }
    }
}

View file

@ -1,9 +1,15 @@
pub mod portfolio;
pub mod bet_sizing;
use crate::{Order, Side};
use dashmap::DashMap;
use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
pub use portfolio::{PortfolioRisk, RiskModel, CorrelationMatrix, ConcentrationMetrics};
pub use bet_sizing::{BetSizer, BetSizingParameters, PositionSize, MarketRegime, DynamicSizer};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RiskLimits {
pub max_position_size: f64,

View file

@ -0,0 +1,533 @@
use std::collections::HashMap;
use nalgebra::{DMatrix, DVector};
use crate::positions::Position;
use serde::{Deserialize, Serialize};
/// Full portfolio risk report produced by `RiskModel::calculate_portfolio_risk`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PortfolioRisk {
    /// Parametric value-at-risk at 95% confidence, in currency units.
    pub total_var_95: f64,
    /// Parametric value-at-risk at 99% confidence, in currency units.
    pub total_var_99: f64,
    /// Conditional VaR (expected shortfall) at 95%, from historical simulation.
    pub total_cvar_95: f64,
    /// Per-symbol marginal VaR: sensitivity of portfolio VaR to that symbol's weight.
    pub marginal_var: HashMap<String, f64>,
    /// Per-symbol component VaR: marginal VaR scaled by the symbol's weight.
    pub component_var: HashMap<String, f64>,
    /// Pairwise correlation structure of the portfolio's symbols.
    pub correlation_matrix: CorrelationMatrix,
    /// Position-concentration measures (Herfindahl index, top-5 share, …).
    pub concentration_risk: ConcentrationMetrics,
    /// Scenario name -> estimated loss for each stress test.
    pub stress_test_results: HashMap<String, f64>,
}
/// Pairwise correlations between portfolio symbols plus summary statistics.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CorrelationMatrix {
    /// Symbol order for the rows/columns of `matrix`.
    pub symbols: Vec<String>,
    /// Symmetric correlation matrix; `matrix[i][j]` relates `symbols[i]` and `symbols[j]`.
    pub matrix: Vec<Vec<f64>>,
    /// Mean of the absolute off-diagonal correlations.
    pub average_correlation: f64,
    /// Most strongly correlated symbol pair and its (signed) correlation.
    pub max_correlation: (String, String, f64),
    pub clustering_score: f64, // How clustered the portfolio is
}
/// How concentrated the portfolio is across its positions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConcentrationMetrics {
    /// Sum of squared position weights (1/n for equal weights; 1.0 for a single position).
    pub herfindahl_index: f64,
    /// Inverse Herfindahl: the equivalent number of equally-weighted positions.
    pub effective_number_of_positions: f64,
    /// Fraction of total portfolio value held in the five largest positions.
    pub top_5_concentration: f64,
    /// Sector name -> weight. Currently always empty (sector mapping not wired up).
    pub sector_concentration: HashMap<String, f64>,
}
/// Historical-returns-based risk estimator for a portfolio of symbols.
#[derive(Debug, Clone)]
pub struct RiskModel {
    /// Per-symbol daily return series, keyed by symbol.
    returns_history: HashMap<String, Vec<f64>>,
    /// Intended number of historical observations.
    /// NOTE(review): not enforced anywhere in this file yet — confirm intent.
    lookback_period: usize,
    /// Confidence levels of interest (defaults to 95% and 99%).
    /// NOTE(review): currently unused by the calculations in the impl below.
    confidence_levels: Vec<f64>,
}
impl RiskModel {
    /// Create a model with an empty return history, an intended lookback
    /// window of `lookback_period` observations, and default 95%/99%
    /// confidence levels.
    pub fn new(lookback_period: usize) -> Self {
        Self {
            returns_history: HashMap::new(),
            lookback_period,
            confidence_levels: vec![0.95, 0.99],
        }
    }

    /// Replace the stored return series for `symbol`.
    pub fn update_returns(&mut self, symbol: &str, returns: Vec<f64>) {
        self.returns_history.insert(symbol.to_string(), returns);
    }

    /// Produce the full portfolio risk report: parametric VaR, historical
    /// CVaR, per-symbol VaR decomposition, correlation structure,
    /// concentration metrics and stress-test losses.
    ///
    /// # Errors
    /// Returns `Err` when the portfolio has zero value, or when any symbol
    /// lacks (or has mismatched / too-short) return history.
    pub fn calculate_portfolio_risk(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
    ) -> Result<PortfolioRisk, String> {
        // Active symbols and their |value| weights.
        let (symbols, weights) = self.get_portfolio_weights(positions, current_prices)?;
        // Sample covariance of the symbols' return series.
        let cov_matrix = self.calculate_covariance_matrix(&symbols)?;
        // Portfolio volatility = sqrt(w' Σ w).
        let portfolio_variance = self.calculate_portfolio_variance(&weights, &cov_matrix);
        let portfolio_vol = portfolio_variance.sqrt();
        // Parametric (normal) VaR at the one-sided 95% / 99% quantiles.
        let total_portfolio_value = self.calculate_total_value(positions, current_prices);
        let total_var_95 = total_portfolio_value * portfolio_vol * 1.645; // 95% confidence
        let total_var_99 = total_portfolio_value * portfolio_vol * 2.326; // 99% confidence
        // Historical-simulation CVaR (expected loss beyond the 95% VaR).
        let total_cvar_95 = self.calculate_cvar(&symbols, &weights, 0.95)?;
        // Marginal and component VaR per symbol.
        let (marginal_var, component_var) = self.calculate_var_decomposition(
            &symbols,
            &weights,
            &cov_matrix,
            portfolio_vol,
            total_portfolio_value,
        )?;
        // Correlation analysis.
        let correlation_matrix = self.calculate_correlation_matrix(&symbols)?;
        // Concentration metrics.
        let concentration_risk = self.calculate_concentration_metrics(positions, current_prices);
        // Stress tests.
        let stress_test_results = self.run_stress_tests(positions, current_prices, &cov_matrix)?;
        Ok(PortfolioRisk {
            total_var_95,
            total_var_99,
            total_cvar_95,
            marginal_var,
            component_var,
            correlation_matrix,
            concentration_risk,
            stress_test_results,
        })
    }

    /// Symbols that have a live price, paired with their absolute-value
    /// portfolio weights (shorts are weighted by |value|, so weights sum to 1).
    /// Positions without a current price are skipped entirely.
    fn get_portfolio_weights(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
    ) -> Result<(Vec<String>, DVector<f64>), String> {
        let mut symbols = Vec::new();
        let mut values = Vec::new();
        let mut total_value = 0.0;
        for (symbol, position) in positions {
            if let Some(&price) = current_prices.get(symbol) {
                let position_value = position.quantity * price;
                symbols.push(symbol.clone());
                values.push(position_value.abs());
                total_value += position_value.abs();
            }
        }
        if total_value == 0.0 {
            return Err("Portfolio has zero value".to_string());
        }
        let weights = DVector::from_vec(values.iter().map(|v| v / total_value).collect());
        Ok((symbols, weights))
    }

    /// Sample covariance matrix over the symbols' stored return series.
    /// Fills the upper triangle and mirrors it — covariance is symmetric.
    fn calculate_covariance_matrix(&self, symbols: &[String]) -> Result<DMatrix<f64>, String> {
        let n = symbols.len();
        let mut matrix = DMatrix::zeros(n, n);
        for i in 0..n {
            for j in i..n {
                let cov = self.calculate_covariance(&symbols[i], &symbols[j])?;
                matrix[(i, j)] = cov;
                matrix[(j, i)] = cov;
            }
        }
        Ok(matrix)
    }

    /// Unbiased (n-1 denominator) sample covariance between two return series.
    ///
    /// # Errors
    /// Errors when either series is missing, the lengths differ, or there
    /// are fewer than two observations (the n-1 denominator would be zero,
    /// previously yielding NaN/inf silently).
    fn calculate_covariance(&self, symbol1: &str, symbol2: &str) -> Result<f64, String> {
        let returns1 = self.returns_history.get(symbol1)
            .ok_or_else(|| format!("No returns data for {}", symbol1))?;
        let returns2 = self.returns_history.get(symbol2)
            .ok_or_else(|| format!("No returns data for {}", symbol2))?;
        if returns1.len() != returns2.len() {
            return Err("Mismatched returns length".to_string());
        }
        if returns1.len() < 2 {
            return Err("Not enough returns data for covariance".to_string());
        }
        let n = returns1.len() as f64;
        let mean1 = returns1.iter().sum::<f64>() / n;
        let mean2 = returns2.iter().sum::<f64>() / n;
        let covariance = returns1.iter()
            .zip(returns2.iter())
            .map(|(r1, r2)| (r1 - mean1) * (r2 - mean2))
            .sum::<f64>() / (n - 1.0);
        Ok(covariance)
    }

    /// Quadratic form w' Σ w — the portfolio return variance.
    fn calculate_portfolio_variance(&self, weights: &DVector<f64>, cov_matrix: &DMatrix<f64>) -> f64 {
        let variance = weights.transpose() * cov_matrix * weights;
        variance[(0, 0)]
    }

    /// Marginal VaR (∂VaR/∂w_i, scaled to currency) and component VaR
    /// (marginal times weight) for each symbol, at the 95% quantile.
    fn calculate_var_decomposition(
        &self,
        symbols: &[String],
        weights: &DVector<f64>,
        cov_matrix: &DMatrix<f64>,
        portfolio_vol: f64,
        total_value: f64,
    ) -> Result<(HashMap<String, f64>, HashMap<String, f64>), String> {
        let mut marginal_var = HashMap::new();
        let mut component_var = HashMap::new();
        // (Σ w)_i / σ_p is the marginal contribution of asset i to portfolio vol.
        let cov_weights = cov_matrix * weights;
        for (i, symbol) in symbols.iter().enumerate() {
            let marginal = (cov_weights[i] / portfolio_vol) * 1.645 * total_value;
            let component = marginal * weights[i];
            marginal_var.insert(symbol.clone(), marginal);
            component_var.insert(symbol.clone(), component);
        }
        Ok((marginal_var, component_var))
    }

    /// Conditional VaR (expected shortfall) at `confidence`, estimated by
    /// historical bootstrap simulation of portfolio returns. Reported as a
    /// positive loss; 0.0 when the tail is empty.
    fn calculate_cvar(&self, symbols: &[String], weights: &DVector<f64>, confidence: f64) -> Result<f64, String> {
        // Simulate, then sort ascending in place (no intermediate clone):
        // the left tail holds the worst outcomes. NaN-safe comparator keeps
        // the sort from panicking on degenerate inputs.
        let mut simulated = self.simulate_portfolio_returns(symbols, weights, 10000)?;
        simulated.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
        // Observations below the (1 - confidence) quantile form the tail.
        let var_index = ((1.0 - confidence) * simulated.len() as f64) as usize;
        let tail_losses = &simulated[..var_index.min(simulated.len())];
        if tail_losses.is_empty() {
            return Ok(0.0);
        }
        // Mean loss beyond VaR, sign-flipped to a positive number.
        Ok(-tail_losses.iter().sum::<f64>() / tail_losses.len() as f64)
    }

    /// Bootstrap `num_simulations` portfolio returns by resampling days
    /// (with replacement) from the shortest available historical series.
    fn simulate_portfolio_returns(
        &self,
        symbols: &[String],
        weights: &DVector<f64>,
        num_simulations: usize,
    ) -> Result<Vec<f64>, String> {
        let mut portfolio_returns = Vec::with_capacity(num_simulations);
        // All series are indexed with the same day; cap at the shortest one.
        let min_length = symbols.iter()
            .map(|s| self.returns_history.get(s).map(|r| r.len()).unwrap_or(0))
            .min()
            .unwrap_or(0);
        if min_length == 0 {
            return Err("No returns data available".to_string());
        }
        use rand::prelude::*;
        let mut rng = thread_rng();
        for _ in 0..num_simulations {
            // Pick one historical day and apply it to every symbol at once,
            // preserving the cross-sectional correlation of that day.
            let idx = rng.gen_range(0..min_length);
            let mut portfolio_return = 0.0;
            for (i, symbol) in symbols.iter().enumerate() {
                if let Some(returns) = self.returns_history.get(symbol) {
                    portfolio_return += weights[i] * returns[idx];
                }
            }
            portfolio_returns.push(portfolio_return);
        }
        Ok(portfolio_returns)
    }

    /// Pairwise correlation matrix plus summary stats: mean |correlation|,
    /// the most correlated pair, and a clustering score.
    fn calculate_correlation_matrix(&self, symbols: &[String]) -> Result<CorrelationMatrix, String> {
        let n = symbols.len();
        let mut matrix = vec![vec![0.0; n]; n];
        let mut sum_correlation = 0.0;
        let mut count = 0;
        let mut max_correlation = ("".to_string(), "".to_string(), 0.0);
        for (i, symbol_i) in symbols.iter().enumerate() {
            for (j, symbol_j) in symbols.iter().enumerate() {
                if i <= j {
                    let corr = if i == j {
                        1.0 // Self-correlation is exactly 1 by definition.
                    } else {
                        self.calculate_correlation(symbol_i, symbol_j)?
                    };
                    matrix[i][j] = corr;
                    matrix[j][i] = corr;
                    // Only off-diagonal entries enter the summary statistics.
                    if i != j {
                        sum_correlation += corr.abs();
                        count += 1;
                        if corr.abs() > max_correlation.2 {
                            max_correlation = (symbol_i.clone(), symbol_j.clone(), corr);
                        }
                    }
                }
            }
        }
        let average_correlation = if count > 0 {
            sum_correlation / count as f64
        } else {
            0.0
        };
        // Higher score = more clustered portfolio.
        let clustering_score = self.calculate_clustering_score(&matrix);
        Ok(CorrelationMatrix {
            symbols: symbols.to_vec(),
            matrix,
            average_correlation,
            max_correlation,
            clustering_score,
        })
    }

    /// Pearson correlation = cov / (σ1 σ2); 0.0 when either series is flat.
    fn calculate_correlation(&self, symbol1: &str, symbol2: &str) -> Result<f64, String> {
        let returns1 = self.returns_history.get(symbol1)
            .ok_or_else(|| format!("No returns data for {}", symbol1))?;
        let returns2 = self.returns_history.get(symbol2)
            .ok_or_else(|| format!("No returns data for {}", symbol2))?;
        let cov = self.calculate_covariance(symbol1, symbol2)?;
        let std1 = self.calculate_std_dev(returns1);
        let std2 = self.calculate_std_dev(returns2);
        if std1 == 0.0 || std2 == 0.0 {
            return Ok(0.0);
        }
        Ok(cov / (std1 * std2))
    }

    /// Unbiased sample standard deviation; 0.0 for fewer than two samples
    /// (previously produced NaN/inf via a zero n-1 denominator).
    fn calculate_std_dev(&self, returns: &[f64]) -> f64 {
        if returns.len() < 2 {
            return 0.0;
        }
        let n = returns.len() as f64;
        let mean = returns.iter().sum::<f64>() / n;
        let variance = returns.iter()
            .map(|r| (r - mean).powi(2))
            .sum::<f64>() / (n - 1.0);
        variance.sqrt()
    }

    /// Mean pairwise-minimum |correlation| over all asset triples whose
    /// three mutual correlations all exceed 0.5 — a crude cluster measure.
    fn calculate_clustering_score(&self, correlation_matrix: &[Vec<f64>]) -> f64 {
        let n = correlation_matrix.len();
        if n < 2 {
            return 0.0;
        }
        let mut cluster_sum = 0.0;
        let mut cluster_count = 0;
        // Look for triples of mutually highly-correlated assets.
        for i in 0..n {
            for j in i + 1..n {
                for k in j + 1..n {
                    let corr_ij = correlation_matrix[i][j].abs();
                    let corr_ik = correlation_matrix[i][k].abs();
                    let corr_jk = correlation_matrix[j][k].abs();
                    // If all three pairwise correlations are high, the
                    // triple counts as a cluster.
                    let min_corr = corr_ij.min(corr_ik).min(corr_jk);
                    if min_corr > 0.5 {
                        cluster_sum += min_corr;
                        cluster_count += 1;
                    }
                }
            }
        }
        if cluster_count > 0 {
            cluster_sum / cluster_count as f64
        } else {
            0.0
        }
    }

    /// Herfindahl index, effective position count and top-5 share of the
    /// current portfolio. Returns all-zero metrics for an empty / zero-value
    /// portfolio (previously divided by zero, producing NaN).
    fn calculate_concentration_metrics(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
    ) -> ConcentrationMetrics {
        let mut position_values: Vec<(String, f64)> = Vec::new();
        let mut total_value = 0.0;
        for (symbol, position) in positions {
            if let Some(&price) = current_prices.get(symbol) {
                let value = (position.quantity * price).abs();
                position_values.push((symbol.clone(), value));
                total_value += value;
            }
        }
        // Guard the divisions below: no value means no concentration.
        if total_value <= 0.0 {
            return ConcentrationMetrics {
                herfindahl_index: 0.0,
                effective_number_of_positions: 0.0,
                top_5_concentration: 0.0,
                sector_concentration: HashMap::new(),
            };
        }
        // Sort by value descending (NaN-safe comparator).
        position_values.sort_by(|a, b| {
            b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal)
        });
        // Herfindahl index: sum of squared weights.
        let herfindahl_index = position_values.iter()
            .map(|(_, value)| {
                let weight = value / total_value;
                weight * weight
            })
            .sum::<f64>();
        // Inverse Herfindahl = equivalent number of equal-weight positions.
        let effective_number_of_positions = if herfindahl_index > 0.0 {
            1.0 / herfindahl_index
        } else {
            0.0
        };
        // Share of value held by the five largest positions.
        let top_5_value: f64 = position_values.iter()
            .take(5)
            .map(|(_, value)| value)
            .sum();
        let top_5_concentration = top_5_value / total_value;
        // Sector concentration (simplified - would need sector mapping).
        let sector_concentration = HashMap::new(); // TODO: Implement with sector data
        ConcentrationMetrics {
            herfindahl_index,
            effective_number_of_positions,
            top_5_concentration,
            sector_concentration,
        }
    }

    /// Gross portfolio value: Σ |quantity × price| over priced positions.
    fn calculate_total_value(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
    ) -> f64 {
        positions.iter()
            .filter_map(|(symbol, position)| {
                current_prices.get(symbol).map(|&price| (position.quantity * price).abs())
            })
            .sum()
    }

    /// Run the standard stress scenarios and return scenario -> loss.
    fn run_stress_tests(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
        cov_matrix: &DMatrix<f64>,
    ) -> Result<HashMap<String, f64>, String> {
        let mut results = HashMap::new();
        // Market crash scenario: uniform 20% drop across the book.
        let market_crash_loss = self.calculate_scenario_loss(
            positions,
            current_prices,
            -0.20, // 20% market drop
        );
        results.insert("market_crash_20pct".to_string(), market_crash_loss);
        // Flight to quality: correlations converge toward 0.9.
        let flight_to_quality = self.calculate_correlation_stress(
            positions,
            current_prices,
            cov_matrix,
            0.9, // High correlation scenario
        )?;
        results.insert("flight_to_quality".to_string(), flight_to_quality);
        // Volatility spike: all vols doubled.
        let vol_spike = self.calculate_volatility_stress(
            positions,
            current_prices,
            cov_matrix,
            2.0, // Double volatility
        )?;
        results.insert("volatility_spike_2x".to_string(), vol_spike);
        Ok(results)
    }

    /// Loss from a uniform |shock| move applied to the gross portfolio value.
    fn calculate_scenario_loss(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
        shock: f64,
    ) -> f64 {
        let current_value = self.calculate_total_value(positions, current_prices);
        current_value * shock.abs()
    }

    /// 99% VaR under a stressed covariance matrix whose off-diagonal
    /// entries are scaled toward `target_correlation`.
    fn calculate_correlation_stress(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
        cov_matrix: &DMatrix<f64>,
        target_correlation: f64,
    ) -> Result<f64, String> {
        let n = cov_matrix.nrows();
        let mut stressed_cov = cov_matrix.clone();
        for i in 0..n {
            for j in 0..n {
                if i != j {
                    // Implied correlation of the current entry; the 0.1
                    // floor keeps the scaling factor bounded.
                    let current_corr = cov_matrix[(i, j)] / (cov_matrix[(i, i)].sqrt() * cov_matrix[(j, j)].sqrt());
                    let stress_factor = target_correlation / current_corr.abs().max(0.1);
                    stressed_cov[(i, j)] *= stress_factor;
                }
            }
        }
        let (_, weights) = self.get_portfolio_weights(positions, current_prices)?;
        let stressed_variance = self.calculate_portfolio_variance(&weights, &stressed_cov);
        // Scaling off-diagonals independently can leave the matrix
        // non-positive-semidefinite; clamp so the square root stays real
        // instead of silently producing NaN.
        let stressed_vol = stressed_variance.max(0.0).sqrt();
        Ok(self.calculate_total_value(positions, current_prices) * stressed_vol * 2.326) // 99% VaR
    }

    /// 99% VaR with every volatility multiplied by `vol_multiplier`
    /// (covariances scale by the multiplier squared).
    fn calculate_volatility_stress(
        &self,
        positions: &HashMap<String, Position>,
        current_prices: &HashMap<String, f64>,
        cov_matrix: &DMatrix<f64>,
        vol_multiplier: f64,
    ) -> Result<f64, String> {
        let stressed_cov = cov_matrix * vol_multiplier.powi(2);
        let (_, weights) = self.get_portfolio_weights(positions, current_prices)?;
        let stressed_variance = self.calculate_portfolio_variance(&weights, &stressed_cov);
        let stressed_vol = stressed_variance.sqrt();
        Ok(self.calculate_total_value(positions, current_prices) * stressed_vol * 2.326) // 99% VaR
    }
}

View file

@ -0,0 +1,272 @@
/**
* Advanced Risk Management Examples
* Demonstrates orderbook analytics, portfolio risk, and bet sizing
*/
import { TradingEngine, RiskAnalyzer, OrderbookAnalyzer } from '@stock-bot/core';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('AdvancedRiskExample');
// Example 1: Orderbook Analytics
// Round-trip demo: feed quotes into the engine, pull a JSON orderbook
// snapshot back out, and run the analyzer over it. The analyzer API
// exchanges JSON strings, so every result is JSON.parse'd before use.
async function orderbookAnalyticsExample() {
  console.log('\n=== Orderbook Analytics Example ===');
  const engine = new TradingEngine('paper', { startingCapital: 100000 });
  const obAnalyzer = new OrderbookAnalyzer();
  // Update orderbook with some data
  // Three quote updates at progressively wider bid/ask levels to build depth.
  const symbol = 'AAPL';
  engine.updateQuote(symbol, 149.95, 150.05, 1000, 1200);
  engine.updateQuote(symbol, 149.90, 150.10, 800, 900);
  engine.updateQuote(symbol, 149.85, 150.15, 600, 700);
  // Get orderbook snapshot (top 10 levels per side)
  const snapshotJson = engine.getOrderbookSnapshot(symbol, 10);
  // NOTE(review): `snapshot` is never used below — the analyzer consumes the
  // raw JSON string. Kept for illustration; consider removing.
  const snapshot = JSON.parse(snapshotJson);
  // Analyze orderbook
  const analyticsJson = obAnalyzer.analyzeOrderbook(snapshotJson);
  const analytics = JSON.parse(analyticsJson);
  console.log('Orderbook Analytics:');
  console.log(` Spread: $${analytics.spread.toFixed(2)} (${analytics.spread_bps.toFixed(1)} bps)`);
  console.log(` Mid Price: $${analytics.mid_price.toFixed(2)}`);
  console.log(` Micro Price: $${analytics.micro_price.toFixed(2)}`);
  console.log(` Imbalance: ${(analytics.imbalance * 100).toFixed(1)}%`);
  console.log(` Liquidity Score: ${analytics.liquidity_score.toFixed(2)}`);
  // Calculate liquidity profile
  const profileJson = obAnalyzer.calculateLiquidityProfile(snapshotJson);
  const profile = JSON.parse(profileJson);
  console.log('\nLiquidity Profile:');
  console.log(` Total Bid Depth: $${profile.total_bid_depth.toFixed(2)}`);
  console.log(` Total Ask Depth: $${profile.total_ask_depth.toFixed(2)}`);
  // Calculate market impact for a $10,000 buy order
  // (third argument presumably flags buy-side — TODO confirm against the API)
  const impactJson = obAnalyzer.calculateMarketImpact(snapshotJson, 10000, true);
  const impact = JSON.parse(impactJson);
  console.log('\nMarket Impact ($10k buy):');
  console.log(` Avg Execution Price: $${impact.avg_execution_price.toFixed(2)}`);
  console.log(` Price Impact: ${(impact.price_impact * 100).toFixed(2)}%`);
  console.log(` Slippage: $${impact.slippage.toFixed(2)}`);
  console.log(` Levels Consumed: ${impact.levels_consumed}`);
}
// Example 2: Portfolio Risk Analysis
// Builds a five-symbol portfolio with synthetic return history, then prints
// the resulting VaR/CVaR, concentration, correlation and stress-test output.
async function portfolioRiskExample() {
  console.log('\n=== Portfolio Risk Analysis Example ===');
  const riskAnalyzer = new RiskAnalyzer(100000, 0.02, 252); // $100k, 2% risk, 252 days lookback
  // Update historical returns for portfolio symbols
  const symbols = ['AAPL', 'GOOGL', 'MSFT', 'AMZN', 'META'];
  // Simulate some returns data (in practice, load from historical data)
  for (const symbol of symbols) {
    // One trading year of synthetic daily returns per symbol.
    const returns = generateRandomReturns(252);
    riskAnalyzer.updateReturns(symbol, returns);
  }
  // Current positions
  const positions = [
    { symbol: 'AAPL', quantity: 100, avgPrice: 150 },
    { symbol: 'GOOGL', quantity: 50, avgPrice: 140 },
    { symbol: 'MSFT', quantity: 75, avgPrice: 380 },
    { symbol: 'AMZN', quantity: 40, avgPrice: 170 },
    { symbol: 'META', quantity: 60, avgPrice: 480 }
  ];
  // Current prices
  const prices = {
    AAPL: 155,
    GOOGL: 145,
    MSFT: 390,
    AMZN: 175,
    META: 490
  };
  // Calculate portfolio risk
  // NOTE(review): positions are serialized as [symbol, quantity, avgPrice]
  // tuples rather than objects — presumably the native layer expects tuple
  // form; confirm against the RiskAnalyzer binding.
  const riskJson = riskAnalyzer.calculatePortfolioRisk(
    JSON.stringify(positions.map(p => [p.symbol, p.quantity, p.avgPrice])),
    JSON.stringify(prices)
  );
  const risk = JSON.parse(riskJson);
  console.log('Portfolio Risk Metrics:');
  console.log(` VaR (95%): $${risk.total_var_95.toFixed(2)}`);
  console.log(` VaR (99%): $${risk.total_var_99.toFixed(2)}`);
  console.log(` CVaR (95%): $${risk.total_cvar_95.toFixed(2)}`);
  console.log('\nConcentration Metrics:');
  console.log(` Herfindahl Index: ${risk.concentration_risk.herfindahl_index.toFixed(3)}`);
  console.log(` Effective Positions: ${risk.concentration_risk.effective_number_of_positions.toFixed(1)}`);
  console.log(` Top 5 Concentration: ${(risk.concentration_risk.top_5_concentration * 100).toFixed(1)}%`);
  console.log('\nCorrelation Analysis:');
  console.log(` Average Correlation: ${risk.correlation_matrix.average_correlation.toFixed(3)}`);
  console.log(` Max Correlation: ${risk.correlation_matrix.max_correlation[0]} vs ${risk.correlation_matrix.max_correlation[1]} = ${risk.correlation_matrix.max_correlation[2].toFixed(3)}`);
  console.log(` Clustering Score: ${risk.correlation_matrix.clustering_score.toFixed(3)}`);
  console.log('\nStress Test Results:');
  // stress_test_results is a scenario-name -> loss map.
  for (const [scenario, loss] of Object.entries(risk.stress_test_results)) {
    console.log(` ${scenario}: $${(loss as number).toFixed(2)}`);
  }
}
// Example 3: Dynamic Bet Sizing
// Shows how the recommended position size responds to signal quality,
// volatility, liquidity and drawdown, then derives an optimal stop-loss.
async function betSizingExample() {
  console.log('\n=== Dynamic Bet Sizing Example ===');
  const riskAnalyzer = new RiskAnalyzer(100000, 0.02, 252);
  // Scenario 1: Strong signal in trending market
  console.log('\nScenario 1: Strong Signal, Trending Market');
  let positionSize = riskAnalyzer.calculatePositionSize(
    0.8, // signal_strength
    0.9, // signal_confidence
    0.015, // volatility (1.5% daily)
    0.8, // liquidity_score
    0.02, // current_drawdown (2%)
    150, // price
    145, // stop_loss
    'trending'
  );
  let size = JSON.parse(positionSize);
  console.log(` Shares: ${size.shares}`);
  console.log(` Notional Value: $${size.notional_value.toFixed(2)}`);
  console.log(` % of Capital: ${(size.percent_of_capital * 100).toFixed(2)}%`);
  console.log(' Adjustments:');
  // Each adjustment reports a reason and a multiplicative sizing factor.
  for (const adj of size.adjustments) {
    console.log(` ${adj.reason}: ${adj.factor.toFixed(2)}x`);
  }
  // Scenario 2: Weak signal in high volatility
  // Same API, but degraded inputs — expect a much smaller recommendation.
  console.log('\nScenario 2: Weak Signal, High Volatility');
  positionSize = riskAnalyzer.calculatePositionSize(
    0.3, // signal_strength
    0.5, // signal_confidence
    0.03, // volatility (3% daily)
    0.6, // liquidity_score
    0.15, // current_drawdown (15%)
    150, // price
    null, // no stop_loss
    'high_volatility'
  );
  size = JSON.parse(positionSize);
  console.log(` Shares: ${size.shares}`);
  console.log(` Notional Value: $${size.notional_value.toFixed(2)}`);
  console.log(` % of Capital: ${(size.percent_of_capital * 100).toFixed(2)}%`);
  // Calculate optimal stop loss
  // Candidate support prices below the 150 entry, nearest first.
  const supportLevels = [148, 145, 142, 140];
  const optimalStop = riskAnalyzer.calculateOptimalStopLoss(
    150, // entry_price
    0.015, // volatility
    supportLevels,
    2.5, // atr
    true // is_long
  );
  console.log(`\nOptimal Stop Loss: $${optimalStop.toFixed(2)}`);
}
// Example 4: Integrated Risk Management in Trading
// End-to-end pre-trade workflow: liquidity check -> position sizing ->
// market-impact estimate -> engine risk check.
async function integratedTradingExample() {
  console.log('\n=== Integrated Risk Management Example ===');
  const engine = new TradingEngine('backtest', {
    startTime: Date.now() - 30 * 24 * 60 * 60 * 1000, // 30 days ago
    endTime: Date.now(),
    speedMultiplier: 1
  });
  const riskAnalyzer = new RiskAnalyzer(100000, 0.02, 252);
  const obAnalyzer = new OrderbookAnalyzer();
  // Simulate a trading decision
  const symbol = 'AAPL';
  // 1. Check orderbook liquidity
  engine.updateQuote(symbol, 149.95, 150.05, 5000, 5500);
  const snapshot = engine.getOrderbookSnapshot(symbol, 10);
  const analytics = JSON.parse(obAnalyzer.analyzeOrderbook(snapshot));
  console.log('Pre-trade Analysis:');
  console.log(` Liquidity Score: ${analytics.liquidity_score.toFixed(2)}`);
  console.log(` Spread: ${analytics.spread_bps.toFixed(1)} bps`);
  console.log(` Orderbook Imbalance: ${(analytics.imbalance * 100).toFixed(1)}%`);
  // 2. Calculate position size based on current conditions
  // The liquidity score measured above feeds directly into sizing.
  const positionSizeJson = riskAnalyzer.calculatePositionSize(
    0.7, // signal_strength
    0.8, // signal_confidence
    0.018, // volatility
    analytics.liquidity_score, // from orderbook
    0.05, // current_drawdown
    150, // price
    147, // stop_loss
    'trending'
  );
  const positionSize = JSON.parse(positionSizeJson);
  console.log(`\nPosition Sizing:`);
  console.log(` Recommended Shares: ${positionSize.shares}`);
  console.log(` Risk-Adjusted Size: ${(positionSize.risk_adjusted_size * 100).toFixed(2)}%`);
  // 3. Check market impact before placing order
  // Notional at the quoted 150 price used in the sizing call above.
  const orderValue = positionSize.shares * 150;
  const impact = JSON.parse(obAnalyzer.calculateMarketImpact(snapshot, orderValue, true));
  console.log(`\nExpected Market Impact:`);
  console.log(` Price Impact: ${(impact.price_impact * 100).toFixed(3)}%`);
  console.log(` Expected Fill Price: $${impact.avg_execution_price.toFixed(2)}`);
  // 4. Risk check
  const riskCheck = engine.checkRisk({
    id: '123',
    symbol: symbol,
    side: 'buy',
    quantity: positionSize.shares,
    orderType: 'market',
    timeInForce: 'DAY'
  });
  const riskResult = JSON.parse(riskCheck);
  console.log(`\nRisk Check: ${riskResult.passed ? 'PASSED' : 'FAILED'}`);
  if (!riskResult.passed) {
    console.log(' Violations:', riskResult.violations);
  }
}
// Helper function to generate random returns
// Draws `length` synthetic daily returns from a normal distribution with
// mean 0.0005 (0.05%) and std dev 0.02 (2%) via the Box–Muller transform.
// The previous uniform draw `(Math.random() - 0.5) * 0.04` only had a std
// dev of ~0.0115 (0.04/sqrt(12)), contradicting the documented 2%.
function generateRandomReturns(length: number): number[] {
  const MEAN = 0.0005;
  const STD_DEV = 0.02;
  const returns: number[] = [];
  for (let i = 0; i < length; i++) {
    // Box–Muller: two uniforms -> one standard normal draw.
    const u1 = Math.random() || Number.MIN_VALUE; // guard log(0)
    const u2 = Math.random();
    const z = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
    returns.push(MEAN + STD_DEV * z);
  }
  return returns;
}
// Run every example scenario in sequence; the first failure aborts the
// remainder and is reported on the console.
async function runExamples() {
  const examples = [
    orderbookAnalyticsExample,
    portfolioRiskExample,
    betSizingExample,
    integratedTradingExample,
  ];
  try {
    for (const example of examples) {
      await example();
    }
  } catch (error) {
    console.error('Error running examples:', error);
  }
}
// Execute if running directly
// (skipped when this file is imported as a module rather than run as a script)
if (require.main === module) {
  runExamples();
}

View file

@ -0,0 +1,115 @@
import { BacktestEngine } from '../src/backtest/BacktestEngine';
import { StrategyManager } from '../src/strategies/StrategyManager';
import { StorageService } from '../src/services/StorageService';
import { ModeManager } from '../src/core/ModeManager';
import { MarketDataService } from '../src/services/MarketDataService';
import { ExecutionService } from '../src/services/ExecutionService';
import { IServiceContainer } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('BacktestTest');
// Smoke test: wires up the service stack, runs a one-symbol SMA-crossover
// backtest over calendar year 2023, and dumps metrics, per-trade detail and
// the ends of the equity curve to the console.
async function runSimpleBacktest() {
  // Create service container
  // NOTE(review): the imported getLogger is bypassed here in favor of a
  // console-backed logger — presumably so test output always reaches stdout.
  const container: IServiceContainer = {
    logger: {
      info: (msg: string, ...args: any[]) => console.log('[INFO]', msg, ...args),
      error: (msg: string, ...args: any[]) => console.error('[ERROR]', msg, ...args),
      warn: (msg: string, ...args: any[]) => console.warn('[WARN]', msg, ...args),
      debug: (msg: string, ...args: any[]) => console.log('[DEBUG]', msg, ...args),
    } as any,
    custom: {}
  };
  // Initialize services
  const storageService = new StorageService();
  const marketDataService = new MarketDataService(container);
  const executionService = new ExecutionService(container);
  const modeManager = new ModeManager(container, marketDataService, executionService, storageService);
  const strategyManager = new StrategyManager(container);
  // Set services in container
  container.custom = {
    MarketDataService: marketDataService,
    ExecutionService: executionService,
    ModeManager: modeManager,
    StorageService: storageService
  };
  // Initialize backtest mode with full config
  // NOTE(review): this duplicates most of `config` below — confirm whether
  // both the mode initialization and the engine config need the same fields.
  await modeManager.initializeMode({
    mode: 'backtest',
    startDate: '2023-01-01T00:00:00Z',
    endDate: '2024-01-01T00:00:00Z',
    speed: 'max',
    symbols: ['AAPL'],
    initialCapital: 100000,
    dataFrequency: '1d',
    strategy: 'sma-crossover'
  });
  // Create backtest engine
  const backtestEngine = new BacktestEngine(container, storageService, strategyManager);
  // Configure backtest - shorter period for faster testing
  const config = {
    mode: 'backtest' as const, // Add mode field
    name: 'SMA Crossover Test',
    strategy: 'sma-crossover',
    symbols: ['AAPL'], // Just one symbol for simplicity
    startDate: '2023-01-01T00:00:00Z', // ISO datetime format
    endDate: '2024-01-01T00:00:00Z', // ISO datetime format
    initialCapital: 100000,
    commission: 0.001,
    slippage: 0.0001,
    dataFrequency: '1d' as const,
    speed: 'max' as const
  };
  console.log('Starting backtest with configuration:', config);
  try {
    const result = await backtestEngine.runBacktest(config);
    // Headline performance metrics.
    console.log('\n=== BACKTEST RESULTS ===');
    console.log(`Total Trades: ${result.metrics.totalTrades}`);
    console.log(`Win Rate: ${result.metrics.winRate.toFixed(2)}%`);
    console.log(`Total Return: ${result.metrics.totalReturn.toFixed(2)}%`);
    console.log(`Sharpe Ratio: ${result.metrics.sharpeRatio.toFixed(2)}`);
    console.log(`Max Drawdown: ${result.metrics.maxDrawdown.toFixed(2)}%`);
    // Per-trade breakdown (explicit message when nothing traded, since an
    // empty trade list is the main failure mode this script exists to catch).
    console.log('\n=== TRADE HISTORY ===');
    if (result.trades.length === 0) {
      console.log('No trades were executed!');
    } else {
      result.trades.forEach((trade, i) => {
        console.log(`\nTrade ${i + 1}:`);
        console.log(` Symbol: ${trade.symbol}`);
        console.log(` Entry: ${trade.entryDate} @ $${trade.entryPrice.toFixed(2)}`);
        console.log(` Exit: ${trade.exitDate} @ $${trade.exitPrice.toFixed(2)}`);
        console.log(` P&L: $${trade.pnl.toFixed(2)} (${trade.pnlPercent.toFixed(2)}%)`);
      });
    }
    // Show some equity curve data
    console.log('\n=== EQUITY CURVE (first and last 5 points) ===');
    const equityCurve = result.equity;
    if (equityCurve.length > 0) {
      equityCurve.slice(0, 5).forEach(point => {
        console.log(`${point.date}: $${point.value.toFixed(2)}`);
      });
      // Only print the tail separately when it doesn't overlap the head.
      if (equityCurve.length > 10) {
        console.log('...');
        equityCurve.slice(-5).forEach(point => {
          console.log(`${point.date}: $${point.value.toFixed(2)}`);
        });
      }
    }
  } catch (error) {
    console.error('Backtest failed:', error);
  }
}
// Run the test
// Top-level entry point; surfaces any unhandled rejection on the console.
runSimpleBacktest().catch(console.error);

View file

@ -0,0 +1,84 @@
import { BacktestEngine } from '../src/backtest/BacktestEngine';
import { StrategyManager } from '../src/strategies/StrategyManager';
import { StorageService } from '../src/services/StorageService';
import { ModeManager } from '../src/core/ModeManager';
import { MarketDataService } from '../src/services/MarketDataService';
import { ExecutionService } from '../src/services/ExecutionService';
import { IServiceContainer } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('BacktestTest');
// Runs a multi-symbol SMA-crossover backtest over 2020–2025 and reports
// results through the shared logger (contrast with the console-based
// single-symbol variant in the sibling test script).
async function runBacktestWithDetailedLogging() {
  // Create service container
  const container: IServiceContainer = {
    logger,
    custom: {}
  };
  // Initialize services
  const storageService = new StorageService();
  const marketDataService = new MarketDataService(container);
  const executionService = new ExecutionService(container);
  // NOTE(review): ModeManager is constructed with only the container here,
  // while the sibling script passes market-data/execution/storage services
  // explicitly — confirm which constructor signature is current.
  const modeManager = new ModeManager(container);
  const strategyManager = new StrategyManager(container);
  // Set services in container
  container.custom = {
    MarketDataService: marketDataService,
    ExecutionService: executionService,
    ModeManager: modeManager,
    StorageService: storageService
  };
  // Set backtest mode
  // Five-year window; speedMultiplier 1 (units per the ModeManager API).
  await modeManager.setMode('backtest', {
    startTime: new Date('2020-01-01').getTime(),
    endTime: new Date('2025-01-01').getTime(),
    speedMultiplier: 1
  });
  // Create backtest engine
  const backtestEngine = new BacktestEngine(container, storageService, strategyManager);
  // Configure backtest
  // NOTE(review): dates are plain YYYY-MM-DD here while the sibling script
  // uses full ISO datetimes, and no `mode`/`speed` fields are set — confirm
  // which config shape BacktestEngine.runBacktest expects.
  const config = {
    name: 'SMA Crossover Test',
    strategy: 'sma-crossover',
    symbols: ['AAPL', 'GOOGL', 'MSFT'],
    startDate: '2020-01-01',
    endDate: '2025-01-01',
    initialCapital: 100000,
    commission: 0.001,
    slippage: 0.0001,
    dataFrequency: '1d'
  };
  logger.info('Starting backtest with configuration:', config);
  try {
    const result = await backtestEngine.runBacktest(config);
    // Headline performance metrics.
    logger.info('=== BACKTEST RESULTS ===');
    logger.info(`Total Trades: ${result.metrics.totalTrades}`);
    logger.info(`Win Rate: ${result.metrics.winRate.toFixed(2)}%`);
    logger.info(`Total Return: ${result.metrics.totalReturn.toFixed(2)}%`);
    logger.info(`Sharpe Ratio: ${result.metrics.sharpeRatio.toFixed(2)}`);
    logger.info(`Max Drawdown: ${result.metrics.maxDrawdown.toFixed(2)}%`);
    // Per-trade breakdown.
    logger.info('\n=== TRADE HISTORY ===');
    result.trades.forEach((trade, i) => {
      logger.info(`Trade ${i + 1}:`);
      logger.info(` Symbol: ${trade.symbol}`);
      logger.info(` Entry: ${trade.entryDate} @ $${trade.entryPrice.toFixed(2)}`);
      logger.info(` Exit: ${trade.exitDate} @ $${trade.exitPrice.toFixed(2)}`);
      logger.info(` P&L: $${trade.pnl.toFixed(2)} (${trade.pnlPercent.toFixed(2)}%)`);
    });
  } catch (error) {
    logger.error('Backtest failed:', error);
  }
}
// Run the test
// Top-level entry point; surfaces any unhandled rejection on the console.
runBacktestWithDetailedLogging().catch(console.error);

View file

@ -413,11 +413,11 @@ export class BacktestEngine extends EventEmitter {
let trendDuration = 0;
while (currentTime <= endTime) {
// Every 20-50 days, change trend
// Every 10-30 days, change trend (more frequent for testing)
if (trendDuration <= 0) {
trend = Math.random() > 0.5 ? 1 : -1;
trendStrength = 0.002 + Math.random() * 0.003; // 0.2% to 0.5% daily trend
trendDuration = Math.floor(20 + Math.random() * 30);
trendStrength = 0.003 + Math.random() * 0.005; // 0.3% to 0.8% daily trend
trendDuration = Math.floor(10 + Math.random() * 20); // Shorter trends
}
// Generate price movement with trend and noise

View file

@ -11,11 +11,11 @@ export class SimpleMovingAverageCrossover extends BaseStrategy {
private totalSignals = 0;
// Strategy parameters
private readonly FAST_PERIOD = 10;
private readonly SLOW_PERIOD = 20;
private readonly FAST_PERIOD = 5; // Changed from 10 to generate more signals
private readonly SLOW_PERIOD = 15; // Changed from 20 to generate more signals
private readonly POSITION_SIZE = 0.1; // 10% of capital per position
private readonly MIN_HOLDING_BARS = 1; // Minimum bars to hold position
private readonly DEBUG_INTERVAL = 20; // Log every N bars for debugging
private readonly DEBUG_INTERVAL = 10; // Log every N bars for debugging
constructor(config: any, modeManager?: any, executionService?: any) {
super(config, modeManager, executionService);
@ -119,8 +119,8 @@ export class SimpleMovingAverageCrossover extends BaseStrategy {
logger.info(` Current position: ${currentPosition} shares`);
// For golden cross, we want to be long
// If we're short, we need to close the short first
if (currentPosition < 0) {
// Close short position first
logger.info(` Closing short position of ${Math.abs(currentPosition)} shares`);
const signal: Signal = {
type: 'buy',
@ -206,10 +206,7 @@ export class SimpleMovingAverageCrossover extends BaseStrategy {
logger.info(`👉 Total signals generated: ${this.totalSignals}`);
return signal;
} else if (currentPosition === 0) {
// Optional: Open short position (comment out if long-only)
logger.info(` No position, staying flat (long-only strategy)`);
// Uncomment below for long/short strategy:
/*
// Open short position for long/short strategy
const positionSize = this.calculatePositionSize(currentPrice);
logger.info(` Opening short position: ${positionSize} shares`);
@ -233,7 +230,6 @@ export class SimpleMovingAverageCrossover extends BaseStrategy {
this.totalSignals++;
logger.info(`👉 Total signals generated: ${this.totalSignals}`);
return signal;
*/
} else {
logger.info(` ⚠️ Already short, skipping sell signal`);
}