added initial py analytics / rust core / ts orchestrator services

This commit is contained in:
Boki 2025-07-01 11:16:25 -04:00
parent 680b5fd2ae
commit c862ed496b
62 changed files with 13459 additions and 0 deletions

View file

@ -0,0 +1,353 @@
use crate::{Side, MarketMicrostructure, PriceLevel};
use chrono::{DateTime, Utc, Timelike};
/// Result of a market-impact estimate for a single order.
#[derive(Debug, Clone)]
pub struct MarketImpactEstimate {
    pub temporary_impact: f64, // transient impact component, basis points
    pub permanent_impact: f64, // lasting impact component, basis points
    pub total_impact: f64,     // temporary + permanent + spread cost, bps
    pub expected_cost: f64,    // absolute expected dollar cost of the order
    pub impact_decay_ms: i64,  // how long the temporary impact persists
}

/// Functional form mapping participation rate to price impact.
#[derive(Debug, Clone, Copy)]
pub enum ImpactModelType {
    Linear,                     // impact proportional to participation rate
    SquareRoot,                 // impact ~ sqrt(participation rate)
    PowerLaw { exponent: f64 }, // impact ~ participation ^ exponent
    AlmgrenChriss,              // temporary + permanent terms plus half spread
    IStarModel,                 // order-book-shape-driven estimate
}

/// Parameterized market-impact model; coefficients are chosen per model
/// type in `MarketImpactModel::new`.
pub struct MarketImpactModel {
    model_type: ImpactModelType,
    // Model parameters
    temporary_impact_coef: f64,
    permanent_impact_coef: f64,
    spread_impact_weight: f64,   // fraction of the quoted spread charged as cost
    volatility_adjustment: bool, // scale impact by sqrt(vol / 2% daily vol)
}
impl MarketImpactModel {
    /// Creates a model with empirically calibrated default coefficients for
    /// the chosen functional form.
    pub fn new(model_type: ImpactModelType) -> Self {
        match model_type {
            ImpactModelType::Linear => Self {
                model_type,
                temporary_impact_coef: 0.1,
                permanent_impact_coef: 0.05,
                spread_impact_weight: 0.5,
                volatility_adjustment: true,
            },
            ImpactModelType::SquareRoot => Self {
                model_type,
                temporary_impact_coef: 0.142, // Empirical from literature
                permanent_impact_coef: 0.0625,
                spread_impact_weight: 0.5,
                volatility_adjustment: true,
            },
            ImpactModelType::AlmgrenChriss => Self {
                model_type,
                temporary_impact_coef: 0.314,
                permanent_impact_coef: 0.142,
                spread_impact_weight: 0.7,
                volatility_adjustment: true,
            },
            ImpactModelType::PowerLaw { .. } => Self {
                model_type,
                temporary_impact_coef: 0.2,
                permanent_impact_coef: 0.1,
                spread_impact_weight: 0.5,
                volatility_adjustment: true,
            },
            ImpactModelType::IStarModel => Self {
                model_type,
                temporary_impact_coef: 1.0,
                permanent_impact_coef: 0.5,
                spread_impact_weight: 0.8,
                volatility_adjustment: true,
            },
        }
    }

    /// Estimates impact (basis points) and the expected dollar cost of
    /// executing `order_size` immediately.
    ///
    /// `orderbook` is the book side the order would hit (asks for a buy,
    /// bids for a sell), so its first level is the touch price, not a
    /// true midpoint.
    pub fn estimate_impact(
        &self,
        order_size: f64,
        side: Side,
        microstructure: &MarketMicrostructure,
        orderbook: &[PriceLevel],
        current_time: DateTime<Utc>,
    ) -> MarketImpactEstimate {
        // Participation rate: order size relative to the volume expected to
        // trade in the current hour. Floor the denominator at 1 so an empty
        // or missing volume profile cannot divide by zero.
        let intraday_volume = self.get_expected_volume(microstructure, current_time);
        let participation_rate = order_size / intraday_volume.max(1.0);

        let spread_bps = microstructure.avg_spread_bps;

        // Scale impact by sqrt(vol / 2%): higher-vol names move more per
        // unit of flow.
        let vol_adjustment = if self.volatility_adjustment {
            (microstructure.volatility / 0.02).sqrt() // Normalize to 2% daily vol
        } else {
            1.0
        };

        // Temporary impact in bps according to the configured functional form.
        let temp_impact_bps = match self.model_type {
            ImpactModelType::Linear => {
                self.temporary_impact_coef * participation_rate * 10000.0
            }
            ImpactModelType::SquareRoot => {
                self.temporary_impact_coef * participation_rate.sqrt() * 10000.0
            }
            ImpactModelType::PowerLaw { exponent } => {
                self.temporary_impact_coef * participation_rate.powf(exponent) * 10000.0
            }
            ImpactModelType::AlmgrenChriss => self.calculate_almgren_chriss_impact(
                participation_rate,
                spread_bps,
                microstructure.volatility,
                order_size,
                microstructure.avg_trade_size,
            ),
            ImpactModelType::IStarModel => {
                self.calculate_istar_impact(order_size, microstructure, orderbook, side)
            }
        };

        // Permanent impact is modeled as square-root in participation and is
        // usually the smaller component.
        let perm_impact_bps = self.permanent_impact_coef * participation_rate.sqrt() * 10000.0;

        // Charge a configurable fraction of the quoted spread.
        let spread_cost_bps = spread_bps * self.spread_impact_weight;

        let adjusted_temp_impact = temp_impact_bps * vol_adjustment;
        let adjusted_perm_impact = perm_impact_bps * vol_adjustment;
        let total_impact_bps = adjusted_temp_impact + adjusted_perm_impact + spread_cost_bps;

        // How long the temporary component persists.
        let impact_decay_ms = self.calculate_impact_decay_time(
            order_size,
            microstructure.daily_volume,
            microstructure.avg_trade_size,
        );

        // Reference price for the dollar cost: top of the supplied book
        // side (the caller passes the side being hit), falling back to
        // 100.0 when no book is available.
        let ref_price = if !orderbook.is_empty() {
            orderbook[0].price
        } else {
            100.0 // Default if no orderbook
        };
        let direction_multiplier = match side {
            Side::Buy => 1.0,
            Side::Sell => -1.0,
        };
        // Cost is reported as a magnitude; the direction multiplier is kept
        // so a signed cost could be recovered if ever needed.
        let expected_cost =
            ref_price * order_size * total_impact_bps / 10000.0 * direction_multiplier;

        MarketImpactEstimate {
            temporary_impact: adjusted_temp_impact,
            permanent_impact: adjusted_perm_impact,
            total_impact: total_impact_bps,
            expected_cost: expected_cost.abs(),
            impact_decay_ms,
        }
    }

    /// Almgren-Chriss impact in bps: eta * sqrt(v/V) * sigma (temporary)
    /// plus gamma * (order / avg trade size) (permanent) plus half the
    /// quoted spread.
    fn calculate_almgren_chriss_impact(
        &self,
        participation_rate: f64,
        spread_bps: f64,
        volatility: f64,
        order_size: f64,
        avg_trade_size: f64,
    ) -> f64 {
        let eta = self.temporary_impact_coef;   // Temporary impact coefficient
        let gamma = self.permanent_impact_coef; // Permanent impact coefficient
        let trading_rate = order_size / avg_trade_size;
        // Temporary impact: eta * (v/V)^alpha * sigma
        let temp_component = eta * participation_rate.sqrt() * volatility * 10000.0;
        // Permanent impact: gamma * (X/V)
        let perm_component = gamma * trading_rate * 10000.0;
        // Add half spread
        let spread_component = spread_bps * 0.5;
        temp_component + perm_component + spread_component
    }

    /// I* model: walks the book to the level at which the order would be
    /// fully absorbed and adds a square-root participation term.
    fn calculate_istar_impact(
        &self,
        order_size: f64,
        microstructure: &MarketMicrostructure,
        orderbook: &[PriceLevel],
        _side: Side,
    ) -> f64 {
        if orderbook.is_empty() {
            return self.temporary_impact_coef * 100.0; // Fallback
        }
        let mut cumulative_size = 0.0;
        let mut impact_bps = 0.0;
        // Walk through the book until we've "consumed" our order. If the
        // visible book is shallower than the order, the walk contributes 0
        // and only the participation term below applies.
        for level in orderbook {
            cumulative_size += level.size;
            if cumulative_size >= order_size {
                // Distance from the touch to the clearing level, in bps.
                let ref_price = orderbook[0].price;
                let exec_price = level.price;
                impact_bps = ((exec_price - ref_price).abs() / ref_price) * 10000.0;
                break;
            }
        }
        // Add a square-root participation-rate impact on top of the walk.
        let participation_impact = self.temporary_impact_coef
            * (order_size / microstructure.daily_volume).sqrt()
            * 10000.0;
        impact_bps + participation_impact
    }

    /// Expected volume over the current hour: uses the intraday profile
    /// when one is supplied (24 hourly fractions of daily volume),
    /// otherwise assumes a flat 6.5-hour trading day.
    fn get_expected_volume(
        &self,
        microstructure: &MarketMicrostructure,
        current_time: DateTime<Utc>,
    ) -> f64 {
        if microstructure.intraday_volume_profile.len() == 24 {
            let hour = current_time.hour() as usize;
            let hour_pct = microstructure.intraday_volume_profile[hour];
            microstructure.daily_volume * hour_pct
        } else {
            // Simple assumption: 1/6.5 of daily volume per hour (6.5 hour trading day)
            microstructure.daily_volume / 6.5
        }
    }

    /// Empirical decay time of the temporary impact: orders that are large
    /// relative to daily volume or typical trade size decay more slowly.
    fn calculate_impact_decay_time(
        &self,
        order_size: f64,
        daily_volume: f64,
        avg_trade_size: f64,
    ) -> i64 {
        let volume_ratio = order_size / daily_volume;
        let trade_ratio = order_size / avg_trade_size;
        let base_decay_ms = 60_000; // 1 minute base
        // Larger orders relative to volume decay slower.
        let decay_multiplier = 1.0 + volume_ratio * 10.0 + trade_ratio.ln().max(0.0);
        (base_decay_ms as f64 * decay_multiplier) as i64
    }

    /// Almgren-Chriss optimal execution schedule: returns
    /// (minute-offset, slice size) pairs in 5-minute buckets.
    ///
    /// Bug fixed: for zero risk aversion (or zero volatility) the
    /// closed-form sinh-ratio trajectory degenerates to 0/0 and every slice
    /// came out NaN. The analytic limit of the trajectory as alpha -> 0 is
    /// linear (TWAP), so that limit is now used explicitly. A non-positive
    /// horizon (which previously divided by zero) yields one immediate slice.
    pub fn calculate_optimal_execution_schedule(
        &self,
        total_size: f64,
        time_horizon_minutes: f64,
        microstructure: &MarketMicrostructure,
        risk_aversion: f64,
    ) -> Vec<(f64, f64)> {
        // Degenerate horizon: everything in one immediate slice.
        if time_horizon_minutes <= 0.0 {
            return vec![(0.0, total_size)];
        }
        let n_slices = ((time_horizon_minutes / 5.0).ceil() as usize).max(1); // 5-minute buckets
        let tau = time_horizon_minutes / n_slices as f64;
        let mut schedule = Vec::with_capacity(n_slices);

        // Parameters
        let volatility = microstructure.volatility;
        let eta = self.temporary_impact_coef;
        let lambda = risk_aversion;

        // Urgency: kappa = lambda * sigma^2 / eta; alpha controls how
        // front-loaded the trajectory is.
        let kappa = lambda * volatility.powi(2) / eta;
        let alpha = (kappa / tau).sqrt();
        let horizon = time_horizon_minutes;

        // Remaining inventory x(t) = X * sinh(alpha*(T-t)) / sinh(alpha*T),
        // with the TWAP limit X * (T-t)/T when alpha*T is effectively zero.
        let remaining = |t: f64| -> f64 {
            if alpha * horizon > 1e-12 {
                total_size * (alpha * (horizon - t)).sinh() / (alpha * horizon).sinh()
            } else {
                total_size * (horizon - t) / horizon
            }
        };

        for i in 0..n_slices {
            let t = i as f64 * tau;
            let t_next = (i + 1) as f64 * tau;
            let slice_size = remaining(t) - remaining(t_next);
            let slice_time = t + tau / 2.0; // Midpoint of the bucket
            schedule.push((slice_time, slice_size));
        }
        schedule
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Every model variant should produce strictly positive impact numbers
    /// and a positive decay time for a plain buy order.
    #[test]
    fn test_market_impact_models() {
        let microstructure = MarketMicrostructure {
            symbol: "TEST".to_string(),
            avg_spread_bps: 2.0,
            daily_volume: 10_000_000.0,
            avg_trade_size: 100.0,
            volatility: 0.02,
            tick_size: 0.01,
            lot_size: 1.0,
            intraday_volume_profile: vec![0.04; 24], // Flat profile
        };
        let orderbook = vec![
            PriceLevel { price: 100.0, size: 1000.0, order_count: Some(10) },
            PriceLevel { price: 100.01, size: 2000.0, order_count: Some(15) },
        ];
        for model_type in [
            ImpactModelType::Linear,
            ImpactModelType::SquareRoot,
            ImpactModelType::AlmgrenChriss,
        ] {
            let estimate = MarketImpactModel::new(model_type).estimate_impact(
                1000.0,
                Side::Buy,
                &microstructure,
                &orderbook,
                Utc::now(),
            );
            assert!(estimate.total_impact > 0.0);
            assert!(estimate.temporary_impact >= 0.0);
            assert!(estimate.permanent_impact >= 0.0);
            assert!(estimate.expected_cost > 0.0);
            assert!(estimate.impact_decay_ms > 0);
        }
    }
}

View file

@ -0,0 +1,5 @@
// Analytics: market-impact estimation and transaction-cost analysis.
pub mod market_impact;
pub mod transaction_costs;

// Re-export the primary types so callers can reach them directly.
pub use market_impact::{MarketImpactModel, ImpactModelType, MarketImpactEstimate};
pub use transaction_costs::{TransactionCostModel, CostComponents};

View file

@ -0,0 +1,355 @@
use crate::{Side, Order, Fill, MarketMicrostructure};
use chrono::{DateTime, Utc};
/// Breakdown of execution costs. Monetary fields are in currency units;
/// `cost_bps` expresses the total relative to traded value.
#[derive(Debug, Clone)]
pub struct CostComponents {
    pub spread_cost: f64,      // cost of crossing the quoted spread
    pub market_impact: f64,    // |price move between arrival and avg fill|
    pub commission: f64,       // commission + exchange + regulatory fees
    pub slippage: f64,         // |avg fill price - benchmark price| * size
    pub opportunity_cost: f64, // |cost attributed to the unfilled portion|
    pub timing_cost: f64,      // adverse drift between arrival and benchmark
    pub total_cost: f64,       // sum of the components above
    pub cost_bps: f64,         // total_cost / traded value, in basis points
}

/// Full post-trade analysis of one order's execution quality.
#[derive(Debug, Clone)]
pub struct TransactionCostAnalysis {
    pub order_id: String,
    pub symbol: String,
    pub side: Side,
    pub intended_size: f64,   // quantity originally requested
    pub filled_size: f64,     // quantity actually executed
    pub avg_fill_price: f64,  // size-weighted average fill price
    pub arrival_price: f64,   // price when the order was placed
    pub benchmark_price: f64, // price per the configured BenchmarkType
    pub cost_components: CostComponents,
    pub implementation_shortfall: f64, // signed shortfall vs arrival price
    pub duration_ms: i64,     // elapsed time from order start to end
}

/// Computes realized and estimated transaction costs for executions.
pub struct TransactionCostModel {
    commission_rate_bps: f64, // broker commission, bps of traded value
    min_commission: f64,      // floor applied to the commission
    exchange_fees_bps: f64,
    regulatory_fees_bps: f64,
    benchmark_type: BenchmarkType, // benchmark used for slippage
}

/// Reference price used when measuring slippage.
#[derive(Debug, Clone, Copy)]
pub enum BenchmarkType {
    ArrivalPrice, // Price when order was placed
    VWAP, // Volume-weighted average price
    TWAP, // Time-weighted average price
    Close, // Closing price
    MidpointAtArrival, // Mid price at order arrival
}
impl TransactionCostModel {
    /// Creates a cost model with the given commission rate (bps), a $1
    /// minimum commission, typical exchange/regulatory fees, and the
    /// arrival price as the default benchmark.
    pub fn new(commission_rate_bps: f64) -> Self {
        Self {
            commission_rate_bps,
            min_commission: 1.0,
            exchange_fees_bps: 0.3,   // Typical exchange fees
            regulatory_fees_bps: 0.1, // SEC fees etc
            benchmark_type: BenchmarkType::ArrivalPrice,
        }
    }

    /// Builder-style override of the slippage benchmark.
    pub fn with_benchmark_type(mut self, benchmark_type: BenchmarkType) -> Self {
        self.benchmark_type = benchmark_type;
        self
    }

    /// Post-trade analysis of an order and its fills against the configured
    /// benchmark, including a signed implementation shortfall.
    pub fn analyze_execution(
        &self,
        order: &Order,
        fills: &[Fill],
        arrival_price: f64,
        benchmark_prices: &BenchmarkPrices,
        microstructure: &MarketMicrostructure,
        order_start_time: DateTime<Utc>,
        order_end_time: DateTime<Utc>,
    ) -> TransactionCostAnalysis {
        // Size-weighted average fill price; an unfilled order is marked at
        // the arrival price so downstream arithmetic stays finite.
        let filled_size = fills.iter().map(|f| f.quantity).sum::<f64>();
        let total_value = fills.iter().map(|f| f.price * f.quantity).sum::<f64>();
        let avg_fill_price = if filled_size > 0.0 {
            total_value / filled_size
        } else {
            arrival_price
        };

        // Select the configured benchmark price.
        let benchmark_price = match self.benchmark_type {
            BenchmarkType::ArrivalPrice => arrival_price,
            BenchmarkType::VWAP => benchmark_prices.vwap,
            BenchmarkType::TWAP => benchmark_prices.twap,
            BenchmarkType::Close => benchmark_prices.close,
            BenchmarkType::MidpointAtArrival => benchmark_prices.midpoint_at_arrival,
        };

        let cost_components = self.calculate_cost_components(
            order,
            fills,
            avg_fill_price,
            arrival_price,
            benchmark_price,
            microstructure,
        );

        // Implementation shortfall: execution cost on the filled portion
        // plus benchmark drift on the unfilled remainder, signed so adverse
        // moves are positive for either side.
        let side_multiplier = match order.side {
            Side::Buy => 1.0,
            Side::Sell => -1.0,
        };
        let implementation_shortfall = side_multiplier * filled_size *
            (avg_fill_price - arrival_price) +
            side_multiplier * (order.quantity - filled_size) *
            (benchmark_price - arrival_price);

        let duration_ms = (order_end_time - order_start_time).num_milliseconds();

        TransactionCostAnalysis {
            order_id: order.id.clone(),
            symbol: order.symbol.clone(),
            side: order.side,
            intended_size: order.quantity,
            filled_size,
            avg_fill_price,
            arrival_price,
            benchmark_price,
            cost_components,
            implementation_shortfall,
            duration_ms,
        }
    }

    /// Decomposes realized execution costs. Components are reported as
    /// non-negative magnitudes and summed into `total_cost` / `cost_bps`.
    fn calculate_cost_components(
        &self,
        order: &Order,
        fills: &[Fill],
        avg_fill_price: f64,
        arrival_price: f64,
        benchmark_price: f64,
        microstructure: &MarketMicrostructure,
    ) -> CostComponents {
        let filled_size = fills.iter().map(|f| f.quantity).sum::<f64>();
        let total_value = filled_size * avg_fill_price;

        // Spread cost (crossing the spread).
        let spread_cost = filled_size * avg_fill_price * microstructure.avg_spread_bps / 10000.0;

        // Market impact (price movement due to our order).
        let side_multiplier = match order.side {
            Side::Buy => 1.0,
            Side::Sell => -1.0,
        };
        let market_impact = side_multiplier * filled_size * (avg_fill_price - arrival_price);

        // Commission and fees; the per-fill minimum applies per execution.
        let gross_commission = total_value * self.commission_rate_bps / 10000.0;
        let commission = gross_commission.max(self.min_commission * fills.len() as f64);
        let exchange_fees = total_value * self.exchange_fees_bps / 10000.0;
        let regulatory_fees = total_value * self.regulatory_fees_bps / 10000.0;
        let total_fees = commission + exchange_fees + regulatory_fees;

        // Slippage (difference from benchmark).
        let slippage = side_multiplier * filled_size * (avg_fill_price - benchmark_price);

        // Opportunity cost (unfilled portion).
        let unfilled_size = order.quantity - filled_size;
        let opportunity_cost = if unfilled_size > 0.0 {
            // Cost of not executing at arrival price
            side_multiplier * unfilled_size * (benchmark_price - arrival_price)
        } else {
            0.0
        };

        // Timing (delay) cost: adverse drift between arrival and benchmark.
        // The adverse direction depends on the side, so the side multiplier
        // must be applied BEFORE clamping at zero. The previous version
        // clamped the raw price difference first, which gave sell orders a
        // zero-or-negative timing cost regardless of which way the price
        // actually moved.
        let timing_cost = filled_size *
            (side_multiplier * (benchmark_price - arrival_price)).max(0.0);

        // Total cost
        let total_cost = spread_cost + market_impact.abs() + total_fees +
            slippage.abs() + opportunity_cost.abs() + timing_cost;

        // Cost in basis points
        let cost_bps = if total_value > 0.0 {
            (total_cost / total_value) * 10000.0
        } else {
            0.0
        };

        CostComponents {
            spread_cost,
            market_impact: market_impact.abs(),
            commission: total_fees,
            slippage: slippage.abs(),
            opportunity_cost: opportunity_cost.abs(),
            timing_cost,
            total_cost,
            cost_bps,
        }
    }

    /// Pre-trade estimate of execution costs given an expected fill price
    /// and fill rate. Slippage and timing cost are zero by construction
    /// (no realized benchmark yet).
    pub fn calculate_pretrade_cost_estimate(
        &self,
        order: &Order,
        microstructure: &MarketMicrostructure,
        current_price: f64,
        expected_fill_price: f64,
        expected_fill_rate: f64,
    ) -> CostComponents {
        let expected_filled_size = order.quantity * expected_fill_rate;
        let total_value = expected_filled_size * expected_fill_price;

        // Estimate spread cost
        let spread_cost = expected_filled_size * expected_fill_price *
            microstructure.avg_spread_bps / 10000.0;

        // Estimate market impact
        let side_multiplier = match order.side {
            Side::Buy => 1.0,
            Side::Sell => -1.0,
        };
        let market_impact = side_multiplier * expected_filled_size *
            (expected_fill_price - current_price);

        // Calculate commission
        let gross_commission = total_value * self.commission_rate_bps / 10000.0;
        let commission = gross_commission.max(self.min_commission);
        let exchange_fees = total_value * self.exchange_fees_bps / 10000.0;
        let regulatory_fees = total_value * self.regulatory_fees_bps / 10000.0;
        let total_fees = commission + exchange_fees + regulatory_fees;

        // Estimate opportunity cost for unfilled portion
        let unfilled_size = order.quantity - expected_filled_size;
        let opportunity_cost = if unfilled_size > 0.0 {
            // Assume 10bps adverse movement for unfilled portion
            unfilled_size * current_price * 0.001
        } else {
            0.0
        };

        // No slippage or timing cost for pre-trade estimate
        let slippage = 0.0;
        let timing_cost = 0.0;

        // Total cost
        let total_cost = spread_cost + market_impact.abs() + total_fees + opportunity_cost;

        // Cost in basis points
        let cost_bps = if total_value > 0.0 {
            (total_cost / total_value) * 10000.0
        } else {
            0.0
        };

        CostComponents {
            spread_cost,
            market_impact: market_impact.abs(),
            commission: total_fees,
            slippage,
            opportunity_cost,
            timing_cost,
            total_cost,
            cost_bps,
        }
    }
}
/// Reference prices used to benchmark an execution (see `BenchmarkType`).
/// All fields default to 0.0, meaning "not available" — the derived
/// `Default` is identical to the previous hand-written impl.
#[derive(Debug, Clone, Default)]
pub struct BenchmarkPrices {
    pub vwap: f64,
    pub twap: f64,
    pub close: f64,
    pub midpoint_at_arrival: f64,
}
/// Accumulates trade prints and quotes, and computes execution benchmarks
/// (VWAP, TWAP, close, arrival midpoint) over a time window.
pub struct BenchmarkCalculator {
    trades: Vec<(DateTime<Utc>, f64, f64)>, // (time, price, volume)
    quotes: Vec<(DateTime<Utc>, f64, f64)>, // (time, bid, ask)
}
impl BenchmarkCalculator {
    /// Creates an empty calculator with no recorded trades or quotes.
    pub fn new() -> Self {
        Self {
            trades: Vec::new(),
            quotes: Vec::new(),
        }
    }

    /// Records a trade print.
    pub fn add_trade(&mut self, time: DateTime<Utc>, price: f64, volume: f64) {
        self.trades.push((time, price, volume));
    }

    /// Records a top-of-book quote.
    pub fn add_quote(&mut self, time: DateTime<Utc>, bid: f64, ask: f64) {
        self.quotes.push((time, bid, ask));
    }

    /// Computes VWAP / TWAP / close over [start_time, end_time] and the
    /// midpoint of the last quote at or before `start_time`.
    ///
    /// Improved to a single pass over `trades` (the previous version cloned
    /// the filtered window into a Vec and re-scanned it three times); the
    /// accumulation order is unchanged, so results are bit-identical.
    /// Missing data yields 0.0.
    pub fn calculate_benchmarks(
        &self,
        start_time: DateTime<Utc>,
        end_time: DateTime<Utc>,
    ) -> BenchmarkPrices {
        let mut total_volume = 0.0;
        let mut pv_sum = 0.0;    // sum of price * volume, for VWAP
        let mut price_sum = 0.0; // sum of prices, for TWAP
        let mut count = 0usize;  // number of in-window trades
        let mut close = 0.0;     // last in-window trade price

        for (t, price, volume) in &self.trades {
            if *t >= start_time && *t <= end_time {
                total_volume += volume;
                pv_sum += price * volume;
                price_sum += price;
                count += 1;
                close = *price;
            }
        }

        let vwap = if total_volume > 0.0 { pv_sum / total_volume } else { 0.0 };
        let twap = if count > 0 { price_sum / count as f64 } else { 0.0 };

        // Last quote at or before order arrival gives the arrival midpoint.
        let midpoint_at_arrival = self.quotes.iter()
            .rev()
            .find(|(t, _, _)| *t <= start_time)
            .map(|(_, b, a)| (b + a) / 2.0)
            .unwrap_or(0.0);

        BenchmarkPrices {
            vwap,
            twap,
            close,
            midpoint_at_arrival,
        }
    }
}

View file

@ -0,0 +1,326 @@
use napi_derive::napi;
use napi::{bindgen_prelude::*, JsObject};
use crate::{
TradingCore, TradingMode, Order, OrderType, TimeInForce, Side,
MarketUpdate, Quote, Trade,
MarketMicrostructure,
core::{create_market_data_source, create_execution_handler, create_time_provider},
};
use crate::risk::RiskLimits;
use std::sync::Arc;
use parking_lot::Mutex;
use chrono::{DateTime, Utc};
/// N-API bridge exposing the Rust trading core to the JS/TS orchestrator.
/// All state lives behind a single mutex-guarded `TradingCore`.
#[napi]
pub struct TradingEngine {
    core: Arc<Mutex<TradingCore>>,
}
#[napi]
impl TradingEngine {
    /// Builds an engine for "backtest", "paper", or "live" mode; the mode
    /// string selects which fields of `config` are read (see `parse_mode`).
    #[napi(constructor)]
    pub fn new(mode: String, config: JsObject) -> Result<Self> {
        let mode = parse_mode(&mode, config)?;
        let market_data_source = create_market_data_source(&mode);
        let execution_handler = create_execution_handler(&mode);
        let time_provider = create_time_provider(&mode);
        let core = TradingCore::new(mode, market_data_source, execution_handler, time_provider);
        Ok(Self {
            core: Arc::new(Mutex::new(core)),
        })
    }

    /// Returns the current mode as "backtest" / "paper" / "live".
    #[napi]
    pub fn get_mode(&self) -> String {
        let core = self.core.lock();
        match core.get_mode() {
            TradingMode::Backtest { .. } => "backtest".to_string(),
            TradingMode::Paper { .. } => "paper".to_string(),
            TradingMode::Live { .. } => "live".to_string(),
        }
    }

    /// Engine time (simulated in backtest) as epoch milliseconds.
    #[napi]
    pub fn get_current_time(&self) -> i64 {
        let core = self.core.lock();
        core.get_time().timestamp_millis()
    }

    /// Accepts an order and returns a JSON `ExecutionResult`. Currently a
    /// stub: the order is acknowledged as Accepted without being queued.
    #[napi]
    pub fn submit_order(&self, order_js: JsObject) -> Result<String> {
        let order = parse_order(order_js)?;
        // For now, return a mock result - in real implementation would queue the order
        let result = crate::ExecutionResult {
            order_id: order.id.clone(),
            status: crate::OrderStatus::Accepted,
            fills: vec![],
        };
        Ok(serde_json::to_string(&result).unwrap())
    }

    /// Runs the risk engine's pre-trade check for the order against the
    /// current position in its symbol; returns the JSON check result.
    #[napi]
    pub fn check_risk(&self, order_js: JsObject) -> Result<String> {
        let order = parse_order(order_js)?;
        let core = self.core.lock();
        // Get current position for the symbol
        let position = core.position_tracker.get_position(&order.symbol);
        let current_quantity = position.map(|p| p.quantity);
        let result = core.risk_engine.check_order(&order, current_quantity);
        Ok(serde_json::to_string(&result).unwrap())
    }

    /// Pushes a new top-of-book quote and re-marks unrealized P&L at the
    /// resulting mid price.
    #[napi]
    pub fn update_quote(&self, symbol: String, bid: f64, ask: f64, bid_size: f64, ask_size: f64) -> Result<()> {
        let quote = Quote { bid, ask, bid_size, ask_size };
        let core = self.core.lock();
        let timestamp = core.get_time();
        core.orderbooks.update_quote(&symbol, quote, timestamp);
        // Update unrealized P&L
        let mid_price = (bid + ask) / 2.0;
        core.position_tracker.update_unrealized_pnl(&symbol, mid_price);
        Ok(())
    }

    /// Pushes a trade print; `side` accepts "buy"/"Buy"/"sell"/"Sell".
    #[napi]
    pub fn update_trade(&self, symbol: String, price: f64, size: f64, side: String) -> Result<()> {
        let side = match side.as_str() {
            "buy" | "Buy" => Side::Buy,
            "sell" | "Sell" => Side::Sell,
            _ => return Err(Error::from_reason("Invalid side")),
        };
        let trade = Trade { price, size, side };
        let core = self.core.lock();
        let timestamp = core.get_time();
        core.orderbooks.update_trade(&symbol, trade, timestamp);
        Ok(())
    }

    /// JSON book snapshot limited to `depth` levels; errors if the symbol
    /// has no book.
    #[napi]
    pub fn get_orderbook_snapshot(&self, symbol: String, depth: u32) -> Result<String> {
        let core = self.core.lock();
        let snapshot = core.orderbooks.get_snapshot(&symbol, depth as usize)
            .ok_or_else(|| Error::from_reason("Symbol not found"))?;
        Ok(serde_json::to_string(&snapshot).unwrap())
    }

    /// Returns [best_bid, best_ask]; errors if the symbol has no book.
    #[napi]
    pub fn get_best_bid_ask(&self, symbol: String) -> Result<Vec<f64>> {
        let core = self.core.lock();
        let (bid, ask) = core.orderbooks.get_best_bid_ask(&symbol)
            .ok_or_else(|| Error::from_reason("Symbol not found"))?;
        Ok(vec![bid, ask])
    }

    /// JSON position for one symbol, or None when flat/unknown.
    #[napi]
    pub fn get_position(&self, symbol: String) -> Result<Option<String>> {
        let core = self.core.lock();
        let position = core.position_tracker.get_position(&symbol);
        Ok(position.map(|p| serde_json::to_string(&p).unwrap()))
    }

    /// JSON list of all tracked positions.
    #[napi]
    pub fn get_all_positions(&self) -> Result<String> {
        let core = self.core.lock();
        let positions = core.position_tracker.get_all_positions();
        Ok(serde_json::to_string(&positions).unwrap())
    }

    /// JSON list of currently open positions only.
    #[napi]
    pub fn get_open_positions(&self) -> Result<String> {
        let core = self.core.lock();
        let positions = core.position_tracker.get_open_positions();
        Ok(serde_json::to_string(&positions).unwrap())
    }

    /// Returns [realized_pnl, unrealized_pnl].
    #[napi]
    pub fn get_total_pnl(&self) -> Result<Vec<f64>> {
        let core = self.core.lock();
        let (realized, unrealized) = core.position_tracker.get_total_pnl();
        Ok(vec![realized, unrealized])
    }

    /// Applies an execution fill: updates the position, syncs the risk
    /// engine's position and daily P&L, and returns the JSON update.
    #[napi]
    pub fn process_fill(&self, symbol: String, price: f64, quantity: f64, side: String, commission: f64) -> Result<String> {
        let side = match side.as_str() {
            "buy" | "Buy" => Side::Buy,
            "sell" | "Sell" => Side::Sell,
            _ => return Err(Error::from_reason("Invalid side")),
        };
        let core = self.core.lock();
        let timestamp = core.get_time();
        let fill = crate::Fill {
            timestamp,
            price,
            quantity,
            commission,
        };
        let update = core.position_tracker.process_fill(&symbol, &fill, side);
        // Update risk engine with new position
        core.risk_engine.update_position(&symbol, update.resulting_position.quantity);
        // Update daily P&L only when this fill realized something.
        if update.resulting_position.realized_pnl != 0.0 {
            core.risk_engine.update_daily_pnl(update.resulting_position.realized_pnl);
        }
        Ok(serde_json::to_string(&update).unwrap())
    }

    /// Replaces the active risk limits (see `parse_risk_limits` for the
    /// expected JS object shape).
    #[napi]
    pub fn update_risk_limits(&self, limits_js: JsObject) -> Result<()> {
        let limits = parse_risk_limits(limits_js)?;
        let core = self.core.lock();
        core.risk_engine.update_limits(limits);
        Ok(())
    }

    /// Resets day-scoped risk counters (call at session rollover).
    #[napi]
    pub fn reset_daily_metrics(&self) -> Result<()> {
        let core = self.core.lock();
        core.risk_engine.reset_daily_metrics();
        Ok(())
    }

    /// JSON snapshot of current risk metrics.
    #[napi]
    pub fn get_risk_metrics(&self) -> Result<String> {
        let core = self.core.lock();
        let metrics = core.risk_engine.get_risk_metrics();
        Ok(serde_json::to_string(&metrics).unwrap())
    }

    // Backtest-specific methods

    /// Advances simulated time; errors outside backtest mode. Currently a
    /// stub that only validates the mode.
    #[napi]
    pub fn advance_time(&self, _to_timestamp: i64) -> Result<()> {
        let core = self.core.lock();
        if let TradingMode::Backtest { .. } = core.get_mode() {
            // In real implementation, would downcast and advance time
            // For now, return success in backtest mode
            Ok(())
        } else {
            Err(Error::from_reason("Can only advance time in backtest mode"))
        }
    }

    /// Validates and (eventually) stores per-symbol microstructure for fill
    /// simulation. Currently only parses the JSON and discards it.
    #[napi]
    pub fn set_microstructure(&self, _symbol: String, microstructure_json: String) -> Result<()> {
        let _microstructure: MarketMicrostructure = serde_json::from_str(&microstructure_json)
            .map_err(|e| Error::from_reason(format!("Failed to parse microstructure: {}", e)))?;
        let _core = self.core.lock();
        // Store microstructure for use in fill simulation
        // In real implementation, would pass to execution handler
        Ok(())
    }

    /// Validates a JSON array of market updates for backtesting. Currently
    /// only parses and discards the data.
    #[napi]
    pub fn load_historical_data(&self, data_json: String) -> Result<()> {
        let _data: Vec<MarketUpdate> = serde_json::from_str(&data_json)
            .map_err(|e| Error::from_reason(format!("Failed to parse data: {}", e)))?;
        // In real implementation, would load into historical data source
        Ok(())
    }
}
// Helper functions to parse JavaScript objects

/// Builds a `TradingMode` from a mode string plus a JS config object.
///
/// - "backtest": requires `startTime` / `endTime` (epoch millis); optional
///   `speedMultiplier` defaults to 1.0.
/// - "paper": requires `startingCapital`.
/// - "live": requires `broker` and `accountId`.
fn parse_mode(mode_str: &str, config: JsObject) -> Result<TradingMode> {
    match mode_str {
        "backtest" => {
            let start_time: i64 = config.get_named_property("startTime")?;
            let end_time: i64 = config.get_named_property("endTime")?;
            // Missing multiplier means real-time replay speed.
            let speed_multiplier: f64 = config.get_named_property("speedMultiplier")
                .unwrap_or(1.0);
            Ok(TradingMode::Backtest {
                start_time: DateTime::<Utc>::from_timestamp_millis(start_time)
                    .ok_or_else(|| Error::from_reason("Invalid start time"))?,
                end_time: DateTime::<Utc>::from_timestamp_millis(end_time)
                    .ok_or_else(|| Error::from_reason("Invalid end time"))?,
                speed_multiplier,
            })
        }
        "paper" => {
            let starting_capital: f64 = config.get_named_property("startingCapital")?;
            Ok(TradingMode::Paper { starting_capital })
        }
        "live" => {
            let broker: String = config.get_named_property("broker")?;
            let account_id: String = config.get_named_property("accountId")?;
            Ok(TradingMode::Live { broker, account_id })
        }
        _ => Err(Error::from_reason("Invalid mode")),
    }
}
/// Converts a JS order object into a core `Order`.
///
/// Expected fields: `id`, `symbol`, `side` ("buy"/"Buy"/"sell"/"Sell"),
/// `quantity`, `orderType` ("market" or "limit", either capitalization,
/// with `limitPrice` required for limits), and optional `timeInForce`
/// (defaults to DAY; unknown values also fall back to DAY).
fn parse_order(order_js: JsObject) -> Result<Order> {
    let id: String = order_js.get_named_property("id")?;
    let symbol: String = order_js.get_named_property("symbol")?;
    let side_str: String = order_js.get_named_property("side")?;
    let side = match side_str.as_str() {
        "buy" | "Buy" => Side::Buy,
        "sell" | "Sell" => Side::Sell,
        _ => return Err(Error::from_reason("Invalid side")),
    };
    let quantity: f64 = order_js.get_named_property("quantity")?;
    let order_type_str: String = order_js.get_named_property("orderType")?;
    // Accept both capitalizations, consistent with the `side` parsing above
    // (previously only lowercase was recognized here).
    let order_type = match order_type_str.as_str() {
        "market" | "Market" => OrderType::Market,
        "limit" | "Limit" => {
            let price: f64 = order_js.get_named_property("limitPrice")?;
            OrderType::Limit { price }
        }
        _ => return Err(Error::from_reason("Invalid order type")),
    };
    let time_in_force_str: String = order_js.get_named_property("timeInForce")
        .unwrap_or_else(|_| "DAY".to_string());
    // Unknown TIF strings deliberately fall back to Day rather than erroring.
    let time_in_force = match time_in_force_str.as_str() {
        "DAY" => TimeInForce::Day,
        "GTC" => TimeInForce::GTC,
        "IOC" => TimeInForce::IOC,
        "FOK" => TimeInForce::FOK,
        _ => TimeInForce::Day,
    };
    Ok(Order {
        id,
        symbol,
        side,
        quantity,
        order_type,
        time_in_force,
    })
}
/// Reads the five risk-limit fields from a JS object into `RiskLimits`.
/// Property types are inferred from the `RiskLimits` field types; a missing
/// or mistyped property surfaces as an `Err` via `?`.
fn parse_risk_limits(limits_js: JsObject) -> Result<RiskLimits> {
    Ok(RiskLimits {
        max_position_size: limits_js.get_named_property("maxPositionSize")?,
        max_order_size: limits_js.get_named_property("maxOrderSize")?,
        max_daily_loss: limits_js.get_named_property("maxDailyLoss")?,
        max_gross_exposure: limits_js.get_named_property("maxGrossExposure")?,
        max_symbol_exposure: limits_js.get_named_property("maxSymbolExposure")?,
    })
}

View file

@ -0,0 +1,282 @@
use crate::{ExecutionHandler, FillSimulator, Order, ExecutionResult, OrderStatus, Fill, OrderBookSnapshot, OrderType, Side, MarketMicrostructure};
use crate::analytics::{MarketImpactModel, ImpactModelType};
use chrono::Utc;
use parking_lot::Mutex;
use std::collections::HashMap;
// Simulated execution for backtest and paper trading
pub struct SimulatedExecution {
    fill_simulator: Box<dyn FillSimulator>,        // pluggable fill logic
    pending_orders: Mutex<HashMap<String, Order>>, // resting limit orders by id
}
impl SimulatedExecution {
    /// Creates a simulated execution venue backed by the given fill logic.
    pub fn new(fill_simulator: Box<dyn FillSimulator>) -> Self {
        Self {
            fill_simulator,
            pending_orders: Mutex::new(HashMap::new()),
        }
    }

    /// Re-evaluates every resting order against the latest book snapshot.
    /// Orders that the simulator fills are removed from the pending map and
    /// reported as `Filled` results; the rest remain pending.
    pub fn check_pending_orders(&self, orderbook: &OrderBookSnapshot) -> Vec<ExecutionResult> {
        let mut executed = Vec::new();
        let mut book = self.pending_orders.lock();
        book.retain(|id, resting| {
            match self.fill_simulator.simulate_fill(resting, orderbook) {
                Some(fill) => {
                    executed.push(ExecutionResult {
                        order_id: id.clone(),
                        status: OrderStatus::Filled,
                        fills: vec![fill],
                    });
                    false // drop the filled order from the pending map
                }
                None => true, // no fill yet; keep it resting
            }
        });
        executed
    }
}
#[async_trait::async_trait]
impl ExecutionHandler for SimulatedExecution {
    /// Accepts an order into the simulation.
    ///
    /// Market orders are returned as `Pending` with no fills here — the
    /// orchestrator supplies the order book afterwards and fills happen via
    /// the fill simulator. Limit orders are parked in `pending_orders`
    /// until `check_pending_orders` sees the book cross them.
    async fn execute_order(&mut self, order: Order) -> Result<ExecutionResult, String> {
        // For market orders, execute immediately
        // For limit orders, add to pending
        match &order.order_type {
            OrderType::Market => {
                // Market orders are acknowledged Pending here; the
                // orchestrator will provide the orderbook for realistic fills.
                Ok(ExecutionResult {
                    order_id: order.id.clone(),
                    status: OrderStatus::Pending,
                    fills: vec![],
                })
            }
            OrderType::Limit { .. } => {
                self.pending_orders.lock().insert(order.id.clone(), order.clone());
                Ok(ExecutionResult {
                    order_id: order.id,
                    status: OrderStatus::Accepted,
                    fills: vec![],
                })
            }
            // Stop and other order types are not simulated yet.
            _ => Err("Order type not yet implemented".to_string()),
        }
    }

    fn get_fill_simulator(&self) -> Option<&dyn FillSimulator> {
        Some(&*self.fill_simulator)
    }
}
// Backtest fill simulator - uses historical data
pub struct BacktestFillSimulator {
    slippage_model: SlippageModel,   // fallback when no microstructure is cached
    impact_model: MarketImpactModel, // primary impact-aware pricing
    microstructure_cache: Mutex<HashMap<String, MarketMicrostructure>>, // per-symbol stats
}

impl BacktestFillSimulator {
    /// Creates a simulator with a square-root impact model (the empirical
    /// default) and an empty microstructure cache.
    pub fn new() -> Self {
        Self {
            slippage_model: SlippageModel::default(),
            impact_model: MarketImpactModel::new(ImpactModelType::SquareRoot),
            microstructure_cache: Mutex::new(HashMap::new()),
        }
    }

    /// Builder-style override of the impact model.
    pub fn with_impact_model(mut self, model_type: ImpactModelType) -> Self {
        self.impact_model = MarketImpactModel::new(model_type);
        self
    }

    /// Caches per-symbol microstructure used for impact-aware fills.
    pub fn set_microstructure(&self, symbol: String, microstructure: MarketMicrostructure) {
        self.microstructure_cache.lock().insert(symbol, microstructure);
    }
}

// `new` takes no arguments, so provide the conventional `Default` impl
// (clippy::new_without_default) — behavior is identical to `new()`.
impl Default for BacktestFillSimulator {
    fn default() -> Self {
        Self::new()
    }
}
impl FillSimulator for BacktestFillSimulator {
    /// Simulates a fill against the given book snapshot.
    ///
    /// Market orders fill for the full quantity at the touch of the side
    /// being hit, worsened by the impact model when per-symbol
    /// microstructure is cached, otherwise by the simple slippage model.
    /// Limit orders fill at the limit price only when the opposite touch
    /// crosses it. Other order types never fill. Returns `None` when the
    /// needed book side is empty.
    fn simulate_fill(&self, order: &Order, orderbook: &OrderBookSnapshot) -> Option<Fill> {
        match &order.order_type {
            OrderType::Market => {
                // Get market microstructure if available
                let microstructure_guard = self.microstructure_cache.lock();
                let maybe_microstructure = microstructure_guard.get(&order.symbol);
                // Calculate price with market impact
                let (price, _impact) = if let Some(microstructure) = maybe_microstructure {
                    // Use sophisticated market impact model; impact is
                    // estimated against the book side we would consume.
                    let impact_estimate = self.impact_model.estimate_impact(
                        order.quantity,
                        order.side,
                        microstructure,
                        match order.side {
                            Side::Buy => &orderbook.asks,
                            Side::Sell => &orderbook.bids,
                        },
                        Utc::now(),
                    );
                    let base_price = match order.side {
                        Side::Buy => orderbook.asks.first()?.price,
                        Side::Sell => orderbook.bids.first()?.price,
                    };
                    // Worsen the touch by the estimated total impact (bps).
                    let impact_price = match order.side {
                        Side::Buy => base_price * (1.0 + impact_estimate.total_impact / 10000.0),
                        Side::Sell => base_price * (1.0 - impact_estimate.total_impact / 10000.0),
                    };
                    (impact_price, impact_estimate.total_impact)
                } else {
                    // Fallback to simple slippage model
                    match order.side {
                        Side::Buy => {
                            let base_price = orderbook.asks.first()?.price;
                            let slippage = self.slippage_model.calculate_slippage(order.quantity, &orderbook.asks);
                            (base_price + slippage, slippage * 10000.0 / base_price)
                        }
                        Side::Sell => {
                            let base_price = orderbook.bids.first()?.price;
                            let slippage = self.slippage_model.calculate_slippage(order.quantity, &orderbook.bids);
                            (base_price - slippage, slippage * 10000.0 / base_price)
                        }
                    }
                };
                // Calculate realistic commission
                let commission_rate = 0.0005; // 5 bps for institutional
                let min_commission = 1.0;
                let commission = (order.quantity * price * commission_rate).max(min_commission);
                Some(Fill {
                    timestamp: Utc::now(), // Will be overridden by backtest engine
                    price,
                    quantity: order.quantity,
                    commission,
                })
            }
            OrderType::Limit { price: limit_price } => {
                // Check if limit can be filled.
                // NOTE(review): limit fills charge 10 bps commission with no
                // minimum, while market fills charge 5 bps with a $1 floor —
                // confirm this asymmetry is intentional.
                match order.side {
                    Side::Buy => {
                        if orderbook.asks.first()?.price <= *limit_price {
                            Some(Fill {
                                timestamp: Utc::now(),
                                price: *limit_price,
                                quantity: order.quantity,
                                commission: order.quantity * limit_price * 0.001,
                            })
                        } else {
                            None
                        }
                    }
                    Side::Sell => {
                        if orderbook.bids.first()?.price >= *limit_price {
                            Some(Fill {
                                timestamp: Utc::now(),
                                price: *limit_price,
                                quantity: order.quantity,
                                commission: order.quantity * limit_price * 0.001,
                            })
                        } else {
                            None
                        }
                    }
                }
            }
            _ => None,
        }
    }
}
// Paper trading fill simulator - uses real order book
// Paper trading fill simulator - uses real order book
pub struct PaperFillSimulator {
    use_real_orderbook: bool, // fill against the live book rather than synthetic data
    add_latency_ms: u64,      // simulated network/processing latency in ms
}

impl PaperFillSimulator {
    /// Creates a simulator configured for realistic paper trading:
    /// real order book data plus 100ms of simulated latency.
    pub fn new() -> Self {
        Self {
            use_real_orderbook: true,
            add_latency_ms: 100, // Simulate 100ms latency
        }
    }
}

// `#![deny(clippy::all)]` rejects a no-arg `new()` without a `Default`
// impl (clippy::new_without_default), and `Default` is useful to callers.
impl Default for PaperFillSimulator {
    fn default() -> Self {
        Self::new()
    }
}
impl FillSimulator for PaperFillSimulator {
    /// Paper-trading fill simulation.
    ///
    /// Intended to eventually model real book depth, latency, and
    /// liquidity-aware position limits; for now it delegates to the
    /// backtest simulator, so behavior matches `BacktestFillSimulator`
    /// exactly.
    fn simulate_fill(&self, order: &Order, orderbook: &OrderBookSnapshot) -> Option<Fill> {
        let backtest_simulator = BacktestFillSimulator::new();
        backtest_simulator.simulate_fill(order, orderbook)
    }
}
/// Real broker execution for live trading.
pub struct BrokerExecution {
    broker: String,     // broker identifier (e.g. API / venue name)
    account_id: String, // account the orders are routed to
    // In real implementation, would have broker API client
}

impl BrokerExecution {
    /// Builds an execution handler that routes to `broker` / `account_id`.
    pub fn new(broker: String, account_id: String) -> Self {
        BrokerExecution { broker, account_id }
    }
}
#[async_trait::async_trait]
impl ExecutionHandler for BrokerExecution {
    /// Submits `order` to the broker.
    ///
    /// Placeholder: no broker API call is made yet; always reports the
    /// order as `Pending` with no fills.
    async fn execute_order(&mut self, order: Order) -> Result<ExecutionResult, String> {
        // In real implementation, would:
        // 1. Connect to broker API
        // 2. Submit order
        // 3. Handle broker responses
        // 4. Track order status
        // Placeholder for now
        Ok(ExecutionResult {
            order_id: order.id,
            status: OrderStatus::Pending,
            fills: vec![],
        })
    }

    /// Live trading executes against the real market, so there is no
    /// fill simulator to expose.
    fn get_fill_simulator(&self) -> Option<&dyn FillSimulator> {
        None // Real broker doesn't simulate
    }
}
// Slippage model for realistic fills
#[derive(Default)]
struct SlippageModel {
    base_slippage_bps: f64,  // reserved: fixed slippage floor in bps (not yet applied)
    impact_coefficient: f64, // scales the participation-rate impact
}

impl SlippageModel {
    /// Estimates price slippage (in price units) for taking `quantity` out
    /// of the book side given by `levels` (best level first).
    ///
    /// Linear impact: one level-gap's worth of spread scaled by the
    /// participation rate. Returns 0.0 for an empty book instead of
    /// panicking (the original indexed `levels[0]` unconditionally, which
    /// panics when no levels are present).
    fn calculate_slippage(&self, quantity: f64, levels: &[crate::PriceLevel]) -> f64 {
        // Simple linear impact model
        // In reality would use square-root or more sophisticated model
        let top = match levels.first() {
            Some(level) => level,
            None => return 0.0, // no liquidity information -> no estimate
        };
        let total_liquidity: f64 = levels.iter().map(|l| l.size).sum();
        let participation_rate = quantity / total_liquidity.max(1.0);
        let spread = match levels.get(1) {
            Some(second) => (second.price - top.price).abs(),
            None => top.price * 0.0001, // 1 bps if only one level
        };
        spread * participation_rate * self.impact_coefficient
    }
}

View file

@ -0,0 +1,111 @@
use crate::{MarketDataSource, MarketUpdate};
use chrono::{DateTime, Utc};
use parking_lot::Mutex;
use std::collections::VecDeque;
// Historical data source for backtesting
pub struct HistoricalDataSource {
    data_queue: Mutex<VecDeque<MarketUpdate>>, // time-ordered updates to replay
    current_position: Mutex<usize>,            // replay cursor into `data_queue`
}

impl HistoricalDataSource {
    /// Creates an empty source; call [`load_data`](Self::load_data) before replay.
    pub fn new() -> Self {
        Self {
            data_queue: Mutex::new(VecDeque::new()),
            current_position: Mutex::new(0),
        }
    }

    // This would be called by the orchestrator to load data.
    // Replaces any previously loaded updates and rewinds the cursor.
    // NOTE(review): assumes `data` is sorted by timestamp — `seek_to_time`
    // binary-searches it; confirm with the orchestrator.
    pub fn load_data(&self, data: Vec<MarketUpdate>) {
        let mut queue = self.data_queue.lock();
        queue.clear();
        queue.extend(data);
        *self.current_position.lock() = 0;
    }
}

// `#![deny(clippy::all)]` rejects a no-arg `new()` without a `Default`
// impl (clippy::new_without_default).
impl Default for HistoricalDataSource {
    fn default() -> Self {
        Self::new()
    }
}
#[async_trait::async_trait]
impl MarketDataSource for HistoricalDataSource {
    /// Returns the update at the cursor and advances it, or `None` once
    /// the loaded data is exhausted. Non-destructive: the queue is never
    /// drained, so the cursor can be rewound with `seek_to_time`.
    async fn get_next_update(&mut self) -> Option<MarketUpdate> {
        let queue = self.data_queue.lock();
        let mut position = self.current_position.lock();
        if *position < queue.len() {
            let update = queue[*position].clone();
            *position += 1;
            Some(update)
        } else {
            None
        }
    }

    /// Moves the cursor to the first update at or after `timestamp`.
    /// Relies on the loaded data being sorted by timestamp; with duplicate
    /// timestamps the cursor may land on any one of the equal entries.
    fn seek_to_time(&mut self, timestamp: DateTime<Utc>) -> Result<(), String> {
        let queue = self.data_queue.lock();
        let mut position = self.current_position.lock();
        // Binary search for the timestamp
        match queue.binary_search_by_key(&timestamp, |update| update.timestamp) {
            Ok(pos) => {
                *position = pos;
                Ok(())
            }
            Err(pos) => {
                // Position where it would be inserted
                *position = pos;
                Ok(())
            }
        }
    }

    // Downcasting hooks so the orchestrator can reach `load_data`.
    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }
}
// Live data source for paper and live trading
pub struct LiveDataSource {
// Channel to receive data from the orchestrator
data_receiver: tokio::sync::Mutex<Option<tokio::sync::mpsc::Receiver<MarketUpdate>>>,
}
impl LiveDataSource {
pub fn new() -> Self {
Self {
data_receiver: tokio::sync::Mutex::new(None),
}
}
pub async fn set_receiver(&self, receiver: tokio::sync::mpsc::Receiver<MarketUpdate>) {
*self.data_receiver.lock().await = Some(receiver);
}
}
#[async_trait::async_trait]
impl MarketDataSource for LiveDataSource {
    /// Awaits the next update from the attached channel. Yields `None`
    /// when no receiver has been attached yet or the channel has closed.
    async fn get_next_update(&mut self) -> Option<MarketUpdate> {
        let mut guard = self.data_receiver.lock().await;
        match guard.as_mut() {
            Some(receiver) => receiver.recv().await,
            None => None,
        }
    }

    /// Live data flows in real time; seeking is meaningless here.
    fn seek_to_time(&mut self, _timestamp: DateTime<Utc>) -> Result<(), String> {
        Err("Cannot seek in live data source".to_string())
    }

    // Downcasting hooks mirroring the historical source.
    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
    fn as_any_mut(&mut self) -> &mut dyn std::any::Any {
        self
    }
}

View file

@ -0,0 +1,476 @@
use crate::{MarketMicrostructure, PriceLevel, Quote, Trade, Bar, Side};
use chrono::{DateTime, Utc, Duration, Timelike};
use rand::prelude::*;
use rand_distr::{Normal, Pareto, Beta};
/// Rebuilds a plausible multi-level order book from sparse trade/quote data.
pub struct OrderBookReconstructor {
    tick_size: f64,            // minimum price increment for this instrument
    lot_size: f64,             // minimum size increment
    num_levels: usize,         // book depth generated per side
    spread_model: SpreadModel, // how the bid/ask spread is modeled
    depth_model: DepthModel,   // how displayed size decays with depth
}

#[derive(Clone)]
pub enum SpreadModel {
    /// Constant spread of `spread_ticks` ticks.
    Fixed { spread_ticks: u32 },
    /// Spread widens with volatility around a base level in bps.
    Dynamic { base_bps: f64, volatility_factor: f64 },
    /// Spread reacts to informed flow, decaying back toward the base level.
    InformedTrader { base_bps: f64, information_decay: f64 },
}

#[derive(Clone)]
pub enum DepthModel {
    /// Size decreases by `decay_rate` per level from `base_size`.
    Linear { base_size: f64, decay_rate: f64 },
    /// Size shrinks geometrically: `base_size * decay_factor^level`.
    Exponential { base_size: f64, decay_factor: f64 },
    /// Power-law decay: `x_min * (level + 1)^-alpha`.
    PowerLaw { alpha: f64, x_min: f64 },
}
impl OrderBookReconstructor {
    /// Creates a reconstructor with defaults: 10 levels per side, a
    /// dynamic 2 bps base spread, and exponentially decaying depth.
    pub fn new(tick_size: f64, lot_size: f64) -> Self {
        Self {
            tick_size,
            lot_size,
            num_levels: 10,
            spread_model: SpreadModel::Dynamic {
                base_bps: 2.0,
                volatility_factor: 1.5
            },
            depth_model: DepthModel::Exponential {
                base_size: 1000.0,
                decay_factor: 0.7
            },
        }
    }

    /// Reconstructs a synthetic `(bids, asks)` book as of `timestamp`.
    ///
    /// Uses the most recent quote at or before `timestamp` when available,
    /// otherwise falls back to trades from the preceding 5 minutes, and
    /// returns an empty book when there is no usable history at all.
    pub fn reconstruct_from_trades_and_quotes(
        &self,
        trades: &[(DateTime<Utc>, Trade)],
        quotes: &[(DateTime<Utc>, Quote)],
        timestamp: DateTime<Utc>,
    ) -> (Vec<PriceLevel>, Vec<PriceLevel>) {
        // Find the most recent quote before timestamp
        let recent_quote = quotes.iter()
            .filter(|(t, _)| *t <= timestamp)
            .last()
            .map(|(_, q)| q);
        // Find recent trades to estimate market conditions
        let recent_trades: Vec<_> = trades.iter()
            .filter(|(t, _)| {
                let age = timestamp - *t;
                age < Duration::minutes(5) && age >= Duration::zero()
            })
            .map(|(_, t)| t)
            .collect();
        if let Some(quote) = recent_quote {
            // Start with actual quote
            self.build_full_book(quote, &recent_trades, timestamp)
        } else if !recent_trades.is_empty() {
            // Reconstruct from trades only
            self.reconstruct_from_trades_only(&recent_trades, timestamp)
        } else {
            // No data - return empty book
            (vec![], vec![])
        }
    }

    /// Expands a top-of-book quote into a full book: the quote itself at
    /// level 0, then `num_levels - 1` synthetic levels one tick apart with
    /// sizes from the configured depth model.
    fn build_full_book(
        &self,
        top_quote: &Quote,
        recent_trades: &[&Trade],
        _timestamp: DateTime<Utc>,
    ) -> (Vec<PriceLevel>, Vec<PriceLevel>) {
        let mut bids = Vec::with_capacity(self.num_levels);
        let mut asks = Vec::with_capacity(self.num_levels);
        // Add top of book
        bids.push(PriceLevel {
            price: top_quote.bid,
            size: top_quote.bid_size,
            order_count: Some(self.estimate_order_count(top_quote.bid_size)),
        });
        asks.push(PriceLevel {
            price: top_quote.ask,
            size: top_quote.ask_size,
            order_count: Some(self.estimate_order_count(top_quote.ask_size)),
        });
        // Calculate spread and volatility from recent trades
        let (_spread_bps, _volatility) = self.estimate_market_conditions(recent_trades, top_quote);
        // Build deeper levels, one tick per level away from the touch
        for i in 1..self.num_levels {
            // Bid levels
            let bid_price = top_quote.bid - (i as f64 * self.tick_size);
            let bid_size = self.calculate_level_size(i, top_quote.bid_size, &self.depth_model);
            bids.push(PriceLevel {
                price: bid_price,
                size: bid_size,
                order_count: Some(self.estimate_order_count(bid_size)),
            });
            // Ask levels
            let ask_price = top_quote.ask + (i as f64 * self.tick_size);
            let ask_size = self.calculate_level_size(i, top_quote.ask_size, &self.depth_model);
            asks.push(PriceLevel {
                price: ask_price,
                size: ask_size,
                order_count: Some(self.estimate_order_count(ask_size)),
            });
        }
        (bids, asks)
    }

    /// Builds a fully synthetic book when only trades are available:
    /// mid = average trade price, spread ~ 2 standard deviations of
    /// trade prices, sizes decaying harmonically from 10x the average
    /// trade size.
    fn reconstruct_from_trades_only(
        &self,
        recent_trades: &[&Trade],
        _timestamp: DateTime<Utc>,
    ) -> (Vec<PriceLevel>, Vec<PriceLevel>) {
        if recent_trades.is_empty() {
            return (vec![], vec![]);
        }
        // Estimate mid price from trades
        let prices: Vec<f64> = recent_trades.iter().map(|t| t.price).collect();
        let mid_price = prices.iter().sum::<f64>() / prices.len() as f64;
        // Estimate spread from trade price variance
        let variance = prices.iter()
            .map(|p| (p - mid_price).powi(2))
            .sum::<f64>() / prices.len() as f64;
        let estimated_spread = variance.sqrt() * 2.0; // Rough approximation
        // Build synthetic book on the tick grid.
        // FIX: divide the full half-spread-adjusted price by tick_size
        // BEFORE rounding. The original computed
        // `(mid - spread/2/tick).round() * tick`, which (by precedence)
        // rounded the raw price to an integer count of ticks incorrectly
        // and produced grossly wrong price levels.
        let bid_price = ((mid_price - estimated_spread / 2.0) / self.tick_size).round() * self.tick_size;
        let ask_price = ((mid_price + estimated_spread / 2.0) / self.tick_size).round() * self.tick_size;
        // Estimate sizes from trade volumes
        let avg_trade_size = recent_trades.iter()
            .map(|t| t.size)
            .sum::<f64>() / recent_trades.len() as f64;
        let mut bids = Vec::with_capacity(self.num_levels);
        let mut asks = Vec::with_capacity(self.num_levels);
        for i in 0..self.num_levels {
            let level_size = avg_trade_size * 10.0 / (i + 1) as f64; // Decay with depth
            bids.push(PriceLevel {
                price: bid_price - (i as f64 * self.tick_size),
                size: level_size,
                order_count: Some(self.estimate_order_count(level_size)),
            });
            asks.push(PriceLevel {
                price: ask_price + (i as f64 * self.tick_size),
                size: level_size,
                order_count: Some(self.estimate_order_count(level_size)),
            });
        }
        (bids, asks)
    }

    /// Size to display at depth `level` under `model`, rounded to lot
    /// size and floored at one lot.
    fn calculate_level_size(&self, level: usize, _top_size: f64, model: &DepthModel) -> f64 {
        let size = match model {
            DepthModel::Linear { base_size, decay_rate } => {
                base_size - (level as f64 * decay_rate)
            }
            DepthModel::Exponential { base_size, decay_factor } => {
                base_size * decay_factor.powi(level as i32)
            }
            DepthModel::PowerLaw { alpha, x_min } => {
                x_min * ((level + 1) as f64).powf(-alpha)
            }
        };
        // Round to lot size and ensure positive
        ((size / self.lot_size).round() * self.lot_size).max(self.lot_size)
    }

    /// Rough number of resting orders making up `size`, assuming ~100
    /// shares per order with +/-20% random variation (non-deterministic).
    fn estimate_order_count(&self, size: f64) -> u32 {
        // Estimate based on typical order size distribution
        let avg_order_size = 100.0;
        let base_count = (size / avg_order_size).ceil() as u32;
        // Add some randomness
        let mut rng = thread_rng();
        let variation = rng.gen_range(0.8..1.2);
        ((base_count as f64 * variation) as u32).max(1)
    }

    /// Returns (spread in bps, annualized volatility) estimated from the
    /// quote and recent trade prices; defaults volatility to 2% when
    /// fewer than two trades are available.
    fn estimate_market_conditions(
        &self,
        recent_trades: &[&Trade],
        quote: &Quote,
    ) -> (f64, f64) {
        if recent_trades.is_empty() {
            let spread_bps = ((quote.ask - quote.bid) / quote.bid) * 10000.0;
            return (spread_bps, 0.02); // Default 2% volatility
        }
        // Calculate spread in bps
        let mid_price = (quote.bid + quote.ask) / 2.0;
        let spread_bps = ((quote.ask - quote.bid) / mid_price) * 10000.0;
        // Estimate volatility from trade prices (log returns)
        let prices: Vec<f64> = recent_trades.iter().map(|t| t.price).collect();
        let returns: Vec<f64> = prices.windows(2)
            .map(|w| (w[1] / w[0]).ln())
            .collect();
        let volatility = if !returns.is_empty() {
            let mean_return = returns.iter().sum::<f64>() / returns.len() as f64;
            let variance = returns.iter()
                .map(|r| (r - mean_return).powi(2))
                .sum::<f64>() / returns.len() as f64;
            variance.sqrt() * (252.0_f64).sqrt() // Annualize
        } else {
            0.02 // Default 2%
        };
        (spread_bps, volatility)
    }
}
// Market data synthesizer for generating realistic data
pub struct MarketDataSynthesizer {
    base_price: f64,             // anchor price for mean reversion (reset per quote sequence)
    tick_size: f64,              // price grid increment
    base_spread_bps: f64,        // spread under normal volume
    volatility: f64,             // volatility from microstructure stats — presumably annualized, confirm with producer
    mean_reversion_speed: f64,   // pull-back strength toward base_price
    jump_intensity: f64,         // probability of a jump per unit time
    jump_size_dist: Normal<f64>, // relative jump size distribution
    volume_dist: Pareto<f64>,    // heavy-tailed displayed-size distribution
    intraday_pattern: Vec<f64>,  // 24 hourly volume-fraction buckets
}
impl MarketDataSynthesizer {
    /// Builds a synthesizer calibrated from per-symbol microstructure
    /// stats. `base_price` starts at 100.0 and is replaced by the actual
    /// starting price when a quote sequence is generated.
    pub fn new(symbol_params: &MarketMicrostructure) -> Self {
        let jump_size_dist = Normal::new(0.0, symbol_params.volatility * 0.1).unwrap();
        let volume_dist = Pareto::new(1.0, 1.5).unwrap();
        Self {
            base_price: 100.0, // Will be updated with actual price
            tick_size: symbol_params.tick_size,
            base_spread_bps: symbol_params.avg_spread_bps,
            volatility: symbol_params.volatility,
            mean_reversion_speed: 0.1,
            jump_intensity: 0.05, // 5% chance of jump per time step
            jump_size_dist,
            volume_dist,
            intraday_pattern: symbol_params.intraday_volume_profile.clone(),
        }
    }

    /// Generates one quote every `interval_ms` between `start_time` and
    /// `end_time` (inclusive) by simulating a mean-reverting mid price
    /// with jumps, then wrapping it in a volume-dependent spread.
    pub fn generate_quote_sequence(
        &mut self,
        start_price: f64,
        start_time: DateTime<Utc>,
        end_time: DateTime<Utc>,
        interval_ms: i64,
    ) -> Vec<(DateTime<Utc>, Quote)> {
        self.base_price = start_price;
        let mut quotes = Vec::new();
        let mut current_time = start_time;
        let mut mid_price = start_price;
        let mut spread_factor;
        let mut rng = thread_rng();
        // FIX: the diffusion shock must be a zero-mean Gaussian. The
        // original multiplied by rng.gen::<f64>() (uniform on [0, 1)),
        // which is always non-negative and biased every path upward.
        let noise_dist = Normal::new(0.0, 1.0).unwrap();
        while current_time <= end_time {
            // Generate price movement
            let dt = interval_ms as f64 / 1000.0 / 86400.0; // Convert to days
            // Ornstein-Uhlenbeck process with jumps.
            // FIX: the original drift was `-k * ln(mid/base - 1)`, which is
            // ln(0) = -inf on the very first step (mid == base) and blew the
            // whole path up to inf/NaN. The OU pull on log-price is
            // `-k * ln(mid/base)`: zero at the anchor, negative above it.
            let drift = -self.mean_reversion_speed * (mid_price / self.base_price).ln();
            let diffusion = self.volatility * (dt.sqrt()) * noise_dist.sample(&mut rng);
            // Add jump component
            let jump = if rng.gen::<f64>() < self.jump_intensity * dt {
                mid_price * self.jump_size_dist.sample(&mut rng)
            } else {
                0.0
            };
            mid_price *= 1.0 + drift * dt + diffusion + jump;
            mid_price = (mid_price / self.tick_size).round() * self.tick_size;
            // Dynamic spread based on volatility and time of day
            let hour_index = current_time.hour() as usize;
            let volume_factor = if hour_index < self.intraday_pattern.len() {
                self.intraday_pattern[hour_index]
            } else {
                0.04 // Default 4% of daily volume per hour
            };
            // Wider spreads during low volume periods
            spread_factor = 1.0 / volume_factor.sqrt();
            let spread_bps = self.base_spread_bps * spread_factor;
            let half_spread = mid_price * spread_bps / 20000.0;
            // Generate bid/ask snapped outward to the tick grid
            let bid = ((mid_price - half_spread) / self.tick_size).floor() * self.tick_size;
            let ask = ((mid_price + half_spread) / self.tick_size).ceil() * self.tick_size;
            // Generate sizes with correlation to spread
            let size_multiplier = 1.0 / spread_factor; // Tighter spread = more size
            let bid_size = (self.volume_dist.sample(&mut rng) * 1000.0 * size_multiplier).round();
            let ask_size = (self.volume_dist.sample(&mut rng) * 1000.0 * size_multiplier).round();
            quotes.push((current_time, Quote {
                bid,
                ask,
                bid_size,
                ask_size,
            }));
            current_time = current_time + Duration::milliseconds(interval_ms);
        }
        quotes
    }

    /// Generates trades against each quote: a random number of prints per
    /// quote, sided with a spread-dependent bias, priced mostly at the
    /// touch, and sized by a Beta-skewed fraction of displayed size.
    /// Output is sorted by timestamp.
    pub fn generate_trade_sequence(
        &mut self,
        quotes: &[(DateTime<Utc>, Quote)],
        trade_intensity: f64,
    ) -> Vec<(DateTime<Utc>, Trade)> {
        let mut trades = Vec::new();
        let mut rng = thread_rng();
        let beta_dist = Beta::new(2.0, 5.0).unwrap(); // Skewed towards smaller trades
        // FIX: gen_range(0..0) panics. Intensities below 0.1 truncate to an
        // empty range; guard so they simply produce no trades.
        let max_trades_per_quote = (trade_intensity * 10.0) as u32;
        for (time, quote) in quotes {
            // Uniform draw of per-quote trade count (stand-in for a Poisson
            // arrival process).
            let num_trades = if max_trades_per_quote == 0 {
                0
            } else {
                rng.gen_range(0..max_trades_per_quote)
            };
            for i in 0..num_trades {
                // Determine trade side (slight bias based on spread)
                let spread_ratio = (quote.ask - quote.bid) / quote.bid;
                let buy_prob = 0.5 - spread_ratio * 10.0; // More sells when spread is wide
                let side = if rng.gen::<f64>() < buy_prob {
                    Side::Buy
                } else {
                    Side::Sell
                };
                // Trade price (sometimes inside spread for large trades)
                let price = match side {
                    Side::Buy => {
                        if rng.gen::<f64>() < 0.9 {
                            quote.ask // Take liquidity
                        } else {
                            // Provide liquidity (inside spread)
                            quote.bid + (quote.ask - quote.bid) * rng.gen::<f64>()
                        }
                    }
                    Side::Sell => {
                        if rng.gen::<f64>() < 0.9 {
                            quote.bid // Take liquidity
                        } else {
                            // Provide liquidity (inside spread)
                            quote.bid + (quote.ask - quote.bid) * rng.gen::<f64>()
                        }
                    }
                };
                // Trade size (power law distribution)
                let size_percentile = beta_dist.sample(&mut rng);
                let base_size = match side {
                    Side::Buy => quote.ask_size,
                    Side::Sell => quote.bid_size,
                };
                let size = (base_size * size_percentile * 0.1).round().max(1.0);
                // Add small time offset for multiple trades
                let trade_time = *time + Duration::milliseconds(i as i64 * 100);
                trades.push((trade_time, Trade {
                    price,
                    size,
                    side,
                }));
            }
        }
        trades.sort_by_key(|(t, _)| *t);
        trades
    }

    /// Aggregates trades into fixed-duration OHLCV bars (with VWAP).
    /// Bars are aligned to the first trade's timestamp; intervals with no
    /// trades produce no bar.
    pub fn aggregate_to_bars(
        &self,
        trades: &[(DateTime<Utc>, Trade)],
        bar_duration: Duration,
    ) -> Vec<(DateTime<Utc>, Bar)> {
        if trades.is_empty() {
            return Vec::new();
        }
        let mut bars = Vec::new();
        let mut current_bar_start = trades[0].0;
        let mut current_bar_end = current_bar_start + bar_duration;
        let mut open = 0.0;
        // FIX: seed the running max with MIN (not 0.0) so `high` is correct
        // even if a non-positive price ever appears; `low` already used MAX.
        let mut high = f64::MIN;
        let mut low = f64::MAX;
        let mut close = 0.0;
        let mut volume = 0.0;
        let mut vwap_numerator = 0.0;
        let mut first_trade = true;
        for (time, trade) in trades {
            // Close out (possibly several empty) bars until `time` fits.
            while *time >= current_bar_end {
                if volume > 0.0 {
                    bars.push((current_bar_start, Bar {
                        open,
                        high,
                        low,
                        close,
                        volume,
                        vwap: Some(vwap_numerator / volume),
                    }));
                }
                // Reset for new bar
                current_bar_start = current_bar_end;
                current_bar_end = current_bar_start + bar_duration;
                open = 0.0;
                high = f64::MIN;
                low = f64::MAX;
                close = 0.0;
                volume = 0.0;
                vwap_numerator = 0.0;
                first_trade = true;
            }
            // Update current bar
            if first_trade {
                open = trade.price;
                first_trade = false;
            }
            high = high.max(trade.price);
            low = low.min(trade.price);
            close = trade.price;
            volume += trade.size;
            vwap_numerator += trade.price * trade.size;
        }
        // Add final bar if it has data
        if volume > 0.0 {
            bars.push((current_bar_start, Bar {
                open,
                high,
                low,
                close,
                volume,
                vwap: Some(vwap_numerator / volume),
            }));
        }
        bars
    }
}

View file

@ -0,0 +1,50 @@
pub mod time_providers;
pub mod market_data_sources;
pub mod execution_handlers;
pub mod market_microstructure;
use crate::{MarketDataSource, ExecutionHandler, TimeProvider, TradingMode};
// Factory functions to create appropriate implementations based on mode

/// Picks the market data source for `mode`: historical replay for
/// backtests, channel-fed live data for paper and live trading.
pub fn create_market_data_source(mode: &TradingMode) -> Box<dyn MarketDataSource> {
    match mode {
        TradingMode::Backtest { .. } => Box::new(market_data_sources::HistoricalDataSource::new()),
        TradingMode::Paper { .. } | TradingMode::Live { .. } => {
            Box::new(market_data_sources::LiveDataSource::new())
        }
    }
}
/// Picks the execution handler for `mode`: simulated fills (backtest or
/// paper flavor) versus real broker routing for live trading.
pub fn create_execution_handler(mode: &TradingMode) -> Box<dyn ExecutionHandler> {
    match mode {
        TradingMode::Backtest { .. } => {
            let simulator = Box::new(execution_handlers::BacktestFillSimulator::new());
            Box::new(execution_handlers::SimulatedExecution::new(simulator))
        }
        TradingMode::Paper { .. } => {
            let simulator = Box::new(execution_handlers::PaperFillSimulator::new());
            Box::new(execution_handlers::SimulatedExecution::new(simulator))
        }
        TradingMode::Live { broker, account_id } => {
            Box::new(execution_handlers::BrokerExecution::new(broker.clone(), account_id.clone()))
        }
    }
}
/// Picks the clock for `mode`: a controllable simulated clock seeded at
/// the backtest start, or the real system clock otherwise.
pub fn create_time_provider(mode: &TradingMode) -> Box<dyn TimeProvider> {
    match mode {
        TradingMode::Backtest { start_time, .. } => {
            Box::new(time_providers::SimulatedTime::new(*start_time))
        }
        TradingMode::Paper { .. } | TradingMode::Live { .. } => {
            Box::new(time_providers::SystemTime::new())
        }
    }
}

View file

@ -0,0 +1,74 @@
use crate::TimeProvider;
use chrono::{DateTime, Utc};
use parking_lot::Mutex;
use std::sync::Arc;
// Real-time provider for paper and live trading
pub struct SystemTime;

impl SystemTime {
    /// Creates a wall-clock time provider.
    pub fn new() -> Self {
        Self
    }
}

// `#![deny(clippy::all)]` rejects a no-arg `new()` without a `Default`
// impl (clippy::new_without_default).
impl Default for SystemTime {
    fn default() -> Self {
        Self::new()
    }
}
impl TimeProvider for SystemTime {
    /// Current wall-clock time in UTC.
    fn now(&self) -> DateTime<Utc> {
        Utc::now()
    }

    /// Blocks the current thread until `target`; returns immediately
    /// (with no error) when `target` is already in the past.
    fn sleep_until(&self, target: DateTime<Utc>) -> Result<(), String> {
        let now = Utc::now();
        if target <= now {
            return Ok(());
        }
        let duration = (target - now).to_std()
            .map_err(|e| format!("Invalid duration: {}", e))?;
        std::thread::sleep(duration);
        Ok(())
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
}
// Simulated time for backtesting
pub struct SimulatedTime {
    current_time: Arc<Mutex<DateTime<Utc>>>,
}

impl SimulatedTime {
    /// Creates a simulated clock frozen at `start_time`.
    pub fn new(start_time: DateTime<Utc>) -> Self {
        SimulatedTime {
            current_time: Arc::new(Mutex::new(start_time)),
        }
    }

    /// Moves the clock forward to `new_time`; requests to move backwards
    /// are ignored, keeping simulated time monotonic.
    pub fn advance_to(&self, new_time: DateTime<Utc>) {
        let mut clock = self.current_time.lock();
        if *clock < new_time {
            *clock = new_time;
        }
    }

    /// Shifts the clock by `duration` (a negative duration moves it back).
    pub fn advance_by(&self, duration: chrono::Duration) {
        let mut clock = self.current_time.lock();
        *clock = *clock + duration;
    }
}
impl TimeProvider for SimulatedTime {
    /// Current simulated time.
    fn now(&self) -> DateTime<Utc> {
        *self.current_time.lock()
    }

    /// No-op: in a backtest the engine drives the clock explicitly via
    /// `advance_to`/`advance_by`, so there is never anything to wait for.
    fn sleep_until(&self, _target: DateTime<Utc>) -> Result<(), String> {
        Ok(())
    }

    fn as_any(&self) -> &dyn std::any::Any {
        self
    }
}

221
apps/stock/core/src/lib.rs Normal file
View file

@ -0,0 +1,221 @@
#![deny(clippy::all)]
pub mod core;
pub mod orderbook;
pub mod risk;
pub mod positions;
pub mod api;
pub mod analytics;
// Re-export commonly used types
pub use positions::{Position, PositionUpdate};
pub use risk::{RiskLimits, RiskCheckResult, RiskMetrics};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use parking_lot::RwLock;
/// Selects which implementations of the core traits get wired up.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TradingMode {
    /// Historical simulation over a fixed window.
    /// `speed_multiplier` presumably scales replay speed — confirm with
    /// the backtest engine.
    Backtest {
        start_time: DateTime<Utc>,
        end_time: DateTime<Utc>,
        speed_multiplier: f64,
    },
    /// Simulated fills against live market data.
    Paper {
        starting_capital: f64,
    },
    /// Real order routing through a broker account.
    Live {
        broker: String,
        account_id: String,
    },
}
// Core traits that allow different implementations based on mode
#[async_trait::async_trait]
pub trait MarketDataSource: Send + Sync {
    /// Next market update, or `None` when the stream is exhausted/closed.
    async fn get_next_update(&mut self) -> Option<MarketUpdate>;
    /// Repositions the stream; errors for sources that cannot seek (live data).
    fn seek_to_time(&mut self, timestamp: DateTime<Utc>) -> Result<(), String>;
    // Downcasting hooks so callers can reach mode-specific APIs
    // (e.g. `HistoricalDataSource::load_data`).
    fn as_any(&self) -> &dyn std::any::Any;
    fn as_any_mut(&mut self) -> &mut dyn std::any::Any;
}

#[async_trait::async_trait]
pub trait ExecutionHandler: Send + Sync {
    /// Submits an order and reports its (possibly still pending) result.
    async fn execute_order(&mut self, order: Order) -> Result<ExecutionResult, String>;
    /// The fill simulator backing this handler, when it simulates
    /// (`None` for real broker execution).
    fn get_fill_simulator(&self) -> Option<&dyn FillSimulator>;
}

pub trait TimeProvider: Send + Sync {
    /// Current time — wall clock in live/paper mode, simulated in backtests.
    fn now(&self) -> DateTime<Utc>;
    /// Waits until `target` where that makes sense; simulated clocks no-op.
    fn sleep_until(&self, target: DateTime<Utc>) -> Result<(), String>;
    // Downcasting hook for mode-specific control (e.g. advancing simulated time).
    fn as_any(&self) -> &dyn std::any::Any;
}

pub trait FillSimulator: Send + Sync {
    /// Simulates how `order` would fill against `orderbook`;
    /// `None` means no fill.
    fn simulate_fill(&self, order: &Order, orderbook: &OrderBookSnapshot) -> Option<Fill>;
}
// Main trading core that works across all modes:
// mode-specific components (data, execution, clock) are injected, while
// the books, risk engine, and position tracker are shared engines.
pub struct TradingCore {
    mode: TradingMode, // mode the pluggable components were built for
    pub market_data_source: Arc<RwLock<Box<dyn MarketDataSource>>>,
    pub execution_handler: Arc<RwLock<Box<dyn ExecutionHandler>>>,
    pub time_provider: Arc<Box<dyn TimeProvider>>,
    pub orderbooks: Arc<orderbook::OrderBookManager>,
    pub risk_engine: Arc<risk::RiskEngine>,
    pub position_tracker: Arc<positions::PositionTracker>,
}
// Core types used across the system

/// A timestamped market data event for one symbol.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MarketUpdate {
    pub symbol: String,
    pub timestamp: DateTime<Utc>,
    pub data: MarketDataType,
}

// Market microstructure parameters
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MarketMicrostructure {
    pub symbol: String,
    pub avg_spread_bps: f64, // typical bid/ask spread in basis points
    pub daily_volume: f64,
    pub avg_trade_size: f64,
    pub volatility: f64, // presumably annualized — confirm with producers
    pub tick_size: f64,  // minimum price increment
    pub lot_size: f64,   // minimum size increment
    pub intraday_volume_profile: Vec<f64>, // 24 hourly buckets
}

/// The payload carried by a `MarketUpdate`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum MarketDataType {
    Quote(Quote),
    Trade(Trade),
    Bar(Bar),
}

/// Top-of-book quote.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Quote {
    pub bid: f64,
    pub ask: f64,
    pub bid_size: f64,
    pub ask_size: f64,
}

/// A single trade print.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Trade {
    pub price: f64,
    pub size: f64,
    pub side: Side, // presumably the aggressor side — confirm with feed
}

/// OHLCV bar, optionally with volume-weighted average price.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Bar {
    pub open: f64,
    pub high: f64,
    pub low: f64,
    pub close: f64,
    pub volume: f64,
    pub vwap: Option<f64>,
}
/// Order/trade direction.
///
/// `Eq` and `Hash` are derived alongside `PartialEq`: for a fieldless
/// enum they are always sound, and they let `Side` be used as a map key
/// and compared strictly.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum Side {
    Buy,
    Sell,
}
/// An order to be executed (or simulated).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Order {
    pub id: String,
    pub symbol: String,
    pub side: Side,
    pub quantity: f64,
    pub order_type: OrderType,
    pub time_in_force: TimeInForce,
}

/// Pricing instruction attached to an order.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum OrderType {
    Market,
    Limit { price: f64 },
    Stop { stop_price: f64 },
    StopLimit { stop_price: f64, limit_price: f64 },
}

/// How long an order stays working.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum TimeInForce {
    Day, // expires at end of trading day
    GTC, // good 'til cancelled
    IOC, // immediate-or-cancel
    FOK, // fill-or-kill
}

/// Outcome of submitting an order, including any fills so far.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionResult {
    pub order_id: String,
    pub status: OrderStatus,
    pub fills: Vec<Fill>,
}

/// Lifecycle state of an order.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum OrderStatus {
    Pending,
    Accepted,
    PartiallyFilled,
    Filled,
    Cancelled,
    Rejected(String), // carries the rejection reason
}

/// A single execution against an order.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Fill {
    pub timestamp: DateTime<Utc>,
    pub price: f64,
    pub quantity: f64,
    pub commission: f64,
}

/// Depth snapshot of one symbol's book at a point in time.
/// Both sides are ordered best-first.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OrderBookSnapshot {
    pub symbol: String,
    pub timestamp: DateTime<Utc>,
    pub bids: Vec<PriceLevel>,
    pub asks: Vec<PriceLevel>,
}

/// Aggregate liquidity at a single price.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PriceLevel {
    pub price: f64,
    pub size: f64,
    pub order_count: Option<u32>, // number of resting orders, when known
}
impl TradingCore {
    /// Wires the mode-specific components together with freshly created
    /// shared engines (order books, risk, positions).
    pub fn new(
        mode: TradingMode,
        market_data_source: Box<dyn MarketDataSource>,
        execution_handler: Box<dyn ExecutionHandler>,
        time_provider: Box<dyn TimeProvider>,
    ) -> Self {
        Self {
            mode,
            market_data_source: Arc::new(RwLock::new(market_data_source)),
            execution_handler: Arc::new(RwLock::new(execution_handler)),
            time_provider: Arc::new(time_provider),
            orderbooks: Arc::new(orderbook::OrderBookManager::new()),
            risk_engine: Arc::new(risk::RiskEngine::new()),
            position_tracker: Arc::new(positions::PositionTracker::new()),
        }
    }

    /// The trading mode this core was constructed for.
    pub fn get_mode(&self) -> &TradingMode {
        &self.mode
    }

    /// Current time from the mode-appropriate clock (simulated in backtests).
    pub fn get_time(&self) -> DateTime<Utc> {
        self.time_provider.now()
    }
}

View file

@ -0,0 +1,244 @@
use crate::{Quote, Trade, Side, OrderBookSnapshot, PriceLevel};
use chrono::{DateTime, Utc};
use dashmap::DashMap;
use parking_lot::RwLock;
use std::collections::BTreeMap;
use std::sync::Arc;
// Manages order books for all symbols
pub struct OrderBookManager {
    books: DashMap<String, Arc<RwLock<OrderBook>>>, // one book per symbol
}

impl OrderBookManager {
    /// Creates an empty manager; books are created lazily per symbol.
    pub fn new() -> Self {
        Self {
            books: DashMap::new(),
        }
    }

    /// Returns the book for `symbol`, creating an empty one on first use.
    pub fn get_or_create(&self, symbol: &str) -> Arc<RwLock<OrderBook>> {
        self.books
            .entry(symbol.to_string())
            .or_insert_with(|| Arc::new(RwLock::new(OrderBook::new(symbol.to_string()))))
            .clone()
    }

    /// Applies a top-of-book quote to `symbol`'s book.
    pub fn update_quote(&self, symbol: &str, quote: Quote, timestamp: DateTime<Utc>) {
        let book = self.get_or_create(symbol);
        let mut book_guard = book.write();
        book_guard.update_quote(quote, timestamp);
    }

    /// Applies a trade print to `symbol`'s book (records it and removes
    /// the liquidity it likely consumed).
    pub fn update_trade(&self, symbol: &str, trade: Trade, timestamp: DateTime<Utc>) {
        let book = self.get_or_create(symbol);
        let mut book_guard = book.write();
        book_guard.update_trade(trade, timestamp);
    }

    /// Snapshot of the top `depth` levels per side; `None` for unknown symbols.
    pub fn get_snapshot(&self, symbol: &str, depth: usize) -> Option<OrderBookSnapshot> {
        self.books.get(symbol).map(|book| {
            let book_guard = book.read();
            book_guard.get_snapshot(depth)
        })
    }

    /// Best (bid, ask) prices; `None` if the symbol or either side is missing.
    pub fn get_best_bid_ask(&self, symbol: &str) -> Option<(f64, f64)> {
        self.books.get(symbol).and_then(|book| {
            let book_guard = book.read();
            book_guard.get_best_bid_ask()
        })
    }
}

// `#![deny(clippy::all)]` rejects a no-arg `new()` without a `Default`
// impl (clippy::new_without_default).
impl Default for OrderBookManager {
    fn default() -> Self {
        Self::new()
    }
}
// Individual order book for a symbol
pub struct OrderBook {
    symbol: String,
    // Keyed by OrderedFloat price. Bids use the NEGATED price so the
    // BTreeMap's ascending iteration yields the best (highest) bid first;
    // asks use the raw price, yielding the best (lowest) ask first.
    bids: BTreeMap<OrderedFloat, Level>,
    asks: BTreeMap<OrderedFloat, Level>,
    last_update: DateTime<Utc>,
    last_trade_price: Option<f64>,
    last_trade_size: Option<f64>,
}

/// Liquidity resting at one price.
#[derive(Clone, Debug)]
struct Level {
    price: f64,
    size: f64,
    order_count: u32,
    last_update: DateTime<Utc>, // used to expire stale levels
}
// Wrapper for f64 to allow BTreeMap ordering.
// NaN keys compare Equal via the `unwrap_or` fallback; prices are
// expected to be finite, so this never triggers in practice.
#[derive(Clone, Copy, Debug, PartialEq)]
struct OrderedFloat(f64);

impl Eq for OrderedFloat {}

impl Ord for OrderedFloat {
    /// Total order over the (assumed finite) inner f64.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.0.partial_cmp(&other.0).unwrap_or(std::cmp::Ordering::Equal)
    }
}

// Canonical form: PartialOrd delegates to Ord, never the other way
// around. The original implemented PartialOrd independently and had Ord
// call it, which trips clippy::non_canonical_partial_ord_impl — an error
// under the crate's #![deny(clippy::all)].
impl PartialOrd for OrderedFloat {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl OrderBook {
    /// Creates an empty book for `symbol`.
    pub fn new(symbol: String) -> Self {
        Self {
            symbol,
            bids: BTreeMap::new(),
            asks: BTreeMap::new(),
            last_update: Utc::now(), // NOTE(review): wall clock even in backtests — confirm acceptable
            last_trade_price: None,
            last_trade_size: None,
        }
    }

    /// Inserts/overwrites the quoted bid and ask levels, then expires
    /// stale levels. Non-positive prices/sizes are ignored. Levels at
    /// other prices from earlier quotes remain until they age out (60s),
    /// which can briefly leave crossed levels in the book.
    pub fn update_quote(&mut self, quote: Quote, timestamp: DateTime<Utc>) {
        // Update bid
        if quote.bid > 0.0 && quote.bid_size > 0.0 {
            self.bids.insert(
                OrderedFloat(-quote.bid), // Negative for reverse ordering
                Level {
                    price: quote.bid,
                    size: quote.bid_size,
                    order_count: 1,
                    last_update: timestamp,
                },
            );
        }
        // Update ask
        if quote.ask > 0.0 && quote.ask_size > 0.0 {
            self.asks.insert(
                OrderedFloat(quote.ask),
                Level {
                    price: quote.ask,
                    size: quote.ask_size,
                    order_count: 1,
                    last_update: timestamp,
                },
            );
        }
        self.last_update = timestamp;
        self.clean_stale_levels(timestamp);
    }

    /// Records the trade and removes the book liquidity it most likely
    /// consumed on the side it hit.
    pub fn update_trade(&mut self, trade: Trade, timestamp: DateTime<Utc>) {
        self.last_trade_price = Some(trade.price);
        self.last_trade_size = Some(trade.size);
        self.last_update = timestamp;
        // Optionally update order book based on trade
        // Remove liquidity that was likely consumed
        match trade.side {
            Side::Buy => {
                // Trade hit the ask, remove liquidity
                self.remove_liquidity_up_to_asks(trade.price, trade.size);
            }
            Side::Sell => {
                // Trade hit the bid, remove liquidity
                self.remove_liquidity_up_to_bids(trade.price, trade.size);
            }
        }
    }

    /// Snapshot of the top `depth` levels per side, best-first (bids
    /// iterate descending by price thanks to the negated keys).
    pub fn get_snapshot(&self, depth: usize) -> OrderBookSnapshot {
        let bids: Vec<PriceLevel> = self.bids
            .values()
            .take(depth)
            .map(|level| PriceLevel {
                price: level.price,
                size: level.size,
                order_count: Some(level.order_count),
            })
            .collect();
        let asks: Vec<PriceLevel> = self.asks
            .values()
            .take(depth)
            .map(|level| PriceLevel {
                price: level.price,
                size: level.size,
                order_count: Some(level.order_count),
            })
            .collect();
        OrderBookSnapshot {
            symbol: self.symbol.clone(),
            timestamp: self.last_update,
            bids,
            asks,
        }
    }

    /// Best (bid, ask) prices, or `None` if either side is empty.
    /// May report a crossed market while stale levels linger.
    pub fn get_best_bid_ask(&self) -> Option<(f64, f64)> {
        let best_bid = self.bids.values().next()?.price;
        let best_ask = self.asks.values().next()?.price;
        Some((best_bid, best_ask))
    }

    /// Drops levels not refreshed within the last 60 seconds.
    fn clean_stale_levels(&mut self, current_time: DateTime<Utc>) {
        let stale_threshold = chrono::Duration::seconds(60); // 60 seconds
        self.bids.retain(|_, level| {
            current_time - level.last_update < stale_threshold
        });
        self.asks.retain(|_, level| {
            current_time - level.last_update < stale_threshold
        });
    }

    /// Consumes up to `size` from ask levels priced at or below `price`
    /// (best ask first), deleting exhausted levels.
    fn remove_liquidity_up_to_asks(&mut self, price: f64, size: f64) {
        let mut remaining_size = size;
        let mut to_remove = Vec::new();
        for (key, level) in self.asks.iter_mut() {
            if level.price <= price {
                if level.size <= remaining_size {
                    // Level fully consumed: mark for deletion.
                    remaining_size -= level.size;
                    to_remove.push(*key);
                } else {
                    // Partially consumed: shrink in place and stop.
                    level.size -= remaining_size;
                    break;
                }
            } else {
                break;
            }
        }
        for key in to_remove {
            self.asks.remove(&key);
        }
    }

    /// Consumes up to `size` from bid levels priced at or above `price`
    /// (best bid first, via the negated-key ordering), deleting
    /// exhausted levels.
    fn remove_liquidity_up_to_bids(&mut self, price: f64, size: f64) {
        let mut remaining_size = size;
        let mut to_remove = Vec::new();
        for (key, level) in self.bids.iter_mut() {
            if level.price >= price {
                if level.size <= remaining_size {
                    // Level fully consumed: mark for deletion.
                    remaining_size -= level.size;
                    to_remove.push(*key);
                } else {
                    // Partially consumed: shrink in place and stop.
                    level.size -= remaining_size;
                    break;
                }
            } else {
                break;
            }
        }
        for key in to_remove {
            self.bids.remove(&key);
        }
    }
}

View file

@ -0,0 +1,166 @@
use crate::{Fill, Side};
use chrono::{DateTime, Utc};
use dashmap::DashMap;
use serde::{Deserialize, Serialize};
/// Net position in one symbol, with running realized/unrealized P&L.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Position {
    pub symbol: String,
    pub quantity: f64,       // signed: positive = long, negative = short
    pub average_price: f64,  // average entry price of the open quantity
    pub realized_pnl: f64,   // P&L locked in by closes, net of commissions
    pub unrealized_pnl: f64, // mark-to-market P&L of the open quantity
    pub total_cost: f64,     // cost basis of the open quantity
    pub last_update: DateTime<Utc>,
}

/// A fill together with the position state it produced.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PositionUpdate {
    pub symbol: String,
    pub fill: Fill,
    pub resulting_position: Position,
}
pub struct PositionTracker {
positions: DashMap<String, Position>,
}
impl PositionTracker {
    /// Creates a tracker with no positions.
    pub fn new() -> Self {
        Self {
            positions: DashMap::new(),
        }
    }
    /// Applies `fill` (a `side` execution in `symbol`) to the tracked
    /// position: realizes P&L on any quantity that closes the existing
    /// position and re-bases cost on any quantity that opens (or flips)
    /// it. Commission is deducted from realized P&L. Returns the fill
    /// together with the resulting position.
    pub fn process_fill(&self, symbol: &str, fill: &Fill, side: Side) -> PositionUpdate {
        let mut entry = self.positions.entry(symbol.to_string()).or_insert_with(|| {
            Position {
                symbol: symbol.to_string(),
                quantity: 0.0,
                average_price: 0.0,
                realized_pnl: 0.0,
                unrealized_pnl: 0.0,
                total_cost: 0.0,
                last_update: fill.timestamp,
            }
        });
        let position = entry.value_mut();
        let old_quantity = position.quantity;
        let old_avg_price = position.average_price;
        // Calculate new position
        match side {
            Side::Buy => {
                // Adding to position
                position.quantity += fill.quantity;
                if old_quantity >= 0.0 {
                    // Already long or flat, average up/down
                    position.total_cost += fill.price * fill.quantity;
                    position.average_price = if position.quantity > 0.0 {
                        position.total_cost / position.quantity
                    } else {
                        0.0
                    };
                } else {
                    // Was short, closing or flipping
                    let close_quantity = fill.quantity.min(-old_quantity);
                    let open_quantity = fill.quantity - close_quantity;
                    // Realize P&L on closed portion (a short profits when price falls)
                    position.realized_pnl += close_quantity * (old_avg_price - fill.price);
                    // Update position for remaining
                    if open_quantity > 0.0 {
                        // Flipped to long: basis restarts at the fill price
                        position.total_cost = open_quantity * fill.price;
                        position.average_price = fill.price;
                    } else {
                        // Still short (or flat): basis shrinks at the old average
                        position.total_cost = (position.quantity.abs()) * old_avg_price;
                    }
                }
            }
            Side::Sell => {
                // Reducing position
                position.quantity -= fill.quantity;
                if old_quantity <= 0.0 {
                    // Already short or flat, average up/down
                    position.total_cost += fill.price * fill.quantity;
                    position.average_price = if position.quantity < 0.0 {
                        position.total_cost / position.quantity.abs()
                    } else {
                        0.0
                    };
                } else {
                    // Was long, closing or flipping
                    let close_quantity = fill.quantity.min(old_quantity);
                    let open_quantity = fill.quantity - close_quantity;
                    // Realize P&L on closed portion (a long profits when price rises)
                    position.realized_pnl += close_quantity * (fill.price - old_avg_price);
                    // Update position for remaining
                    if open_quantity > 0.0 {
                        // Flipped to short: basis restarts at the fill price
                        position.total_cost = open_quantity * fill.price;
                        position.average_price = fill.price;
                    } else {
                        // Still long (or flat): basis shrinks at the old average
                        position.total_cost = (position.quantity.abs()) * old_avg_price;
                    }
                }
            }
        }
        // Subtract commission from realized P&L
        position.realized_pnl -= fill.commission;
        position.last_update = fill.timestamp;
        PositionUpdate {
            symbol: symbol.to_string(),
            fill: fill.clone(),
            resulting_position: position.clone(),
        }
    }
pub fn get_position(&self, symbol: &str) -> Option<Position> {
self.positions.get(symbol).map(|p| p.clone())
}
pub fn get_all_positions(&self) -> Vec<Position> {
self.positions.iter().map(|entry| entry.value().clone()).collect()
}
pub fn get_open_positions(&self) -> Vec<Position> {
self.positions
.iter()
.filter(|entry| entry.value().quantity.abs() > 0.0001)
.map(|entry| entry.value().clone())
.collect()
}
pub fn update_unrealized_pnl(&self, symbol: &str, current_price: f64) {
if let Some(mut position) = self.positions.get_mut(symbol) {
if position.quantity > 0.0 {
position.unrealized_pnl = position.quantity * (current_price - position.average_price);
} else if position.quantity < 0.0 {
position.unrealized_pnl = position.quantity * (current_price - position.average_price);
} else {
position.unrealized_pnl = 0.0;
}
}
}
pub fn get_total_pnl(&self) -> (f64, f64) {
let mut realized = 0.0;
let mut unrealized = 0.0;
for position in self.positions.iter() {
realized += position.realized_pnl;
unrealized += position.unrealized_pnl;
}
(realized, unrealized)
}
pub fn reset(&self) {
self.positions.clear();
}
}

View file

@ -0,0 +1,189 @@
use crate::{Order, Side};
use dashmap::DashMap;
use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RiskLimits {
pub max_position_size: f64,
pub max_order_size: f64,
pub max_daily_loss: f64,
pub max_gross_exposure: f64,
pub max_symbol_exposure: f64,
}
impl Default for RiskLimits {
fn default() -> Self {
Self {
max_position_size: 100_000.0,
max_order_size: 10_000.0,
max_daily_loss: 5_000.0,
max_gross_exposure: 1_000_000.0,
max_symbol_exposure: 50_000.0,
}
}
}
/// Outcome of `RiskEngine::check_order`: overall verdict plus per-check detail.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RiskCheckResult {
/// True only when no limit was violated.
pub passed: bool,
/// Human-readable description of each violated limit (empty when passed).
pub violations: Vec<String>,
/// Per-check pass/fail flags mirroring the violations list.
pub checks: RiskChecks,
}
/// Individual pass/fail flag for each limit evaluated in `check_order`;
/// a field is true when that check passed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RiskChecks {
/// Order quantity within `max_order_size`.
pub order_size: bool,
/// Projected post-trade position within `max_position_size`.
pub position_size: bool,
/// Daily P&L above `-max_daily_loss`.
pub daily_loss: bool,
/// Gross exposure within `max_gross_exposure`.
pub gross_exposure: bool,
/// Projected symbol exposure within `max_symbol_exposure`.
pub symbol_exposure: bool,
}
/// Point-in-time risk summary produced by `RiskEngine::get_risk_metrics`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RiskMetrics {
/// Aggregate current exposure across symbols.
pub current_exposure: f64,
/// Running P&L accumulated since the last `reset_daily_metrics`.
pub daily_pnl: f64,
/// Number of symbols with a tracked (non-zero) exposure.
pub position_count: usize,
/// Sum of absolute exposures across all symbols.
pub gross_exposure: f64,
/// Configured `max_position_size` limit, echoed for convenience.
pub max_position_size: f64,
/// Gross exposure as a percentage of `max_gross_exposure`, capped at 100.
pub utilization_pct: f64,
}
/// Pre-trade risk engine: evaluates orders against configurable limits and
/// tracks per-symbol exposure and daily P&L. All state is behind locks or
/// a concurrent map, so the engine is safe to share across threads.
pub struct RiskEngine {
/// Active limits; swappable at runtime via `update_limits`.
limits: Arc<RwLock<RiskLimits>>,
/// Absolute (unsigned) exposure per symbol, maintained by `update_position`.
symbol_exposures: DashMap<String, f64>,
/// Running daily P&L, reset by `reset_daily_metrics`.
daily_pnl: Arc<RwLock<f64>>,
}
impl RiskEngine {
    /// Creates an engine with [`RiskLimits::default`].
    pub fn new() -> Self {
        Self::with_limits(RiskLimits::default())
    }

    /// Creates an engine with explicit limits.
    pub fn with_limits(limits: RiskLimits) -> Self {
        Self {
            limits: Arc::new(RwLock::new(limits)),
            symbol_exposures: DashMap::new(),
            daily_pnl: Arc::new(RwLock::new(0.0)),
        }
    }

    /// Atomically replaces the active limits.
    pub fn update_limits(&self, new_limits: RiskLimits) {
        *self.limits.write() = new_limits;
    }

    /// Runs every pre-trade check against `order`, collecting all
    /// violations rather than stopping at the first.
    ///
    /// `current_position` is the caller's signed position in the order's
    /// symbol; `None` is treated as flat.
    pub fn check_order(&self, order: &Order, current_position: Option<f64>) -> RiskCheckResult {
        let mut violations = Vec::new();
        let limits = self.limits.read();
        // 1. Per-order size limit.
        if order.quantity > limits.max_order_size {
            violations.push(format!(
                "Order size {} exceeds limit {}",
                order.quantity, limits.max_order_size
            ));
        }
        // 2. Projected post-trade position size limit.
        let current_pos = current_position.unwrap_or(0.0);
        let new_position = match order.side {
            Side::Buy => current_pos + order.quantity,
            Side::Sell => current_pos - order.quantity,
        };
        if new_position.abs() > limits.max_position_size {
            violations.push(format!(
                "Position size {} would exceed limit {}",
                new_position.abs(), limits.max_position_size
            ));
        }
        // 3. Per-symbol exposure limit.
        // NOTE(review): exposures are stored unsigned (see update_position),
        // so the trade's direction is lost here and a sell that would
        // *reduce* exposure is still charged as an increase. Conservative
        // but over-strict — confirm intended semantics before changing.
        let symbol_exposure = self.symbol_exposures
            .get(&order.symbol)
            .map(|e| *e)
            .unwrap_or(0.0);
        let new_exposure = symbol_exposure + order.quantity;
        if new_exposure > limits.max_symbol_exposure {
            violations.push(format!(
                "Symbol exposure {} would exceed limit {}",
                new_exposure, limits.max_symbol_exposure
            ));
        }
        // 4. Daily loss limit (daily_pnl more negative than -max_daily_loss).
        let daily_pnl = *self.daily_pnl.read();
        if daily_pnl < -limits.max_daily_loss {
            violations.push(format!(
                "Daily loss {} exceeds limit {}",
                -daily_pnl, limits.max_daily_loss
            ));
        }
        // 5. Gross exposure limit.
        // NOTE(review): this checks the *pre-trade* gross exposure only;
        // the order's own contribution is not projected in.
        let gross_exposure = self.calculate_gross_exposure();
        if gross_exposure > limits.max_gross_exposure {
            violations.push(format!(
                "Gross exposure {} exceeds limit {}",
                gross_exposure, limits.max_gross_exposure
            ));
        }
        RiskCheckResult {
            passed: violations.is_empty(),
            violations,
            checks: RiskChecks {
                order_size: order.quantity <= limits.max_order_size,
                position_size: new_position.abs() <= limits.max_position_size,
                daily_loss: daily_pnl >= -limits.max_daily_loss,
                gross_exposure: gross_exposure <= limits.max_gross_exposure,
                symbol_exposure: new_exposure <= limits.max_symbol_exposure,
            },
        }
    }

    /// Records the signed post-trade position for `symbol`; stores its
    /// absolute value as exposure, dropping near-zero (< 1e-4) positions.
    pub fn update_position(&self, symbol: &str, new_position: f64) {
        if new_position.abs() < 0.0001 {
            self.symbol_exposures.remove(symbol);
        } else {
            self.symbol_exposures.insert(symbol.to_string(), new_position.abs());
        }
    }

    /// Accumulates `pnl_change` into the running daily P&L.
    pub fn update_daily_pnl(&self, pnl_change: f64) {
        let mut daily_pnl = self.daily_pnl.write();
        *daily_pnl += pnl_change;
    }

    /// Zeroes the daily P&L (call at the start of each trading day).
    pub fn reset_daily_metrics(&self) {
        *self.daily_pnl.write() = 0.0;
    }

    /// Sums the stored absolute exposures across all symbols.
    fn calculate_gross_exposure(&self) -> f64 {
        self.symbol_exposures
            .iter()
            .map(|entry| *entry.value())
            .sum()
    }

    /// Alias of `calculate_gross_exposure`; kept because code outside this
    /// view may still reference it.
    fn calculate_total_exposure(&self) -> f64 {
        self.calculate_gross_exposure()
    }

    /// Builds a point-in-time snapshot of the engine's risk state.
    pub fn get_risk_metrics(&self) -> RiskMetrics {
        let limits = self.limits.read();
        let gross_exposure = self.calculate_gross_exposure();
        RiskMetrics {
            // Bug fix: was a hard-coded 0.0 placeholder; report the
            // computed aggregate exposure.
            current_exposure: gross_exposure,
            daily_pnl: *self.daily_pnl.read(),
            position_count: self.symbol_exposures.len(),
            gross_exposure,
            max_position_size: limits.max_position_size,
            utilization_pct: (gross_exposure / limits.max_gross_exposure * 100.0).min(100.0),
        }
    }
}

impl Default for RiskEngine {
    fn default() -> Self {
        Self::new()
    }
}