Files
gogo2/COBY/aggregation/aggregation_engine.py
Dobromir Popov 8ee9b7a90c wip
2025-08-04 17:40:30 +03:00

338 lines
12 KiB
Python

"""
Main aggregation engine implementation.
"""
from typing import Dict, List
from ..interfaces.aggregation_engine import AggregationEngine
from ..models.core import (
OrderBookSnapshot, PriceBuckets, HeatmapData,
ImbalanceMetrics, ConsolidatedOrderBook
)
from ..utils.logging import get_logger, set_correlation_id
from ..utils.exceptions import AggregationError
from .price_bucketer import PriceBucketer
from .heatmap_generator import HeatmapGenerator
from .cross_exchange_aggregator import CrossExchangeAggregator
from ..processing.metrics_calculator import MetricsCalculator
logger = get_logger(__name__)
class StandardAggregationEngine(AggregationEngine):
"""
Standard implementation of aggregation engine interface.
Provides:
- Price bucket creation with $1 USD buckets
- Heatmap generation
- Cross-exchange aggregation
- Imbalance calculations
- Support/resistance detection
"""
def __init__(self):
"""Initialize aggregation engine with components"""
self.price_bucketer = PriceBucketer()
self.heatmap_generator = HeatmapGenerator()
self.cross_exchange_aggregator = CrossExchangeAggregator()
self.metrics_calculator = MetricsCalculator()
# Processing statistics
self.buckets_created = 0
self.heatmaps_generated = 0
self.consolidations_performed = 0
logger.info("Standard aggregation engine initialized")
def create_price_buckets(self, orderbook: OrderBookSnapshot,
bucket_size: float = None) -> PriceBuckets:
"""
Convert order book data to price buckets.
Args:
orderbook: Order book snapshot
bucket_size: Size of each price bucket (uses $1 default)
Returns:
PriceBuckets: Aggregated price bucket data
"""
try:
set_correlation_id()
# Use provided bucket size or default $1
if bucket_size:
bucketer = PriceBucketer(bucket_size)
else:
bucketer = self.price_bucketer
buckets = bucketer.create_price_buckets(orderbook)
self.buckets_created += 1
logger.debug(f"Created price buckets for {orderbook.symbol}@{orderbook.exchange}")
return buckets
except Exception as e:
logger.error(f"Error creating price buckets: {e}")
raise AggregationError(f"Price bucket creation failed: {e}", "BUCKET_ERROR")
def update_heatmap(self, symbol: str, buckets: PriceBuckets) -> HeatmapData:
"""
Update heatmap data with new price buckets.
Args:
symbol: Trading symbol
buckets: Price bucket data
Returns:
HeatmapData: Updated heatmap visualization data
"""
try:
set_correlation_id()
heatmap = self.heatmap_generator.generate_heatmap(buckets)
self.heatmaps_generated += 1
logger.debug(f"Generated heatmap for {symbol}: {len(heatmap.data)} points")
return heatmap
except Exception as e:
logger.error(f"Error updating heatmap: {e}")
raise AggregationError(f"Heatmap update failed: {e}", "HEATMAP_ERROR")
def calculate_imbalances(self, orderbook: OrderBookSnapshot) -> ImbalanceMetrics:
"""
Calculate order book imbalance metrics.
Args:
orderbook: Order book snapshot
Returns:
ImbalanceMetrics: Calculated imbalance metrics
"""
try:
set_correlation_id()
return self.metrics_calculator.calculate_imbalance_metrics(orderbook)
except Exception as e:
logger.error(f"Error calculating imbalances: {e}")
raise AggregationError(f"Imbalance calculation failed: {e}", "IMBALANCE_ERROR")
def aggregate_across_exchanges(self, symbol: str,
orderbooks: List[OrderBookSnapshot]) -> ConsolidatedOrderBook:
"""
Aggregate order book data from multiple exchanges.
Args:
symbol: Trading symbol
orderbooks: List of order book snapshots from different exchanges
Returns:
ConsolidatedOrderBook: Consolidated order book data
"""
try:
set_correlation_id()
consolidated = self.cross_exchange_aggregator.aggregate_across_exchanges(
symbol, orderbooks
)
self.consolidations_performed += 1
logger.debug(f"Consolidated {len(orderbooks)} order books for {symbol}")
return consolidated
except Exception as e:
logger.error(f"Error aggregating across exchanges: {e}")
raise AggregationError(f"Cross-exchange aggregation failed: {e}", "CONSOLIDATION_ERROR")
def calculate_volume_weighted_price(self, orderbooks: List[OrderBookSnapshot]) -> float:
"""
Calculate volume-weighted average price across exchanges.
Args:
orderbooks: List of order book snapshots
Returns:
float: Volume-weighted average price
"""
try:
set_correlation_id()
return self.cross_exchange_aggregator._calculate_weighted_mid_price(orderbooks)
except Exception as e:
logger.error(f"Error calculating volume weighted price: {e}")
raise AggregationError(f"VWAP calculation failed: {e}", "VWAP_ERROR")
def get_market_depth(self, orderbook: OrderBookSnapshot,
depth_levels: List[float]) -> Dict[float, Dict[str, float]]:
"""
Calculate market depth at different price levels.
Args:
orderbook: Order book snapshot
depth_levels: List of depth percentages (e.g., [0.1, 0.5, 1.0])
Returns:
Dict: Market depth data {level: {'bid_volume': x, 'ask_volume': y}}
"""
try:
set_correlation_id()
depth_data = {}
if not orderbook.mid_price:
return depth_data
for level_pct in depth_levels:
# Calculate price range for this depth level
price_range = orderbook.mid_price * (level_pct / 100.0)
min_bid_price = orderbook.mid_price - price_range
max_ask_price = orderbook.mid_price + price_range
# Calculate volumes within this range
bid_volume = sum(
bid.size for bid in orderbook.bids
if bid.price >= min_bid_price
)
ask_volume = sum(
ask.size for ask in orderbook.asks
if ask.price <= max_ask_price
)
depth_data[level_pct] = {
'bid_volume': bid_volume,
'ask_volume': ask_volume,
'total_volume': bid_volume + ask_volume
}
logger.debug(f"Calculated market depth for {len(depth_levels)} levels")
return depth_data
except Exception as e:
logger.error(f"Error calculating market depth: {e}")
return {}
def smooth_heatmap(self, heatmap: HeatmapData, smoothing_factor: float) -> HeatmapData:
"""
Apply smoothing to heatmap data to reduce noise.
Args:
heatmap: Raw heatmap data
smoothing_factor: Smoothing factor (0.0 to 1.0)
Returns:
HeatmapData: Smoothed heatmap data
"""
try:
set_correlation_id()
return self.heatmap_generator.apply_smoothing(heatmap, smoothing_factor)
except Exception as e:
logger.error(f"Error smoothing heatmap: {e}")
return heatmap # Return original on error
def calculate_liquidity_score(self, orderbook: OrderBookSnapshot) -> float:
"""
Calculate liquidity score for an order book.
Args:
orderbook: Order book snapshot
Returns:
float: Liquidity score (0.0 to 1.0)
"""
try:
set_correlation_id()
return self.metrics_calculator.calculate_liquidity_score(orderbook)
except Exception as e:
logger.error(f"Error calculating liquidity score: {e}")
return 0.0
def detect_support_resistance(self, heatmap: HeatmapData) -> Dict[str, List[float]]:
"""
Detect support and resistance levels from heatmap data.
Args:
heatmap: Heatmap data
Returns:
Dict: {'support': [prices], 'resistance': [prices]}
"""
try:
set_correlation_id()
return self.heatmap_generator.calculate_support_resistance(heatmap)
except Exception as e:
logger.error(f"Error detecting support/resistance: {e}")
return {'support': [], 'resistance': []}
def create_consolidated_heatmap(self, symbol: str,
orderbooks: List[OrderBookSnapshot]) -> HeatmapData:
"""
Create consolidated heatmap from multiple exchanges.
Args:
symbol: Trading symbol
orderbooks: List of order book snapshots
Returns:
HeatmapData: Consolidated heatmap data
"""
try:
set_correlation_id()
return self.cross_exchange_aggregator.create_consolidated_heatmap(
symbol, orderbooks
)
except Exception as e:
logger.error(f"Error creating consolidated heatmap: {e}")
raise AggregationError(f"Consolidated heatmap creation failed: {e}", "CONSOLIDATED_HEATMAP_ERROR")
def detect_arbitrage_opportunities(self, orderbooks: List[OrderBookSnapshot]) -> List[Dict]:
"""
Detect arbitrage opportunities between exchanges.
Args:
orderbooks: List of order book snapshots
Returns:
List[Dict]: Arbitrage opportunities
"""
try:
set_correlation_id()
return self.cross_exchange_aggregator.detect_arbitrage_opportunities(orderbooks)
except Exception as e:
logger.error(f"Error detecting arbitrage opportunities: {e}")
return []
def get_processing_stats(self) -> Dict[str, any]:
"""Get processing statistics"""
return {
'buckets_created': self.buckets_created,
'heatmaps_generated': self.heatmaps_generated,
'consolidations_performed': self.consolidations_performed,
'price_bucketer_stats': self.price_bucketer.get_processing_stats(),
'heatmap_generator_stats': self.heatmap_generator.get_processing_stats(),
'cross_exchange_stats': self.cross_exchange_aggregator.get_processing_stats()
}
def reset_stats(self) -> None:
"""Reset processing statistics"""
self.buckets_created = 0
self.heatmaps_generated = 0
self.consolidations_performed = 0
self.price_bucketer.reset_stats()
self.heatmap_generator.reset_stats()
self.cross_exchange_aggregator.reset_stats()
logger.info("Aggregation engine statistics reset")