390 lines
15 KiB
Python
390 lines
15 KiB
Python
"""
|
|
Cross-exchange data aggregation and consolidation.
|
|
"""
|
|
|
|
from typing import List, Dict, Optional
|
|
from collections import defaultdict
|
|
from datetime import datetime
|
|
from ..models.core import (
|
|
OrderBookSnapshot, ConsolidatedOrderBook, PriceLevel,
|
|
PriceBuckets, HeatmapData, HeatmapPoint
|
|
)
|
|
from ..utils.logging import get_logger
|
|
from ..utils.timing import get_current_timestamp
|
|
from .price_bucketer import PriceBucketer
|
|
from .heatmap_generator import HeatmapGenerator
|
|
|
|
logger = get_logger(__name__)
|
|
|
|
|
|
class CrossExchangeAggregator:
|
|
"""
|
|
Aggregates data across multiple exchanges.
|
|
|
|
Provides consolidated order books and cross-exchange heatmaps.
|
|
"""
|
|
|
|
def __init__(self):
|
|
"""Initialize cross-exchange aggregator"""
|
|
self.price_bucketer = PriceBucketer()
|
|
self.heatmap_generator = HeatmapGenerator()
|
|
|
|
# Exchange weights for aggregation
|
|
self.exchange_weights = {
|
|
'binance': 1.0,
|
|
'coinbase': 0.9,
|
|
'kraken': 0.8,
|
|
'bybit': 0.7,
|
|
'okx': 0.7,
|
|
'huobi': 0.6,
|
|
'kucoin': 0.6,
|
|
'gateio': 0.5,
|
|
'bitfinex': 0.5,
|
|
'mexc': 0.4
|
|
}
|
|
|
|
# Statistics
|
|
self.consolidations_performed = 0
|
|
self.exchanges_processed = set()
|
|
|
|
logger.info("Cross-exchange aggregator initialized")
|
|
|
|
def aggregate_across_exchanges(self, symbol: str,
|
|
orderbooks: List[OrderBookSnapshot]) -> ConsolidatedOrderBook:
|
|
"""
|
|
Aggregate order book data from multiple exchanges.
|
|
|
|
Args:
|
|
symbol: Trading symbol
|
|
orderbooks: List of order book snapshots from different exchanges
|
|
|
|
Returns:
|
|
ConsolidatedOrderBook: Consolidated order book data
|
|
"""
|
|
if not orderbooks:
|
|
raise ValueError("Cannot aggregate empty orderbook list")
|
|
|
|
try:
|
|
# Track exchanges
|
|
exchanges = [ob.exchange for ob in orderbooks]
|
|
self.exchanges_processed.update(exchanges)
|
|
|
|
# Calculate weighted mid price
|
|
weighted_mid_price = self._calculate_weighted_mid_price(orderbooks)
|
|
|
|
# Consolidate bids and asks
|
|
consolidated_bids = self._consolidate_price_levels(
|
|
[ob.bids for ob in orderbooks],
|
|
[ob.exchange for ob in orderbooks],
|
|
'bid'
|
|
)
|
|
|
|
consolidated_asks = self._consolidate_price_levels(
|
|
[ob.asks for ob in orderbooks],
|
|
[ob.exchange for ob in orderbooks],
|
|
'ask'
|
|
)
|
|
|
|
# Calculate total volumes
|
|
total_bid_volume = sum(level.size for level in consolidated_bids)
|
|
total_ask_volume = sum(level.size for level in consolidated_asks)
|
|
|
|
# Create consolidated order book
|
|
consolidated = ConsolidatedOrderBook(
|
|
symbol=symbol,
|
|
timestamp=get_current_timestamp(),
|
|
exchanges=exchanges,
|
|
bids=consolidated_bids,
|
|
asks=consolidated_asks,
|
|
weighted_mid_price=weighted_mid_price,
|
|
total_bid_volume=total_bid_volume,
|
|
total_ask_volume=total_ask_volume,
|
|
exchange_weights={ex: self.exchange_weights.get(ex, 0.5) for ex in exchanges}
|
|
)
|
|
|
|
self.consolidations_performed += 1
|
|
|
|
logger.debug(
|
|
f"Consolidated {len(orderbooks)} order books for {symbol}: "
|
|
f"{len(consolidated_bids)} bids, {len(consolidated_asks)} asks"
|
|
)
|
|
|
|
return consolidated
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error aggregating across exchanges: {e}")
|
|
raise
|
|
|
|
def create_consolidated_heatmap(self, symbol: str,
|
|
orderbooks: List[OrderBookSnapshot]) -> HeatmapData:
|
|
"""
|
|
Create consolidated heatmap from multiple exchanges.
|
|
|
|
Args:
|
|
symbol: Trading symbol
|
|
orderbooks: List of order book snapshots
|
|
|
|
Returns:
|
|
HeatmapData: Consolidated heatmap data
|
|
"""
|
|
try:
|
|
# Create price buckets for each exchange
|
|
all_buckets = []
|
|
for orderbook in orderbooks:
|
|
buckets = self.price_bucketer.create_price_buckets(orderbook)
|
|
all_buckets.append(buckets)
|
|
|
|
# Aggregate all buckets
|
|
if len(all_buckets) == 1:
|
|
consolidated_buckets = all_buckets[0]
|
|
else:
|
|
consolidated_buckets = self.price_bucketer.aggregate_buckets(all_buckets)
|
|
|
|
# Generate heatmap from consolidated buckets
|
|
heatmap = self.heatmap_generator.generate_heatmap(consolidated_buckets)
|
|
|
|
# Add exchange metadata to heatmap points
|
|
self._add_exchange_metadata(heatmap, orderbooks)
|
|
|
|
logger.debug(f"Created consolidated heatmap for {symbol} from {len(orderbooks)} exchanges")
|
|
return heatmap
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error creating consolidated heatmap: {e}")
|
|
raise
|
|
|
|
def _calculate_weighted_mid_price(self, orderbooks: List[OrderBookSnapshot]) -> float:
|
|
"""Calculate volume-weighted mid price across exchanges"""
|
|
total_weight = 0.0
|
|
weighted_sum = 0.0
|
|
|
|
for orderbook in orderbooks:
|
|
if orderbook.mid_price:
|
|
# Use total volume as weight
|
|
volume_weight = orderbook.bid_volume + orderbook.ask_volume
|
|
exchange_weight = self.exchange_weights.get(orderbook.exchange, 0.5)
|
|
|
|
# Combined weight
|
|
weight = volume_weight * exchange_weight
|
|
|
|
weighted_sum += orderbook.mid_price * weight
|
|
total_weight += weight
|
|
|
|
return weighted_sum / total_weight if total_weight > 0 else 0.0
|
|
|
|
def _consolidate_price_levels(self, level_lists: List[List[PriceLevel]],
|
|
exchanges: List[str], side: str) -> List[PriceLevel]:
|
|
"""Consolidate price levels from multiple exchanges"""
|
|
# Group levels by price bucket
|
|
price_groups = defaultdict(lambda: {'size': 0.0, 'count': 0, 'exchanges': set()})
|
|
|
|
for levels, exchange in zip(level_lists, exchanges):
|
|
exchange_weight = self.exchange_weights.get(exchange, 0.5)
|
|
|
|
for level in levels:
|
|
# Round price to bucket
|
|
bucket_price = self.price_bucketer.get_bucket_price(level.price)
|
|
|
|
# Add weighted volume
|
|
weighted_size = level.size * exchange_weight
|
|
price_groups[bucket_price]['size'] += weighted_size
|
|
price_groups[bucket_price]['count'] += level.count or 1
|
|
price_groups[bucket_price]['exchanges'].add(exchange)
|
|
|
|
# Create consolidated price levels
|
|
consolidated_levels = []
|
|
for price, data in price_groups.items():
|
|
if data['size'] > 0: # Only include non-zero volumes
|
|
level = PriceLevel(
|
|
price=price,
|
|
size=data['size'],
|
|
count=data['count']
|
|
)
|
|
consolidated_levels.append(level)
|
|
|
|
# Sort levels appropriately
|
|
if side == 'bid':
|
|
consolidated_levels.sort(key=lambda x: x.price, reverse=True)
|
|
else:
|
|
consolidated_levels.sort(key=lambda x: x.price)
|
|
|
|
return consolidated_levels
|
|
|
|
def _add_exchange_metadata(self, heatmap: HeatmapData,
|
|
orderbooks: List[OrderBookSnapshot]) -> None:
|
|
"""Add exchange metadata to heatmap points"""
|
|
# Create exchange mapping by price bucket
|
|
exchange_map = defaultdict(set)
|
|
|
|
for orderbook in orderbooks:
|
|
# Map bid prices to exchanges
|
|
for bid in orderbook.bids:
|
|
bucket_price = self.price_bucketer.get_bucket_price(bid.price)
|
|
exchange_map[bucket_price].add(orderbook.exchange)
|
|
|
|
# Map ask prices to exchanges
|
|
for ask in orderbook.asks:
|
|
bucket_price = self.price_bucketer.get_bucket_price(ask.price)
|
|
exchange_map[bucket_price].add(orderbook.exchange)
|
|
|
|
# Add exchange information to heatmap points
|
|
for point in heatmap.data:
|
|
bucket_price = self.price_bucketer.get_bucket_price(point.price)
|
|
# Store exchange info in a custom attribute (would need to extend HeatmapPoint)
|
|
# For now, we'll log it
|
|
exchanges_at_price = exchange_map.get(bucket_price, set())
|
|
if len(exchanges_at_price) > 1:
|
|
logger.debug(f"Price {point.price} has data from {len(exchanges_at_price)} exchanges")
|
|
|
|
def calculate_exchange_dominance(self, orderbooks: List[OrderBookSnapshot]) -> Dict[str, float]:
|
|
"""
|
|
Calculate which exchanges dominate at different price levels.
|
|
|
|
Args:
|
|
orderbooks: List of order book snapshots
|
|
|
|
Returns:
|
|
Dict[str, float]: Exchange dominance scores
|
|
"""
|
|
exchange_volumes = defaultdict(float)
|
|
total_volume = 0.0
|
|
|
|
for orderbook in orderbooks:
|
|
volume = orderbook.bid_volume + orderbook.ask_volume
|
|
exchange_volumes[orderbook.exchange] += volume
|
|
total_volume += volume
|
|
|
|
# Calculate dominance percentages
|
|
dominance = {}
|
|
for exchange, volume in exchange_volumes.items():
|
|
dominance[exchange] = (volume / total_volume * 100) if total_volume > 0 else 0.0
|
|
|
|
return dominance
|
|
|
|
def detect_arbitrage_opportunities(self, orderbooks: List[OrderBookSnapshot],
|
|
min_spread_pct: float = 0.1) -> List[Dict]:
|
|
"""
|
|
Detect potential arbitrage opportunities between exchanges.
|
|
|
|
Args:
|
|
orderbooks: List of order book snapshots
|
|
min_spread_pct: Minimum spread percentage to consider
|
|
|
|
Returns:
|
|
List[Dict]: Arbitrage opportunities
|
|
"""
|
|
opportunities = []
|
|
|
|
if len(orderbooks) < 2:
|
|
return opportunities
|
|
|
|
try:
|
|
# Find best bid and ask across exchanges
|
|
best_bids = []
|
|
best_asks = []
|
|
|
|
for orderbook in orderbooks:
|
|
if orderbook.bids and orderbook.asks:
|
|
best_bids.append({
|
|
'exchange': orderbook.exchange,
|
|
'price': orderbook.bids[0].price,
|
|
'size': orderbook.bids[0].size
|
|
})
|
|
best_asks.append({
|
|
'exchange': orderbook.exchange,
|
|
'price': orderbook.asks[0].price,
|
|
'size': orderbook.asks[0].size
|
|
})
|
|
|
|
# Sort to find best opportunities
|
|
best_bids.sort(key=lambda x: x['price'], reverse=True)
|
|
best_asks.sort(key=lambda x: x['price'])
|
|
|
|
# Check for arbitrage opportunities
|
|
for bid in best_bids:
|
|
for ask in best_asks:
|
|
if bid['exchange'] != ask['exchange'] and bid['price'] > ask['price']:
|
|
spread = bid['price'] - ask['price']
|
|
spread_pct = (spread / ask['price']) * 100
|
|
|
|
if spread_pct >= min_spread_pct:
|
|
opportunities.append({
|
|
'buy_exchange': ask['exchange'],
|
|
'sell_exchange': bid['exchange'],
|
|
'buy_price': ask['price'],
|
|
'sell_price': bid['price'],
|
|
'spread': spread,
|
|
'spread_percentage': spread_pct,
|
|
'max_size': min(bid['size'], ask['size'])
|
|
})
|
|
|
|
# Sort by spread percentage
|
|
opportunities.sort(key=lambda x: x['spread_percentage'], reverse=True)
|
|
|
|
if opportunities:
|
|
logger.info(f"Found {len(opportunities)} arbitrage opportunities")
|
|
|
|
return opportunities
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error detecting arbitrage opportunities: {e}")
|
|
return []
|
|
|
|
def get_exchange_correlation(self, orderbooks: List[OrderBookSnapshot]) -> Dict[str, Dict[str, float]]:
|
|
"""
|
|
Calculate price correlation between exchanges.
|
|
|
|
Args:
|
|
orderbooks: List of order book snapshots
|
|
|
|
Returns:
|
|
Dict: Correlation matrix between exchanges
|
|
"""
|
|
correlations = {}
|
|
|
|
# Extract mid prices by exchange
|
|
exchange_prices = {}
|
|
for orderbook in orderbooks:
|
|
if orderbook.mid_price:
|
|
exchange_prices[orderbook.exchange] = orderbook.mid_price
|
|
|
|
# Calculate simple correlation (would need historical data for proper correlation)
|
|
exchanges = list(exchange_prices.keys())
|
|
for i, exchange1 in enumerate(exchanges):
|
|
correlations[exchange1] = {}
|
|
for j, exchange2 in enumerate(exchanges):
|
|
if i == j:
|
|
correlations[exchange1][exchange2] = 1.0
|
|
else:
|
|
# Simple price difference as correlation proxy
|
|
price1 = exchange_prices[exchange1]
|
|
price2 = exchange_prices[exchange2]
|
|
diff_pct = abs(price1 - price2) / max(price1, price2) * 100
|
|
# Convert to correlation-like score (lower difference = higher correlation)
|
|
correlation = max(0.0, 1.0 - (diff_pct / 10.0))
|
|
correlations[exchange1][exchange2] = correlation
|
|
|
|
return correlations
|
|
|
|
def get_processing_stats(self) -> Dict[str, int]:
|
|
"""Get processing statistics"""
|
|
return {
|
|
'consolidations_performed': self.consolidations_performed,
|
|
'unique_exchanges_processed': len(self.exchanges_processed),
|
|
'exchanges_processed': list(self.exchanges_processed),
|
|
'bucketer_stats': self.price_bucketer.get_processing_stats(),
|
|
'heatmap_stats': self.heatmap_generator.get_processing_stats()
|
|
}
|
|
|
|
def update_exchange_weights(self, new_weights: Dict[str, float]) -> None:
|
|
"""Update exchange weights for aggregation"""
|
|
self.exchange_weights.update(new_weights)
|
|
logger.info(f"Updated exchange weights: {new_weights}")
|
|
|
|
def reset_stats(self) -> None:
|
|
"""Reset processing statistics"""
|
|
self.consolidations_performed = 0
|
|
self.exchanges_processed.clear()
|
|
self.price_bucketer.reset_stats()
|
|
self.heatmap_generator.reset_stats()
|
|
logger.info("Cross-exchange aggregator statistics reset") |