"""
|
|
Consolidated Order Book (COB) Integration Module
|
|
|
|
This module integrates the Multi-Exchange COB Provider with the existing
|
|
gogo2 trading system architecture, providing:
|
|
|
|
- Integration with existing DataProvider
|
|
- CNN/DQN model data feeding
|
|
- Dashboard data formatting
|
|
- Trading signal generation based on COB analysis
|
|
- Enhanced market microstructure analysis
|
|
|
|
Connects to the main trading dashboard and AI models.
|
|
"""

import asyncio
import logging
import numpy as np
import pandas as pd
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Callable
from threading import Thread
import json
import math
from collections import defaultdict

from .multi_exchange_cob_provider import MultiExchangeCOBProvider, COBSnapshot, ConsolidatedOrderBookLevel
from .data_provider import DataProvider, MarketTick

logger = logging.getLogger(__name__)

class COBIntegration:
    """
    Integration layer for Multi-Exchange COB data with the gogo2 trading system
    """

    def __init__(self, data_provider: Optional[DataProvider] = None, symbols: Optional[List[str]] = None):
        """
        Initialize COB Integration

        Args:
            data_provider: Existing DataProvider instance
            symbols: List of symbols to monitor
        """
        self.data_provider = data_provider
        self.symbols = symbols or ['BTC/USDT', 'ETH/USDT']

        # COB provider is initialized to None here and created in start(),
        # inside the running event loop
        self.cob_provider: Optional[MultiExchangeCOBProvider] = None

        # CNN/DQN integration
        self.cnn_callbacks: List[Callable] = []
        self.dqn_callbacks: List[Callable] = []
        self.dashboard_callbacks: List[Callable] = []

        # COB analysis and signals
        self.cob_signals: Dict[str, List[Dict]] = {}
        self.liquidity_alerts: Dict[str, List[Dict]] = {}
        self.arbitrage_opportunities: Dict[str, List[Dict]] = {}

        # Performance tracking
        self.cob_feature_cache: Dict[str, np.ndarray] = {}
        self.last_cob_features_update: Dict[str, datetime] = {}

        # Initialize signal tracking
        for symbol in self.symbols:
            self.cob_signals[symbol] = []
            self.liquidity_alerts[symbol] = []
            self.arbitrage_opportunities[symbol] = []

        logger.info("COB Integration initialized (provider is started in async start())")
        logger.info(f"Symbols: {self.symbols}")

    async def start(self):
        """Start COB integration"""
        logger.info("Starting COB Integration")

        # Create the COB provider here, within the async context
        self.cob_provider = MultiExchangeCOBProvider(
            symbols=self.symbols,
            bucket_size_bps=1.0  # 1 basis point granularity
        )

        # Register callbacks
        self.cob_provider.subscribe_to_cob_updates(self._on_cob_update)
        self.cob_provider.subscribe_to_bucket_updates(self._on_bucket_update)

        # Start COB provider streaming
        try:
            logger.info("Starting COB provider streaming...")
            await self.cob_provider.start_streaming()
        except Exception as e:
            logger.error(f"Error starting COB provider streaming: {e}")
            # Fall back to retrying the stream in a background task
            asyncio.create_task(self._start_cob_provider_background())

        # Start analysis tasks
        asyncio.create_task(self._continuous_cob_analysis())
        asyncio.create_task(self._continuous_signal_generation())

        logger.info("COB Integration started successfully")

    async def _start_cob_provider_background(self):
        """Start the COB provider in a background task"""
        try:
            logger.info("Starting COB provider in background...")
            await self.cob_provider.start_streaming()
        except Exception as e:
            logger.error(f"Error in background COB provider: {e}")

    async def stop(self):
        """Stop COB integration"""
        logger.info("Stopping COB Integration")
        if self.cob_provider:
            await self.cob_provider.stop_streaming()
        logger.info("COB Integration stopped")

    def add_cnn_callback(self, callback: Callable[[str, Dict], None]):
        """Add CNN model callback for COB features"""
        self.cnn_callbacks.append(callback)
        logger.info(f"Added CNN callback: {len(self.cnn_callbacks)} total")

    def add_dqn_callback(self, callback: Callable[[str, Dict], None]):
        """Add DQN model callback for COB state features"""
        self.dqn_callbacks.append(callback)
        logger.info(f"Added DQN callback: {len(self.dqn_callbacks)} total")

    def add_dashboard_callback(self, callback: Callable[[str, Dict], None]):
        """Add dashboard callback for COB visualization data"""
        self.dashboard_callbacks.append(callback)
        logger.info(f"Added dashboard callback: {len(self.dashboard_callbacks)} total")
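
    # Callback payload shapes (a sketch, derived from _on_cob_update below):
    #
    #     def on_cnn(symbol: str, payload: Dict) -> None:
    #         payload['features']   # np.ndarray of CNN features
    #         payload['timestamp']  # datetime of the COB snapshot
    #         payload['type']       # 'cob_features'
    #
    #     def on_dqn(symbol: str, payload: Dict) -> None:
    #         payload['state']      # np.ndarray of DQN state features
    #         payload['type']       # 'cob_state'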

    async def _on_cob_update(self, symbol: str, cob_snapshot: COBSnapshot):
        """Handle COB update from provider"""
        try:
            # Generate CNN features
            cnn_features = self._generate_cnn_features(symbol, cob_snapshot)
            if cnn_features is not None:
                self.cob_feature_cache[symbol] = cnn_features
                self.last_cob_features_update[symbol] = datetime.now()

                # Notify CNN callbacks
                for callback in self.cnn_callbacks:
                    try:
                        callback(symbol, {
                            'features': cnn_features,
                            'timestamp': cob_snapshot.timestamp,
                            'type': 'cob_features'
                        })
                    except Exception as e:
                        logger.warning(f"Error in CNN callback: {e}")

            # Generate DQN state features
            dqn_features = self._generate_dqn_features(symbol, cob_snapshot)
            if dqn_features is not None:
                for callback in self.dqn_callbacks:
                    try:
                        callback(symbol, {
                            'state': dqn_features,
                            'timestamp': cob_snapshot.timestamp,
                            'type': 'cob_state'
                        })
                    except Exception as e:
                        logger.warning(f"Error in DQN callback: {e}")

            # Generate dashboard data
            dashboard_data = self._generate_dashboard_data(symbol, cob_snapshot)
            for callback in self.dashboard_callbacks:
                try:
                    if asyncio.iscoroutinefunction(callback):
                        asyncio.create_task(callback(symbol, dashboard_data))
                    else:
                        callback(symbol, dashboard_data)
                except Exception as e:
                    logger.warning(f"Error in dashboard callback: {e}")

        except Exception as e:
            logger.error(f"Error processing COB update for {symbol}: {e}")

    async def _on_bucket_update(self, symbol: str, price_buckets: Dict):
        """Handle price bucket update from provider"""
        try:
            # Analyze bucket distribution and generate alerts
            await self._analyze_bucket_distribution(symbol, price_buckets)
        except Exception as e:
            logger.error(f"Error processing bucket update for {symbol}: {e}")

    def _generate_cnn_features(self, symbol: str, cob_snapshot: COBSnapshot) -> Optional[np.ndarray]:
        """Generate CNN input features from COB data"""
        try:
            features = []

            # Order book depth features (200 features: 20 levels x 5 features x 2 sides)
            max_levels = 20

            # Process bids
            for i in range(max_levels):
                if i < len(cob_snapshot.consolidated_bids):
                    level = cob_snapshot.consolidated_bids[i]
                    price_offset = (level.price - cob_snapshot.volume_weighted_mid) / cob_snapshot.volume_weighted_mid
                    features.extend([
                        price_offset,
                        level.total_volume_usd / 1000000,  # Normalize to millions
                        level.total_size / 1000,  # Normalize to thousands
                        len(level.exchange_breakdown),
                        level.liquidity_score
                    ])
                else:
                    features.extend([0.0, 0.0, 0.0, 0.0, 0.0])

            # Process asks
            for i in range(max_levels):
                if i < len(cob_snapshot.consolidated_asks):
                    level = cob_snapshot.consolidated_asks[i]
                    price_offset = (level.price - cob_snapshot.volume_weighted_mid) / cob_snapshot.volume_weighted_mid
                    features.extend([
                        price_offset,
                        level.total_volume_usd / 1000000,
                        level.total_size / 1000,
                        len(level.exchange_breakdown),
                        level.liquidity_score
                    ])
                else:
                    features.extend([0.0, 0.0, 0.0, 0.0, 0.0])

            # Market microstructure features (20 features)
            features.extend([
                cob_snapshot.spread_bps / 100,  # Normalize spread
                cob_snapshot.liquidity_imbalance,
                cob_snapshot.total_bid_liquidity / 1000000,
                cob_snapshot.total_ask_liquidity / 1000000,
                len(cob_snapshot.exchanges_active) / 5,  # Normalize to max 5 exchanges
                cob_snapshot.volume_weighted_mid / 100000,  # Normalize price

                # Exchange diversity metrics
                self._calculate_exchange_diversity(cob_snapshot.consolidated_bids),
                self._calculate_exchange_diversity(cob_snapshot.consolidated_asks),

                # Price bucket concentration
                self._calculate_bucket_concentration(cob_snapshot.price_buckets, 'bids'),
                self._calculate_bucket_concentration(cob_snapshot.price_buckets, 'asks'),

                # Liquidity depth metrics
                self._calculate_liquidity_depth_ratio(cob_snapshot.consolidated_bids, 5),
                self._calculate_liquidity_depth_ratio(cob_snapshot.consolidated_asks, 5),

                # Time-based features
                cob_snapshot.timestamp.hour / 24,
                cob_snapshot.timestamp.minute / 60,
                cob_snapshot.timestamp.weekday() / 7,

                # Additional features (reserved padding)
                0.0, 0.0, 0.0, 0.0, 0.0
            ])

            return np.array(features, dtype=np.float32)

        except Exception as e:
            logger.error(f"Error generating CNN features for {symbol}: {e}")
            return None
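
    # Note: the vector built above has a fixed length of 220 features:
    # 200 order book depth features (20 levels x 5 features x 2 sides)
    # plus the 20 market microstructure features.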

    def _generate_dqn_features(self, symbol: str, cob_snapshot: COBSnapshot) -> Optional[np.ndarray]:
        """Generate DQN state features from COB data"""
        try:
            state_features = []

            # Normalized order book state (20 features)
            total_liquidity = cob_snapshot.total_bid_liquidity + cob_snapshot.total_ask_liquidity

            if total_liquidity > 0:
                # Top 10 bid levels (normalized by total liquidity)
                for i in range(10):
                    if i < len(cob_snapshot.consolidated_bids):
                        level = cob_snapshot.consolidated_bids[i]
                        state_features.append(level.total_volume_usd / total_liquidity)
                    else:
                        state_features.append(0.0)

                # Top 10 ask levels (normalized by total liquidity)
                for i in range(10):
                    if i < len(cob_snapshot.consolidated_asks):
                        level = cob_snapshot.consolidated_asks[i]
                        state_features.append(level.total_volume_usd / total_liquidity)
                    else:
                        state_features.append(0.0)
            else:
                state_features.extend([0.0] * 20)

            # Market state indicators (10 features)
            state_features.extend([
                cob_snapshot.spread_bps / 1000,  # Normalized spread
                cob_snapshot.liquidity_imbalance,
                len(cob_snapshot.exchanges_active) / 5,  # Exchange count ratio
                min(1.0, total_liquidity / 10000000),  # Liquidity abundance
                0.5,  # Price efficiency placeholder
                min(1.0, total_liquidity / 5000000),  # Market impact resistance
                0.0,  # Arbitrage score placeholder
                0.0,  # Liquidity fragmentation placeholder
                (datetime.now().hour * 60 + datetime.now().minute) / 1440,  # Time of day
                0.5  # Market regime indicator placeholder
            ])

            return np.array(state_features, dtype=np.float32)

        except Exception as e:
            logger.error(f"Error generating DQN features for {symbol}: {e}")
            return None
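
    # Note: the DQN state vector is 30-dimensional
    # (20 normalized depth features + 10 market state indicators).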

    def _generate_dashboard_data(self, symbol: str, cob_snapshot: COBSnapshot) -> Dict:
        """Generate formatted data for dashboard visualization"""
        try:
            # Get fixed bucket size for the symbol
            bucket_size = 1.0  # Default bucket size
            if self.cob_provider:
                bucket_size = self.cob_provider.fixed_usd_buckets.get(symbol, 1.0)

            # Aggregate liquidity into fixed USD buckets (currently unused by the
            # payload below, which prefers raw order book levels for precision)
            bid_buckets = defaultdict(float)
            ask_buckets = defaultdict(float)

            for bid in cob_snapshot.consolidated_bids:
                bucket_price = math.floor(bid.price / bucket_size) * bucket_size
                bid_buckets[bucket_price] += bid.total_volume_usd

            for ask in cob_snapshot.consolidated_asks:
                bucket_price = math.floor(ask.price / bucket_size) * bucket_size
                ask_buckets[bucket_price] += ask.total_volume_usd

            # Use actual order book levels rather than bucketed data for better
            # precision, taking up to 100 levels per side
            bid_data = []
            ask_data = []

            for bid in cob_snapshot.consolidated_bids[:100]:
                bid_data.append({
                    'price': bid.price,
                    'volume': bid.total_volume_usd,
                    'side': 'bid'
                })

            for ask in cob_snapshot.consolidated_asks[:100]:
                ask_data.append({
                    'price': ask.price,
                    'volume': ask.total_volume_usd,
                    'side': 'ask'
                })

            logger.debug(f"Dashboard data for {symbol}: {len(bid_data)} bids, {len(ask_data)} asks")
            if bid_data:
                logger.debug(f"Top bid: ${bid_data[0]['price']:.2f} (${bid_data[0]['volume']:,.0f})")
            if ask_data:
                logger.debug(f"Top ask: ${ask_data[0]['price']:.2f} (${ask_data[0]['volume']:,.0f})")

            # Get actual Session Volume Profile (SVP) from trade data
            svp_data = []
            svp_result = None
            if self.cob_provider:
                try:
                    svp_result = self.cob_provider.get_session_volume_profile(symbol, bucket_size)
                    if svp_result and 'data' in svp_result:
                        svp_data = svp_result['data']
                        logger.debug(f"Retrieved SVP data for {symbol}: {len(svp_data)} price levels")
                    else:
                        logger.warning(f"No SVP data available for {symbol}")
                except Exception as e:
                    logger.error(f"Error getting SVP data for {symbol}: {e}")

            # Generate market stats
            stats = {
                'symbol': symbol,
                'timestamp': cob_snapshot.timestamp.isoformat(),
                'mid_price': cob_snapshot.volume_weighted_mid,
                'spread_bps': cob_snapshot.spread_bps,
                'bid_liquidity': cob_snapshot.total_bid_liquidity,
                'ask_liquidity': cob_snapshot.total_ask_liquidity,
                'total_bid_liquidity': cob_snapshot.total_bid_liquidity,
                'total_ask_liquidity': cob_snapshot.total_ask_liquidity,
                'imbalance': cob_snapshot.liquidity_imbalance,
                'liquidity_imbalance': cob_snapshot.liquidity_imbalance,
                'bid_levels': len(bid_data),
                'ask_levels': len(ask_data),
                'exchanges_active': cob_snapshot.exchanges_active,
                'bucket_size': bucket_size
            }

            # Add exchange diversity metrics
            stats['bid_exchange_diversity'] = self._calculate_exchange_diversity(cob_snapshot.consolidated_bids[:20])
            stats['ask_exchange_diversity'] = self._calculate_exchange_diversity(cob_snapshot.consolidated_asks[:20])

            # Add SVP statistics
            if svp_data:
                total_traded_volume = sum(item['total_volume'] for item in svp_data)
                stats['total_traded_volume'] = total_traded_volume
                stats['svp_price_levels'] = len(svp_data)
                stats['session_start'] = svp_result.get('session_start', '') if svp_result else ''
            else:
                stats['total_traded_volume'] = 0
                stats['svp_price_levels'] = 0
                stats['session_start'] = ''

            # Get additional real-time stats
            if self.cob_provider:
                try:
                    realtime_stats = self.cob_provider.get_realtime_stats(symbol)
                    if realtime_stats:
                        stats['realtime_1s'] = realtime_stats.get('1s_stats', {})
                        stats['realtime_5s'] = realtime_stats.get('5s_stats', {})
                    else:
                        stats['realtime_1s'] = {}
                        stats['realtime_5s'] = {}
                except Exception as e:
                    logger.error(f"Error getting real-time stats for {symbol}: {e}")
                    stats['realtime_1s'] = {}
                    stats['realtime_5s'] = {}

            return {
                'type': 'cob_update',
                'data': {
                    'bids': bid_data,
                    'asks': ask_data,
                    'svp': svp_data,
                    'stats': stats
                }
            }

        except Exception as e:
            logger.error(f"Error generating dashboard data for {symbol}: {e}")
            return {
                'type': 'error',
                'data': {'error': str(e)}
            }
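
    # Dashboard callbacks receive the payload returned above, e.g.:
    #
    #     payload['data']['bids'][0]               # {'price': ..., 'volume': ..., 'side': 'bid'}
    #     payload['data']['stats']['spread_bps']   # consolidated spread in bps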

    def _calculate_exchange_diversity(self, levels: List[ConsolidatedOrderBookLevel]) -> float:
        """Calculate exchange diversity in order book levels"""
        if not levels:
            return 0.0

        exchange_counts = {}
        total_volume = 0

        for level in levels[:10]:  # Top 10 levels
            total_volume += level.total_volume_usd
            for exchange in level.exchange_breakdown:
                exchange_counts[exchange] = exchange_counts.get(exchange, 0) + level.exchange_breakdown[exchange].volume_usd

        if total_volume == 0:
            return 0.0

        # Diversity score: 1 minus the Herfindahl-Hirschman Index (HHI)
        # of per-exchange volume shares
        hhi = sum((volume / total_volume) ** 2 for volume in exchange_counts.values())
        return 1 - hhi
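
    # Worked example: volume split evenly across two exchanges gives
    # HHI = 0.5**2 + 0.5**2 = 0.5, so diversity = 1 - 0.5 = 0.5; a
    # single-exchange book gives HHI = 1.0 and diversity 0.0.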

    def _calculate_bucket_concentration(self, price_buckets: Dict, side: str) -> float:
        """Calculate concentration of liquidity in price buckets
        (share of volume held by the top 20% of buckets)"""
        buckets = price_buckets.get(side, {})
        if not buckets:
            return 0.0

        volumes = [bucket['volume_usd'] for bucket in buckets.values()]
        total_volume = sum(volumes)

        if total_volume == 0:
            return 0.0

        sorted_volumes = sorted(volumes, reverse=True)
        top_20_percent = int(len(sorted_volumes) * 0.2) or 1
        return sum(sorted_volumes[:top_20_percent]) / total_volume
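
    # Worked example: with 10 buckets where the two largest hold 80% of the
    # volume, top_20_percent = 2 and the concentration score is 0.8.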

    def _calculate_liquidity_depth_ratio(self, levels: List[ConsolidatedOrderBookLevel], top_n: int) -> float:
        """Calculate the ratio of top-N level liquidity to total liquidity"""
        if not levels:
            return 0.0

        top_n_volume = sum(level.total_volume_usd for level in levels[:top_n])
        total_volume = sum(level.total_volume_usd for level in levels)

        return top_n_volume / total_volume if total_volume > 0 else 0.0

    async def _continuous_cob_analysis(self):
        """Continuously analyze COB data for patterns and signals"""
        while True:
            try:
                for symbol in self.symbols:
                    if self.cob_provider:
                        cob_snapshot = self.cob_provider.get_consolidated_orderbook(symbol)
                        if cob_snapshot:
                            await self._analyze_cob_patterns(symbol, cob_snapshot)

                await asyncio.sleep(1)

            except Exception as e:
                logger.error(f"Error in COB analysis loop: {e}")
                await asyncio.sleep(5)

    async def _analyze_cob_patterns(self, symbol: str, cob_snapshot: COBSnapshot):
        """Analyze COB data for trading patterns and signals"""
        try:
            # Enhanced liquidity imbalance detection with dynamic thresholds.
            # Stronger imbalances get lower thresholds and higher confidence
            # multipliers; since each tier's threshold sits below its floor, any
            # |imbalance| above 0.2 fires a signal and the tier mainly controls
            # the confidence scaling.
            imbalance = abs(cob_snapshot.liquidity_imbalance)

            if imbalance > 0.8:  # Very strong imbalance (>80%)
                threshold = 0.05
                confidence_multiplier = 3.0
            elif imbalance > 0.5:  # Strong imbalance (>50%)
                threshold = 0.1
                confidence_multiplier = 2.5
            elif imbalance > 0.3:  # Moderate imbalance (>30%)
                threshold = 0.15
                confidence_multiplier = 2.0
            else:  # Weak imbalance
                threshold = 0.2
                confidence_multiplier = 1.5

            # Generate signal if imbalance exceeds threshold
            if imbalance > threshold:
                signal = {
                    'timestamp': cob_snapshot.timestamp.isoformat(),
                    'type': 'liquidity_imbalance',
                    'side': 'buy' if cob_snapshot.liquidity_imbalance > 0 else 'sell',
                    'strength': imbalance,
                    'confidence': min(1.0, imbalance * confidence_multiplier),
                    'threshold_used': threshold,
                    'signal_strength': 'very_strong' if imbalance > 0.8 else 'strong' if imbalance > 0.5 else 'moderate' if imbalance > 0.3 else 'weak'
                }
                self.cob_signals[symbol].append(signal)
                logger.info(f"COB SIGNAL: {symbol} {signal['side'].upper()} signal generated - imbalance: {cob_snapshot.liquidity_imbalance:.3f}, confidence: {signal['confidence']:.3f}")

            # Keep only the most recent 100 signals
            self.cob_signals[symbol] = self.cob_signals[symbol][-100:]

        except Exception as e:
            logger.error(f"Error analyzing COB patterns for {symbol}: {e}")
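
    # Worked example: liquidity_imbalance = 0.6 falls in the "strong" tier,
    # so threshold = 0.1 and confidence = min(1.0, 0.6 * 2.5) = 1.0, emitting
    # a 'buy' signal with signal_strength 'strong'.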

    async def _analyze_bucket_distribution(self, symbol: str, price_buckets: Dict):
        """Analyze price bucket distribution for patterns"""
        try:
            # Placeholder for bucket analysis
            pass
        except Exception as e:
            logger.error(f"Error analyzing bucket distribution for {symbol}: {e}")

    async def _continuous_signal_generation(self):
        """Continuously generate trading signals based on COB analysis
        (currently a placeholder loop)"""
        while True:
            try:
                await asyncio.sleep(5)
            except Exception as e:
                logger.error(f"Error in signal generation loop: {e}")
                await asyncio.sleep(10)

    # Public interface methods

    def get_cob_features(self, symbol: str) -> Optional[np.ndarray]:
        """Get latest CNN features for a symbol"""
        return self.cob_feature_cache.get(symbol)

    def get_cob_snapshot(self, symbol: str) -> Optional[COBSnapshot]:
        """Get latest COB snapshot for a symbol"""
        if not self.cob_provider:
            return None
        return self.cob_provider.get_consolidated_orderbook(symbol)

    def get_market_depth_analysis(self, symbol: str) -> Optional[Dict]:
        """Get detailed market depth analysis"""
        if not self.cob_provider:
            return None
        return self.cob_provider.get_market_depth_analysis(symbol)

    def get_exchange_breakdown(self, symbol: str) -> Optional[Dict]:
        """Get liquidity breakdown by exchange"""
        if not self.cob_provider:
            return None
        return self.cob_provider.get_exchange_breakdown(symbol)

    def get_price_buckets(self, symbol: str) -> Optional[Dict]:
        """Get fine-grained price buckets"""
        if not self.cob_provider:
            return None
        return self.cob_provider.get_price_buckets(symbol)

    def get_recent_signals(self, symbol: str, count: int = 20) -> List[Dict]:
        """Get recent COB-based trading signals"""
        return self.cob_signals.get(symbol, [])[-count:]

    def get_statistics(self) -> Dict[str, Any]:
        """Get COB integration statistics"""
        if not self.cob_provider:
            return {
                'cnn_callbacks': len(self.cnn_callbacks),
                'dqn_callbacks': len(self.dqn_callbacks),
                'dashboard_callbacks': len(self.dashboard_callbacks),
                'cached_features': list(self.cob_feature_cache.keys()),
                'total_signals': {symbol: len(signals) for symbol, signals in self.cob_signals.items()},
                'provider_status': 'Not initialized'
            }

        provider_stats = self.cob_provider.get_statistics()

        return {
            **provider_stats,
            'cnn_callbacks': len(self.cnn_callbacks),
            'dqn_callbacks': len(self.dqn_callbacks),
            'dashboard_callbacks': len(self.dashboard_callbacks),
            'cached_features': list(self.cob_feature_cache.keys()),
            'total_signals': {symbol: len(signals) for symbol, signals in self.cob_signals.items()}
        }

    def get_realtime_stats_for_nn(self, symbol: str) -> Dict:
        """Get real-time statistics formatted for NN models"""
        try:
            # Check if COB provider is initialized
            if not self.cob_provider:
                logger.debug(f"COB provider not initialized yet for {symbol}")
                return {}

            realtime_stats = self.cob_provider.get_realtime_stats(symbol)
            if not realtime_stats:
                return {}

            # Format for NN consumption
            nn_stats = {
                'symbol': symbol,
                'timestamp': datetime.now().isoformat(),
                'current': {
                    'mid_price': 0.0,
                    'spread_bps': 0.0,
                    'bid_liquidity': 0.0,
                    'ask_liquidity': 0.0,
                    'imbalance': 0.0
                },
                '1s_window': realtime_stats.get('1s_stats', {}),
                '5s_window': realtime_stats.get('5s_stats', {})
            }

            # Fill current values from the latest COB snapshot, if available
            cob_snapshot = self.cob_provider.get_consolidated_orderbook(symbol)
            if cob_snapshot:
                nn_stats['current'] = {
                    'mid_price': cob_snapshot.volume_weighted_mid,
                    'spread_bps': cob_snapshot.spread_bps,
                    'bid_liquidity': cob_snapshot.total_bid_liquidity,
                    'ask_liquidity': cob_snapshot.total_ask_liquidity,
                    'imbalance': cob_snapshot.liquidity_imbalance
                }

            return nn_stats

        except Exception as e:
            logger.error(f"Error getting NN stats for {symbol}: {e}")
            return {}

    def get_realtime_stats(self):
        """Get real-time stats from the provider, or an empty COBSnapshot on failure"""

        def _empty_snapshot() -> COBSnapshot:
            # Default empty snapshot; field values assume the types used elsewhere
            # in this module (datetime timestamp, list of active exchanges, dict
            # of price buckets)
            return COBSnapshot(
                symbol="",
                timestamp=datetime.now(),
                exchanges_active=[],
                total_bid_liquidity=0.0,
                total_ask_liquidity=0.0,
                price_buckets={},
                volume_weighted_mid=0.0,
                spread_bps=0.0,
                liquidity_imbalance=0.0,
                consolidated_bids=[],
                consolidated_asks=[]
            )

        # The provider is created in the async start(); it cannot be initialized
        # from this synchronous method
        if self.cob_provider is None:
            logger.error("COB provider is not initialized; returning default empty snapshot.")
            return _empty_snapshot()

        try:
            # Note: elsewhere in this module the provider's get_realtime_stats
            # takes a symbol argument; the symbol-less call here follows the
            # original code
            return self.cob_provider.get_realtime_stats()
        except Exception as e:
            logger.error(f"Error retrieving COB snapshot: {e}")
            return _empty_snapshot()

    def stop_streaming(self):
        """No-op: streaming shutdown is handled by the async stop() coroutine"""
        pass

    def _initialize_cob_integration(self):
        """Initialize COB integration with high-frequency data handling

        Note: this method expects the host object to provide `orchestrator`,
        `data_provider`, `_start_simple_cob_collection`, and a module-level
        COB_INTEGRATION_AVAILABLE flag; none of these are defined in this file.
        """
        logger.info("Initializing COB integration...")
        if not COB_INTEGRATION_AVAILABLE:
            logger.warning("COB integration not available - skipping initialization")
            return

        try:
            if not hasattr(self.orchestrator, 'cob_integration') or self.orchestrator.cob_integration is None:
                logger.info("Creating new COB integration instance")
                self.orchestrator.cob_integration = COBIntegration(self.data_provider)
            else:
                logger.info("Using existing COB integration from orchestrator")

            # Start simple COB data collection for both symbols
            self._start_simple_cob_collection()
            logger.info("COB integration initialized successfully")
        except Exception as e:
            logger.error(f"Error initializing COB integration: {e}")