working with errors

Author: Dobromir Popov
Date: 2025-07-20 01:52:36 +03:00
parent 92919cb1ef
commit 469269e809
7 changed files with 1237 additions and 149 deletions


@ -18,6 +18,15 @@ This ensures consistent data across all models and components.
Uses layout and component managers to reduce file size and improve maintainability
"""
# Force matplotlib to use non-interactive backend before any imports
import os
os.environ['MPLBACKEND'] = 'Agg'
# Set matplotlib configuration
import matplotlib
matplotlib.use('Agg') # Use non-interactive Agg backend
matplotlib.interactive(False) # Disable interactive mode
import dash
from dash import Dash, dcc, html, Input, Output, State
import plotly.graph_objects as go
@ -33,6 +42,7 @@ import threading
from typing import Dict, List, Optional, Any, Union
import os
import asyncio
import sys # Import sys for global exception handler
import dash_bootstrap_components as dbc
from dash.exceptions import PreventUpdate
from collections import deque
@ -236,7 +246,7 @@ class CleanTradingDashboard:
logger.debug("Clean Trading Dashboard initialized with HIGH-FREQUENCY COB integration and signal generation")
logger.info("🌙 Overnight Training Coordinator ready - call start_overnight_training() to begin")
def start_overnight_training(self):
"""Start the overnight training session"""
try:
@ -411,6 +421,19 @@ class CleanTradingDashboard:
logger.error(f"Error getting model status: {e}")
return {'loaded_models': {}, 'total_models': 0, 'system_status': 'ERROR'}
def _safe_strftime(self, timestamp_val, format_str='%H:%M:%S'):
"""Safely format timestamp, handling both string and datetime objects"""
try:
if isinstance(timestamp_val, str):
return timestamp_val
elif hasattr(timestamp_val, 'strftime'):
return timestamp_val.strftime(format_str)
else:
return datetime.now().strftime(format_str)
except Exception as e:
logger.debug(f"Error formatting timestamp {timestamp_val}: {e}")
return datetime.now().strftime(format_str)
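For reference, a minimal standalone sketch of the same fallback pattern; the free-function name and sample values below are illustrative, not part of the dashboard class:

from datetime import datetime

def safe_strftime(timestamp_val, format_str='%H:%M:%S'):
    """Sketch: pass strings through, format datetime-likes, otherwise fall back to now()."""
    if isinstance(timestamp_val, str):
        return timestamp_val                      # already a display string
    if hasattr(timestamp_val, 'strftime'):
        return timestamp_val.strftime(format_str) # datetime or pandas Timestamp
    return datetime.now().strftime(format_str)    # unknown type: use current time

assert safe_strftime('01:52:36') == '01:52:36'
assert safe_strftime(datetime(2025, 7, 20, 1, 52, 36)) == '01:52:36'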
def _get_initial_balance(self) -> float:
"""Get initial balance from trading executor or default"""
try:
@ -616,7 +639,18 @@ class CleanTradingDashboard:
# Only show signals that are significantly different or from different time periods
signal_key = f"{action}_{int(price)}_{int(confidence*100)}"
time_key = int(timestamp.timestamp() // 30) # Group by 30-second intervals
# Handle timestamp safely - could be string or datetime
if isinstance(timestamp, str):
try:
# Try to parse string timestamp
timestamp_dt = datetime.strptime(timestamp, '%H:%M:%S')
time_key = int(timestamp_dt.timestamp() // 30)
except (ValueError, TypeError):
time_key = int(datetime.now().timestamp() // 30)
elif hasattr(timestamp, 'timestamp'):
time_key = int(timestamp.timestamp() // 30)
else:
time_key = int(datetime.now().timestamp() // 30)
full_key = f"{signal_key}_{time_key}"
if full_key not in seen_signals:
@ -709,6 +743,9 @@ class CleanTradingDashboard:
btc_data_time = self.cob_last_update.get('BTC/USDT', 0) if hasattr(self, 'cob_last_update') else 0
import time
current_time = time.time()
# Ensure data times are not None
eth_data_time = eth_data_time or 0
btc_data_time = btc_data_time or 0
logger.info(f"COB Data Age: ETH: {current_time - eth_data_time:.1f}s, BTC: {current_time - btc_data_time:.1f}s")
eth_imbalance_stats = self._calculate_cumulative_imbalance('ETH/USDT')
@ -717,6 +754,20 @@ class CleanTradingDashboard:
# Determine COB data source mode
cob_mode = self._get_cob_mode()
# Debug: Log snapshot types only when needed (every 1000 intervals)
if n % 1000 == 0:
logger.debug(f"DEBUG: ETH snapshot type: {type(eth_snapshot)}, BTC snapshot type: {type(btc_snapshot)}")
if isinstance(eth_snapshot, list):
logger.debug(f"ETH snapshot is a list with {len(eth_snapshot)} items: {eth_snapshot[:2] if eth_snapshot else 'empty'}")
if isinstance(btc_snapshot, list):
logger.error(f"BTC snapshot is a list with {len(btc_snapshot)} items: {btc_snapshot[:2] if btc_snapshot else 'empty'}")
# If we get a list, don't pass it to the formatter - create a proper object or return None
if isinstance(eth_snapshot, list):
eth_snapshot = None
if isinstance(btc_snapshot, list):
btc_snapshot = None
eth_components = self.component_manager.format_cob_data(eth_snapshot, 'ETH/USDT', eth_imbalance_stats, cob_mode)
btc_components = self.component_manager.format_cob_data(btc_snapshot, 'BTC/USDT', btc_imbalance_stats, cob_mode)
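A minimal sketch of the defensive pattern applied above, in isolation: unexpected list payloads are downgraded to None so nothing downstream calls attribute accessors on a list; the helper names here are illustrative:

from typing import Any, Optional

def normalize_snapshot(snapshot: Any) -> Optional[Any]:
    """Treat an unexpected raw list payload as missing data instead of passing it on."""
    return None if isinstance(snapshot, list) else snapshot

def render_mid_price(snapshot: Optional[Any]) -> str:
    """Read snapshot attributes only when a real snapshot object is present."""
    if snapshot is None:
        return "No COB data"
    return f"mid={getattr(snapshot, 'volume_weighted_mid', 0.0):.2f}"

# Raw [[price, qty], ...] levels are rendered as missing data rather than crashing:
print(render_mid_price(normalize_snapshot([[3500.0, 1.2], [3499.5, 0.8]])))  # No COB data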
@ -2252,16 +2303,73 @@ class CleanTradingDashboard:
return {'error': str(e), 'cob_status': 'Error Getting Status', 'orchestrator_type': 'Unknown'}
def _get_cob_snapshot(self, symbol: str) -> Optional[Any]:
"""Get COB snapshot for symbol - PERFORMANCE OPTIMIZED: Use orchestrator's COB integration"""
"""Get COB snapshot for symbol - CENTRALIZED: Use data provider's COB data"""
try:
# PERFORMANCE FIX: Use orchestrator's COB integration instead of separate dashboard integration
# This eliminates redundant COB providers and improves performance
# Priority 1: Use data provider's centralized COB data (primary source)
if self.data_provider:
try:
cob_data = self.data_provider.get_latest_cob_data(symbol)
logger.debug(f"COB data type for {symbol}: {type(cob_data)}, data: {cob_data}")
if cob_data and isinstance(cob_data, dict) and 'stats' in cob_data:
logger.debug(f"COB snapshot available for {symbol} from centralized data provider")
# Create a snapshot object from the data provider's data
class COBSnapshot:
def __init__(self, data):
# Convert list format [[price, qty], ...] to dictionary format
raw_bids = data.get('bids', [])
raw_asks = data.get('asks', [])
# Convert to dictionary format expected by component manager
self.consolidated_bids = []
for bid in raw_bids:
if isinstance(bid, list) and len(bid) >= 2:
self.consolidated_bids.append({
'price': bid[0],
'size': bid[1],
'total_size': bid[1],
'total_volume_usd': bid[0] * bid[1]
})
self.consolidated_asks = []
for ask in raw_asks:
if isinstance(ask, list) and len(ask) >= 2:
self.consolidated_asks.append({
'price': ask[0],
'size': ask[1],
'total_size': ask[1],
'total_volume_usd': ask[0] * ask[1]
})
self.stats = data.get('stats', {})
# Add direct attributes for new format compatibility
self.volume_weighted_mid = self.stats.get('mid_price', 0)
self.spread_bps = self.stats.get('spread_bps', 0)
self.liquidity_imbalance = self.stats.get('imbalance', 0)
self.total_bid_liquidity = self.stats.get('bid_liquidity', 0)
self.total_ask_liquidity = self.stats.get('ask_liquidity', 0)
self.exchanges_active = ['Binance'] # Default for now
return COBSnapshot(cob_data)
else:
logger.warning(f"Invalid COB data for {symbol}: type={type(cob_data)}, has_stats={'stats' in cob_data if isinstance(cob_data, dict) else False}")
except Exception as e:
logger.error(f"Error getting COB data from data provider: {e}")
# Priority 2: Use orchestrator's COB integration (secondary source)
if hasattr(self.orchestrator, 'cob_integration') and self.orchestrator.cob_integration:
# First try to get snapshot from orchestrator's COB integration
# Try to get snapshot from orchestrator's COB integration
snapshot = self.orchestrator.cob_integration.get_cob_snapshot(symbol)
if snapshot:
logger.debug(f"COB snapshot available for {symbol} from orchestrator COB integration")
return snapshot
logger.debug(f"COB snapshot available for {symbol} from orchestrator COB integration, type: {type(snapshot)}")
# Check if it's a list (which would cause the error)
if isinstance(snapshot, list):
logger.warning(f"Orchestrator returned list instead of COB snapshot for {symbol}")
# Don't return the list, continue to other sources
else:
return snapshot
# If no snapshot, try to get from orchestrator's cached data
if hasattr(self.orchestrator, 'latest_cob_data') and symbol in self.orchestrator.latest_cob_data:
@ -2277,7 +2385,7 @@ class CleanTradingDashboard:
return COBSnapshot(cob_data)
# Fallback: Use cached COB data if orchestrator integration not available
# Priority 3: Use dashboard's cached COB data (last resort fallback)
if symbol in self.latest_cob_data and self.latest_cob_data[symbol]:
cob_data = self.latest_cob_data[symbol]
logger.debug(f"COB snapshot available for {symbol} from dashboard cached data (fallback)")
@ -2298,7 +2406,7 @@ class CleanTradingDashboard:
return COBSnapshot(cob_data)
logger.debug(f"No COB snapshot available for {symbol} - no orchestrator integration or cached data")
logger.debug(f"No COB snapshot available for {symbol} - no data provider, orchestrator integration, or cached data")
return None
except Exception as e:
@ -2458,7 +2566,13 @@ class CleanTradingDashboard:
if dqn_latest:
last_action = dqn_latest.get('action', 'NONE')
last_confidence = dqn_latest.get('confidence', 0.72)
last_timestamp = dqn_latest.get('timestamp', datetime.now()).strftime('%H:%M:%S')
timestamp_val = dqn_latest.get('timestamp', datetime.now())
if isinstance(timestamp_val, str):
last_timestamp = timestamp_val
elif hasattr(timestamp_val, 'strftime'):
last_timestamp = timestamp_val.strftime('%H:%M:%S')
else:
last_timestamp = datetime.now().strftime('%H:%M:%S')
else:
if signal_generation_active and len(self.recent_decisions) > 0:
recent_signal = self.recent_decisions[-1]
@ -2531,7 +2645,13 @@ class CleanTradingDashboard:
if cnn_latest:
cnn_action = cnn_latest.get('action', 'PATTERN_ANALYSIS')
cnn_confidence = cnn_latest.get('confidence', 0.68)
cnn_timestamp = cnn_latest.get('timestamp', datetime.now()).strftime('%H:%M:%S')
timestamp_val = cnn_latest.get('timestamp', datetime.now())
if isinstance(timestamp_val, str):
cnn_timestamp = timestamp_val
elif hasattr(timestamp_val, 'strftime'):
cnn_timestamp = timestamp_val.strftime('%H:%M:%S')
else:
cnn_timestamp = datetime.now().strftime('%H:%M:%S')
cnn_predicted_price = cnn_latest.get('predicted_price', 0)
else:
cnn_action = 'PATTERN_ANALYSIS'
@ -2594,7 +2714,13 @@ class CleanTradingDashboard:
if transformer_latest:
transformer_action = transformer_latest.get('action', 'PRICE_PREDICTION')
transformer_confidence = transformer_latest.get('confidence', 0.75)
transformer_timestamp = transformer_latest.get('timestamp', datetime.now()).strftime('%H:%M:%S')
timestamp_val = transformer_latest.get('timestamp', datetime.now())
if isinstance(timestamp_val, str):
transformer_timestamp = timestamp_val
elif hasattr(timestamp_val, 'strftime'):
transformer_timestamp = timestamp_val.strftime('%H:%M:%S')
else:
transformer_timestamp = datetime.now().strftime('%H:%M:%S')
transformer_predicted_price = transformer_latest.get('predicted_price', 0)
transformer_price_change = transformer_latest.get('price_change', 0)
else:
@ -5159,11 +5285,11 @@ class CleanTradingDashboard:
self.position_sync_enabled = False
def _initialize_cob_integration(self):
"""Initialize COB integration using orchestrator's COB system"""
"""Initialize COB integration using centralized data provider"""
try:
logger.info("Initializing COB integration via orchestrator")
logger.info("Initializing COB integration via centralized data provider")
# Initialize COB data storage (for fallback)
# Initialize COB data storage (for dashboard display)
self.cob_data_history = {
'ETH/USDT': [],
'BTC/USDT': []
@ -5181,9 +5307,15 @@ class CleanTradingDashboard:
'BTC/USDT': None
}
# Check if orchestrator has COB integration
# Primary approach: Use the data provider's centralized COB collection
if self.data_provider:
logger.info("Using centralized data provider for COB data collection")
self._start_simple_cob_collection() # This now uses the data provider
# Secondary approach: If orchestrator has COB integration, use that as well
# This ensures we have multiple data sources for redundancy
if hasattr(self.orchestrator, 'cob_integration') and self.orchestrator.cob_integration:
logger.info("Using orchestrator's COB integration")
logger.info("Also using orchestrator's COB integration as secondary source")
# Start orchestrator's COB integration in background
def start_orchestrator_cob():
@ -5199,137 +5331,129 @@ class CleanTradingDashboard:
cob_thread = threading.Thread(target=start_orchestrator_cob, daemon=True)
cob_thread.start()
logger.info("Orchestrator COB integration started successfully")
else:
logger.warning("Orchestrator COB integration not available, using fallback simple collection")
# Fallback to simple collection
self._start_simple_cob_collection()
# ALWAYS start simple collection as backup even if orchestrator COB exists
# This ensures we have data flowing while orchestrator COB integration starts up
logger.info("Starting simple COB collection as backup/fallback")
self._start_simple_cob_collection()
logger.info("Orchestrator COB integration started as secondary source")
except Exception as e:
logger.error(f"Error initializing COB integration: {e}")
# Fallback to simple collection
self._start_simple_cob_collection()
# Last resort fallback
if self.data_provider:
logger.warning("Falling back to direct data provider COB collection")
self._start_simple_cob_collection()
def _start_simple_cob_collection(self):
"""Start simple COB data collection using REST APIs (no async required)"""
"""Start COB data collection using the centralized data provider"""
try:
import threading
import time
def cob_collector():
"""Collect COB data using simple REST API calls"""
while True:
# Use the data provider's COB collection instead of implementing our own
if self.data_provider:
# Start the centralized COB data collection in the data provider
self.data_provider.start_cob_collection()
# Subscribe to COB updates from the data provider
def cob_update_callback(symbol, cob_snapshot):
"""Callback for COB data updates from data provider"""
try:
# Collect data for both symbols
for symbol in ['ETH/USDT', 'BTC/USDT']:
self._collect_simple_cob_data(symbol)
# Store the latest COB data
if not hasattr(self, 'latest_cob_data'):
self.latest_cob_data = {}
# Sleep for 1 second between collections
time.sleep(1)
self.latest_cob_data[symbol] = cob_snapshot
# Update current price from COB data
if 'stats' in cob_snapshot and 'mid_price' in cob_snapshot['stats']:
self.current_prices[symbol] = cob_snapshot['stats']['mid_price']
except Exception as e:
logger.debug(f"Error in COB collection: {e}")
time.sleep(5) # Wait longer on error
# Start collector in background thread
cob_thread = threading.Thread(target=cob_collector, daemon=True)
cob_thread.start()
logger.info("Simple COB data collection started")
logger.debug(f"Error in COB update callback: {e}")
# Register for COB updates
self.data_provider.subscribe_to_cob(cob_update_callback)
logger.info("Centralized COB data collection started via data provider")
else:
logger.error("Cannot start COB collection - data provider not available")
except Exception as e:
logger.error(f"Error starting COB collection: {e}")
def _collect_simple_cob_data(self, symbol: str):
"""Collect simple COB data using Binance REST API"""
"""Get COB data from the centralized data provider"""
try:
import requests
import time
# Use Binance REST API for order book data
binance_symbol = symbol.replace('/', '')
url = f"https://api.binance.com/api/v3/depth?symbol={binance_symbol}&limit=500"
response = requests.get(url, timeout=5)
if response.status_code == 200:
data = response.json()
# Use the data provider to get COB data
if self.data_provider:
# Get the COB data from the data provider
cob_snapshot = self.data_provider.collect_cob_data(symbol)
# Process order book data
bids = []
asks = []
# Process bids (buy orders)
for bid in data['bids'][:100]: # Top 100 levels
price = float(bid[0])
size = float(bid[1])
bids.append({
'price': price,
'size': size,
'total': price * size
})
# Process asks (sell orders)
for ask in data['asks'][:100]: # Top 100 levels
price = float(ask[0])
size = float(ask[1])
asks.append({
'price': price,
'size': size,
'total': price * size
})
# Calculate statistics
if bids and asks:
best_bid = max(bids, key=lambda x: x['price'])
best_ask = min(asks, key=lambda x: x['price'])
mid_price = (best_bid['price'] + best_ask['price']) / 2
spread_bps = ((best_ask['price'] - best_bid['price']) / mid_price) * 10000 if mid_price > 0 else 0
if cob_snapshot and 'stats' in cob_snapshot:
# Process the COB data for dashboard display
total_bid_liquidity = sum(bid['total'] for bid in bids[:20])
total_ask_liquidity = sum(ask['total'] for ask in asks[:20])
total_liquidity = total_bid_liquidity + total_ask_liquidity
imbalance = (total_bid_liquidity - total_ask_liquidity) / total_liquidity if total_liquidity > 0 else 0
# Format the data for our dashboard
bids = []
asks = []
# Create COB snapshot
cob_snapshot = {
# Process bids
for bid_price, bid_size in cob_snapshot.get('bids', [])[:100]:
bids.append({
'price': bid_price,
'size': bid_size,
'total': bid_price * bid_size
})
# Process asks
for ask_price, ask_size in cob_snapshot.get('asks', [])[:100]:
asks.append({
'price': ask_price,
'size': ask_size,
'total': ask_price * ask_size
})
# Create dashboard-friendly COB snapshot
dashboard_cob_snapshot = {
'symbol': symbol,
'timestamp': time.time(),
'timestamp': cob_snapshot.get('timestamp', time.time()),
'bids': bids,
'asks': asks,
'stats': {
'mid_price': mid_price,
'spread_bps': spread_bps,
'total_bid_liquidity': total_bid_liquidity,
'total_ask_liquidity': total_ask_liquidity,
'imbalance': imbalance,
'mid_price': cob_snapshot['stats'].get('mid_price', 0),
'spread_bps': cob_snapshot['stats'].get('spread_bps', 0),
'total_bid_liquidity': cob_snapshot['stats'].get('bid_liquidity', 0),
'total_ask_liquidity': cob_snapshot['stats'].get('ask_liquidity', 0),
'imbalance': cob_snapshot['stats'].get('imbalance', 0),
'exchanges_active': ['Binance']
}
}
# Initialize history if needed
if not hasattr(self, 'cob_data_history'):
self.cob_data_history = {}
if symbol not in self.cob_data_history:
self.cob_data_history[symbol] = []
# Store in history (keep last 15 seconds)
self.cob_data_history[symbol].append(cob_snapshot)
self.cob_data_history[symbol].append(dashboard_cob_snapshot)
if len(self.cob_data_history[symbol]) > 15: # Keep 15 seconds
self.cob_data_history[symbol] = self.cob_data_history[symbol][-15:]
# Initialize latest data if needed
if not hasattr(self, 'latest_cob_data'):
self.latest_cob_data = {}
if not hasattr(self, 'cob_last_update'):
self.cob_last_update = {}
# Update latest data
self.latest_cob_data[symbol] = cob_snapshot
self.latest_cob_data[symbol] = dashboard_cob_snapshot
self.cob_last_update[symbol] = time.time()
# Generate bucketed data for models
self._generate_bucketed_cob_data(symbol, cob_snapshot)
self._generate_bucketed_cob_data(symbol, dashboard_cob_snapshot)
# Generate COB signals based on imbalance
self._generate_cob_signal(symbol, cob_snapshot)
self._generate_cob_signal(symbol, dashboard_cob_snapshot)
logger.debug(f"COB data collected for {symbol}: {len(bids)} bids, {len(asks)} asks")
logger.debug(f"COB data retrieved from data provider for {symbol}: {len(bids)} bids, {len(asks)} asks")
except Exception as e:
logger.debug(f"Error collecting COB data for {symbol}: {e}")
logger.debug(f"Error getting COB data for {symbol}: {e}")
def _generate_bucketed_cob_data(self, symbol: str, cob_snapshot: dict):
"""Generate bucketed COB data for model feeding"""