wip improve

commit 543b53883e (parent 9a44ddfa3c)
@@ -188,7 +188,7 @@ class DataProvider:
         if self.cache_enabled:
             cached_data = self._load_from_cache(symbol, timeframe)
             if cached_data is not None and len(cached_data) >= limit * 0.8:
-                logger.info(f"Using cached data for {symbol} {timeframe}")
+                # logger.info(f"Using cached data for {symbol} {timeframe}")
                 return cached_data.tail(limit)

         # Check if we need to preload 300s of data for first load
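Note on the hunk above: the cache short-circuit only fires when the cached frame covers at least 80% of the requested window (`len(cached_data) >= limit * 0.8`), and the commit demotes the log line on that hot path to a comment. A minimal standalone sketch of the policy — the cache layout and function name are illustrative, not the project's actual `_load_from_cache`:

```python
from typing import Optional

import pandas as pd

def candles_from_cache(cache: dict, symbol: str, timeframe: str,
                       limit: int) -> Optional[pd.DataFrame]:
    """Serve from cache when it covers at least 80% of the requested window."""
    cached = cache.get((symbol, timeframe))  # stand-in for self._load_from_cache(...)
    if cached is not None and len(cached) >= limit * 0.8:
        # Hot path: per-call logging was commented out in this commit
        return cached.tail(limit)
    return None  # caller falls through to a fresh fetch
```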
web/dashboard.py (400 changed lines)
@@ -290,22 +290,20 @@ class TradingDashboard:
         self.adaptive_learner = AdaptiveThresholdLearner(initial_threshold=0.30)
         logger.info("[ADAPTIVE] Adaptive threshold learning enabled - will adjust based on trade outcomes")

-        # Real-time tick data infrastructure
-        self.tick_cache = deque(maxlen=54000)  # 15 minutes * 60 seconds * 60 ticks/second = 54000 ticks
-        self.one_second_bars = deque(maxlen=900)  # 15 minutes of 1-second bars
-        self.current_second_data = {
-            'timestamp': None,
-            'open': None,
-            'high': None,
-            'low': None,
-            'close': None,
-            'volume': 0,
-            'tick_count': 0
-        }
+        # Lightweight WebSocket implementation for real-time scalping data
+        self.ws_price_cache = {}  # Just current prices, no tick history
         self.ws_connection = None
         self.ws_thread = None
         self.is_streaming = False

+        # Performance-focused: only track essentials
+        self.last_ws_update = 0
+        self.ws_update_count = 0
+
+        # Compatibility stubs for removed tick infrastructure
+        self.tick_cache = []  # Empty list for compatibility
+        self.one_second_bars = []  # Empty list for compatibility
+
         # Enhanced RL Training System - Train on closed trades with comprehensive data
         self.rl_training_enabled = True
         # Force enable Enhanced RL training (bypass import issues)
@@ -467,9 +465,9 @@ class TradingDashboard:
     def _initialize_streaming(self):
         """Initialize unified data streaming and WebSocket fallback"""
         try:
-            # Start WebSocket first (non-blocking)
-            self._start_websocket_stream()
-            logger.info("WebSocket streaming initialized")
+            # Start lightweight WebSocket for real-time price updates
+            self._start_lightweight_websocket()
+            logger.info("Lightweight WebSocket streaming initialized")

             if ENHANCED_RL_AVAILABLE:
                 # Start unified data stream in background
@@ -490,8 +488,8 @@ class TradingDashboard:

         except Exception as e:
             logger.error(f"Error initializing streaming: {e}")
-            # Ensure WebSocket is started as fallback
-            self._start_websocket_stream()
+            # Ensure lightweight WebSocket is started as fallback
+            self._start_lightweight_websocket()

     def _start_enhanced_training_data_collection(self):
         """Start enhanced training data collection using unified stream"""
@@ -990,7 +988,14 @@ class TradingDashboard:
         )
         def update_dashboard(n_intervals):
             """Update all dashboard components with trading signals"""
+            start_time = time.time()  # Performance monitoring
            try:
+                # Periodic cleanup to prevent memory leaks
+                if n_intervals % 60 == 0:  # Every 60 seconds
+                    self._cleanup_old_data()
+
+                # Lightweight update every 10 intervals to reduce load
+                is_lightweight_update = (n_intervals % 10 != 0)
                # Get current prices with improved fallback handling
                symbol = self.config.symbols[0] if self.config.symbols else "ETH/USDT"
                current_price = None
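The callback now does three cheap things up front: a timing probe, a once-a-minute cleanup, and a flag that makes nine out of ten ticks "lightweight". A self-contained sketch of that cadence, assuming the Dash interval fires once per second (the class and helper names here are illustrative):

```python
import time

class ThrottledUpdater:
    """Sketch of the update_dashboard cadence from the hunk above."""

    def on_interval(self, n_intervals: int) -> str:
        start = time.time()            # performance monitoring, as in the diff
        if n_intervals % 60 == 0:      # housekeeping roughly once a minute
            self.cleanup_old_data()
        # 9 of every 10 ticks reuse cached charts and skip signal generation
        mode = "lightweight" if n_intervals % 10 != 0 else "full"
        print(f"{mode} tick handled in {(time.time() - start) * 1000:.2f}ms")
        return mode

    def cleanup_old_data(self) -> None:
        """Stand-in for _cleanup_old_data (added later in this diff)."""
```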
@@ -998,12 +1003,11 @@ class TradingDashboard:
                data_source = "UNKNOWN"

                try:
-                    # First try WebSocket current price (lowest latency)
-                    ws_symbol = symbol.replace('/', '')  # Convert ETH/USDT to ETHUSDT for WebSocket
-                    if ws_symbol in self.current_prices and self.current_prices[ws_symbol] > 0:
-                        current_price = self.current_prices[ws_symbol]
-                        data_source = "WEBSOCKET"
-                        logger.debug(f"[WS_PRICE] Using WebSocket price for {symbol}: ${current_price:.2f}")
+                    # First try real-time WebSocket price (sub-second latency)
+                    current_price = self.get_realtime_price(symbol)
+                    if current_price:
+                        data_source = "WEBSOCKET_RT"
+                        logger.debug(f"[WS_RT] Using real-time WebSocket price for {symbol}: ${current_price:.2f}")
                    else:
                        # Try cached data first (faster than API calls)
                        cached_data = self.data_provider.get_historical_data(symbol, '1m', limit=1, refresh=False)
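`get_realtime_price` (defined further down in this diff) collapses the old inline cache check into a two-tier lookup: WebSocket price cache first, data-provider prices second. The gist, as a free function over plain dicts:

```python
from typing import Optional

def realtime_price(ws_price_cache: dict, current_prices: dict,
                   symbol: str) -> Optional[float]:
    """WebSocket cache first (sub-second latency), provider prices second."""
    key = symbol.replace('/', '')  # 'ETH/USDT' -> 'ETHUSDT'
    price = ws_price_cache.get(key)
    if price:                      # mirrors the diff's truthiness check
        return price
    return current_prices.get(key)
```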
@@ -1032,30 +1036,30 @@ class TradingDashboard:
                    current_price = None
                    data_source = "ERROR"

-                # Get chart data - ONLY REAL DATA
+                # Get chart data - ONLY REAL DATA (optimized for performance)
                chart_data = None
                try:
-                    # First try WebSocket 1s bars
-                    chart_data = self.get_one_second_bars(count=50)
-                    if not chart_data.empty:
-                        logger.debug(f"[CHART] Using WebSocket 1s bars: {len(chart_data)} bars")
-                    else:
-                        # Try cached data only
-                        chart_data = self.data_provider.get_historical_data(symbol, '1m', limit=50, refresh=False)
+                    if not is_lightweight_update:  # Only refresh charts every 10 seconds
+                        # Use cached data only (limited to 30 bars for performance)
+                        chart_data = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=False)
                        if chart_data is not None and not chart_data.empty:
                            logger.debug(f"[CHART] Using cached 1m data: {len(chart_data)} bars")
                        else:
-                            # NO SYNTHETIC DATA - Wait for real data
-                            logger.warning("[CHART] No real chart data available - waiting for data provider")
+                            # Wait for real data - no synthetic data
+                            logger.debug("[CHART] No chart data available - waiting for data provider")
                            chart_data = None
+                    else:
+                        # Use cached chart data for lightweight updates
+                        chart_data = getattr(self, '_cached_chart_data', None)
                except Exception as e:
                    logger.warning(f"[CHART_ERROR] Error getting chart data: {e}")
                    chart_data = None

-                # Generate trading signals based on model decisions - NO FREQUENCY LIMITS
+                # Generate trading signals based on model decisions - OPTIMIZED
                try:
-                    if current_price and chart_data is not None and not chart_data.empty and len(chart_data) >= 5:
-                        # Model decides when to act - check every update for signals
+                    # Only generate signals every few intervals to reduce CPU load
+                    if not is_lightweight_update and current_price and chart_data is not None and not chart_data.empty and len(chart_data) >= 5:
+                        # Model decides when to act - check for signals but not every single second
                        signal = self._generate_trading_signal(symbol, current_price, chart_data)
                        if signal:
                            # Add to signals list (all signals, regardless of execution)
@@ -1181,12 +1185,20 @@ class TradingDashboard:
                else:
                    mexc_status = "OFFLINE"

-                # Create charts with error handling - NO SYNTHETIC DATA
+                # Create charts with error handling - OPTIMIZED
                try:
+                    if not is_lightweight_update:  # Only recreate chart every 10 seconds
                        if current_price and chart_data is not None and not chart_data.empty:
                            price_chart = self._create_price_chart(symbol)
+                            self._cached_chart_data = chart_data  # Cache for lightweight updates
+                            self._cached_price_chart = price_chart  # Cache chart
                        else:
                            price_chart = self._create_empty_chart("Price Chart", "Waiting for real market data...")
+                            self._cached_price_chart = price_chart
+                    else:
+                        # Use cached chart for lightweight updates
+                        price_chart = getattr(self, '_cached_price_chart',
+                                              self._create_empty_chart("Price Chart", "Loading..."))
                except Exception as e:
                    logger.warning(f"Price chart error: {e}")
                    price_chart = self._create_empty_chart("Price Chart", "Error loading chart - waiting for data")
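One nuance in the cached-chart fallback above: the default argument to `getattr` is evaluated eagerly, so `_create_empty_chart` is built on every lightweight tick even when a cached chart exists. A toy demo of the difference (class and return types are illustrative):

```python
class ChartCacheDemo:
    def _create_empty_chart(self, title: str, msg: str) -> dict:
        print("building placeholder")  # side effect makes the eagerness visible
        return {"title": title, "msg": msg}

    def lightweight_tick_eager(self) -> dict:
        # Placeholder is constructed even when _cached_price_chart exists
        return getattr(self, '_cached_price_chart',
                       self._create_empty_chart("Price Chart", "Loading..."))

    def lightweight_tick_lazy(self) -> dict:
        # Placeholder is only constructed on a genuine cache miss
        chart = getattr(self, '_cached_price_chart', None)
        if chart is None:
            chart = self._create_empty_chart("Price Chart", "Loading...")
        return chart
```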
@@ -1405,18 +1417,17 @@ class TradingDashboard:
            return fig

    def _create_price_chart(self, symbol: str) -> go.Figure:
-        """Create enhanced 1-second price chart with volume and Williams pivot points from WebSocket stream"""
+        """Create price chart with volume and Williams pivot points from cached data"""
        try:
-            # Get 1-second bars from WebSocket stream
-            df = self.get_one_second_bars(count=300)  # Last 5 minutes of 1s bars
+            # Use cached data from data provider (optimized for performance)
+            df = self.data_provider.get_historical_data(symbol, '1m', limit=50, refresh=False)

-            # If no WebSocket data, fall back to data provider
-            if df.empty:
-                logger.warning("[CHART] No WebSocket data, falling back to data provider")
+            if df is None or df.empty:
+                logger.warning("[CHART] No cached data available, trying fresh data")
                try:
-                    df = self.data_provider.get_historical_data(symbol, '1m', limit=50, refresh=True)
+                    df = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=True)
                    if df is not None and not df.empty:
-                        # Ensure timezone consistency for fallback data
+                        # Ensure timezone consistency for fresh data
                        df = self._ensure_timezone_consistency(df)
                        # Add volume column if missing
                        if 'volume' not in df.columns:
@@ -1424,20 +1435,20 @@ class TradingDashboard:
                        actual_timeframe = '1m'
                    else:
                        return self._create_empty_chart(
-                            f"{symbol} 1s Chart",
-                            f"No data available for {symbol}\nStarting WebSocket stream..."
+                            f"{symbol} Chart",
+                            f"No data available for {symbol}\nWaiting for data provider..."
                        )
                except Exception as e:
-                    logger.warning(f"[ERROR] Error getting fallback data: {e}")
+                    logger.warning(f"[ERROR] Error getting fresh data: {e}")
                    return self._create_empty_chart(
-                        f"{symbol} 1s Chart",
+                        f"{symbol} Chart",
                        f"Chart Error: {str(e)}"
                    )
            else:
-                # Ensure timezone consistency for WebSocket data
+                # Ensure timezone consistency for cached data
                df = self._ensure_timezone_consistency(df)
-                actual_timeframe = '1s'
-                logger.debug(f"[CHART] Using {len(df)} 1s bars from WebSocket stream in {self.timezone}")
+                actual_timeframe = '1m'
+                logger.debug(f"[CHART] Using {len(df)} 1m bars from cached data in {self.timezone}")

            # Create subplot with secondary y-axis for volume
            fig = make_subplots(
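`_ensure_timezone_consistency` is called on both branches but never shown in this diff; a plausible sketch of what such a helper does with a pandas `DatetimeIndex` — this body is an assumption, not the project's code:

```python
import pandas as pd

def ensure_timezone_consistency(df: pd.DataFrame, tz: str = "UTC") -> pd.DataFrame:
    """Normalize the index so naive and aware timestamps compare cleanly (assumed behavior)."""
    if isinstance(df.index, pd.DatetimeIndex):
        if df.index.tz is None:
            df = df.tz_localize("UTC")  # assume naive timestamps are UTC
        df = df.tz_convert(tz)          # then convert to the dashboard's timezone
    return df
```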
@@ -3234,56 +3245,61 @@ class TradingDashboard:
                'details': [html.P(f"Error: {str(e)}", className="text-danger")]
            }

-    def _start_websocket_stream(self):
-        """Start WebSocket connection for real-time tick data"""
+    def _start_lightweight_websocket(self):
+        """Start ultra-lightweight WebSocket for real-time price updates only"""
        try:
-            if not WEBSOCKET_AVAILABLE:
-                logger.warning("[WEBSOCKET] websocket-client not available. Using data provider fallback.")
-                self.is_streaming = False
+            if self.is_streaming:
+                logger.warning("[WS] WebSocket already running")
                return

-            symbol = self.config.symbols[0] if self.config.symbols else "ETHUSDT"
+            # ETH/USDT primary symbol for scalping
+            symbol = "ethusdt"

-            # Start WebSocket in background thread
-            self.ws_thread = threading.Thread(target=self._websocket_worker, args=(symbol,), daemon=True)
-            self.ws_thread.start()
-
-            logger.info(f"[WEBSOCKET] Starting real-time tick stream for {symbol}")
-
-        except Exception as e:
-            logger.error(f"Error starting WebSocket stream: {e}")
-            self.is_streaming = False
-
-    def _websocket_worker(self, symbol: str):
-        """WebSocket worker thread for continuous tick data streaming"""
+            def ws_worker():
                try:
-            # Use Binance WebSocket for real-time tick data
-            ws_url = f"wss://stream.binance.com:9443/ws/{symbol.lower().replace('/', '')}@ticker"
+                    import websocket
+                    import json

                    def on_message(ws, message):
                        try:
                            data = json.loads(message)
-                            self._process_tick_data(data)
+                            # Extract only current price - ultra minimal processing
+                            if 'c' in data:  # Current price from ticker
+                                price = float(data['c'])
+                                # Update price cache (no history, just current)
+                                self.ws_price_cache['ETHUSDT'] = price
+                                self.current_prices['ETHUSDT'] = price
+
+                                # Performance tracking
+                                current_time = time.time()
+                                self.last_ws_update = current_time
+                                self.ws_update_count += 1
+
+                                # Log every 100 updates for monitoring
+                                if self.ws_update_count % 100 == 0:
+                                    logger.debug(f"[WS] {self.ws_update_count} price updates, latest: ${price:.2f}")
                        except Exception as e:
-                            logger.warning(f"Error processing WebSocket message: {e}")
+                            logger.warning(f"[WS] Error processing message: {e}")

                    def on_error(ws, error):
-                        logger.error(f"WebSocket error: {error}")
+                        logger.error(f"[WS] Error: {error}")
                        self.is_streaming = False

                    def on_close(ws, close_status_code, close_msg):
-                        logger.warning("WebSocket connection closed")
+                        logger.warning(f"[WS] Connection closed: {close_status_code}")
                        self.is_streaming = False
-                        # Attempt to reconnect after 5 seconds
+                        # Auto-reconnect after 5 seconds
                        time.sleep(5)
                        if not self.is_streaming:
-                            self._websocket_worker(symbol)
+                            self._start_lightweight_websocket()

                    def on_open(ws):
-                        logger.info("[WEBSOCKET] Connected to Binance stream")
+                        logger.info(f"[WS] Connected for real-time ETHUSDT price updates")
                        self.is_streaming = True

-                    # Create WebSocket connection
+                    # Binance WebSocket for ticker (price only, not trades)
+                    ws_url = f"wss://stream.binance.com:9443/ws/{symbol}@ticker"
+
                    self.ws_connection = websocket.WebSocketApp(
                        ws_url,
                        on_message=on_message,
@@ -3292,129 +3308,46 @@ class TradingDashboard:
                        on_open=on_open
                    )

-                    # Run WebSocket (this blocks)
+                    # Run WebSocket (blocking)
                    self.ws_connection.run_forever()

                except Exception as e:
-                    logger.error(f"WebSocket worker error: {e}")
+                    logger.error(f"[WS] Worker error: {e}")
                    self.is_streaming = False

-    def _process_tick_data(self, tick_data: Dict):
-        """Process incoming tick data and update 1-second bars with consistent timezone"""
-        try:
-            # Extract price and volume from Binance ticker data
-            price = float(tick_data.get('c', 0))  # Current price
-            volume = float(tick_data.get('v', 0))  # 24h volume
-            # Use configured timezone instead of UTC for consistency
-            timestamp = self._now_local()
-
-            # Add to tick cache with consistent timezone
-            tick = {
-                'timestamp': timestamp,
-                'price': price,
-                'volume': volume,
-                'bid': float(tick_data.get('b', price)),  # Best bid
-                'ask': float(tick_data.get('a', price)),  # Best ask
-                'high_24h': float(tick_data.get('h', price)),
-                'low_24h': float(tick_data.get('l', price))
-            }
-
-            self.tick_cache.append(tick)
-
-            # Update current second bar using local timezone
-            current_second = timestamp.replace(microsecond=0)
-
-            if self.current_second_data['timestamp'] != current_second:
-                # New second - finalize previous bar and start new one
-                if self.current_second_data['timestamp'] is not None:
-                    self._finalize_second_bar()
-
-                # Start new second bar
-                self.current_second_data = {
-                    'timestamp': current_second,
-                    'open': price,
-                    'high': price,
-                    'low': price,
-                    'close': price,
-                    'volume': 0,
-                    'tick_count': 1
-                }
-            else:
-                # Update current second bar
-                self.current_second_data['high'] = max(self.current_second_data['high'], price)
-                self.current_second_data['low'] = min(self.current_second_data['low'], price)
-                self.current_second_data['close'] = price
-                self.current_second_data['tick_count'] += 1
-
-            # Update current price for dashboard
-            self.current_prices[tick_data.get('s', 'ETHUSDT')] = price
-
-            logger.debug(f"[TICK] Processed tick at {timestamp.strftime('%H:%M:%S')} {self.timezone.zone}: ${price:.2f}")
-
+            # Start WebSocket in background thread
+            self.ws_thread = threading.Thread(target=ws_worker, daemon=True)
+            self.ws_thread.start()
+
+            logger.info("[WS] Lightweight WebSocket started for real-time price updates")
+
        except Exception as e:
-            logger.warning(f"Error processing tick data: {e}")
+            logger.error(f"[WS] Failed to start: {e}")
+            self.is_streaming = False

-    def _finalize_second_bar(self):
-        """Finalize the current second bar and add to bars cache"""
+    def stop_streaming(self):
+        """Stop WebSocket streaming"""
        try:
-            if self.current_second_data['timestamp'] is not None:
-                bar = {
-                    'timestamp': self.current_second_data['timestamp'],
-                    'open': self.current_second_data['open'],
-                    'high': self.current_second_data['high'],
-                    'low': self.current_second_data['low'],
-                    'close': self.current_second_data['close'],
-                    'volume': self.current_second_data['volume'],
-                    'tick_count': self.current_second_data['tick_count']
-                }
-
-                self.one_second_bars.append(bar)
-
-                # Log every 10 seconds for monitoring
-                if len(self.one_second_bars) % 10 == 0:
-                    logger.debug(f"[BARS] Generated {len(self.one_second_bars)} 1s bars, latest: ${bar['close']:.2f}")
-
+            self.is_streaming = False
+            if self.ws_connection:
+                self.ws_connection.close()
+            logger.info("[WS] Streaming stopped")
        except Exception as e:
-            logger.warning(f"Error finalizing second bar: {e}")
+            logger.error(f"[WS] Error stopping: {e}")

-    def get_tick_cache_for_training(self, minutes: int = 15) -> List[Dict]:
-        """Get tick cache data for model training"""
+    def get_realtime_price(self, symbol: str) -> float:
+        """Get real-time price from WebSocket cache (faster than API)"""
        try:
-            cutoff_time = datetime.now(timezone.utc) - timedelta(minutes=minutes)
-            recent_ticks = [
-                tick for tick in self.tick_cache
-                if tick['timestamp'] >= cutoff_time
-            ]
-            return recent_ticks
+            # Try WebSocket cache first (sub-second latency)
+            ws_price = self.ws_price_cache.get(symbol.replace('/', ''))
+            if ws_price:
+                return ws_price
+            # Fallback to current_prices (from data provider)
+            return self.current_prices.get(symbol.replace('/', ''))
        except Exception as e:
-            logger.error(f"Error getting tick cache: {e}")
-            return []
+            logger.warning(f"[WS] Error getting realtime price: {e}")
+            return None

-    def get_one_second_bars(self, count: int = 300) -> pd.DataFrame:
-        """Get recent 1-second bars as DataFrame with consistent timezone"""
-        try:
-            if len(self.one_second_bars) == 0:
-                return pd.DataFrame()
-
-            # Get recent bars
-            recent_bars = list(self.one_second_bars)[-count:]
-
-            # Convert to DataFrame
-            df = pd.DataFrame(recent_bars)
-            if not df.empty:
-                df.set_index('timestamp', inplace=True)
-                df.sort_index(inplace=True)
-
-                # Ensure timezone consistency
-                df = self._ensure_timezone_consistency(df)
-
-            logger.debug(f"[BARS] Retrieved {len(df)} 1s bars in {self.timezone.zone}")
-
-            return df
-
-        except Exception as e:
-            logger.error(f"Error getting 1-second bars: {e}")
-            return pd.DataFrame()
-
    def _create_cnn_monitoring_content(self) -> List:
        """Create CNN monitoring and prediction analysis content"""
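The rewrite above reduces the streaming layer to one daemon thread, one Binance ticker stream, and a dict of last prices. A self-contained sketch of the same pattern with the `websocket-client` package (logging and symbol choice are illustrative):

```python
import json
import logging
import threading

import websocket  # pip install websocket-client

logger = logging.getLogger("ws_sketch")
price_cache: dict = {}  # last price per symbol, no tick history kept

def on_message(ws, message):
    data = json.loads(message)
    if 'c' in data:  # Binance ticker payload: 'c' is the last traded price
        price_cache['ETHUSDT'] = float(data['c'])

def start_price_stream() -> threading.Thread:
    app = websocket.WebSocketApp(
        "wss://stream.binance.com:9443/ws/ethusdt@ticker",
        on_message=on_message,
        on_error=lambda ws, err: logger.error("stream error: %s", err),
    )
    # run_forever() blocks, so park it on a daemon thread like the diff does
    thread = threading.Thread(target=app.run_forever, daemon=True)
    thread.start()
    return thread
```

Note the diff's `on_close` handler sleeps 5 seconds and calls `_start_lightweight_websocket()` again, which re-checks `is_streaming` before reconnecting; the sketch omits reconnection for brevity.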
@@ -3596,33 +3529,77 @@ class TradingDashboard:
            logger.error(f"Error creating model performance table: {e}")
            return html.P(f"Error creating performance table: {str(e)}", className="text-danger")

+    def _cleanup_old_data(self):
+        """Clean up old data to prevent memory leaks and performance degradation"""
+        try:
+            cleanup_start = time.time()
+
+            # Clean up recent decisions - keep only last 100
+            if len(self.recent_decisions) > 100:
+                self.recent_decisions = self.recent_decisions[-100:]
+
+            # Clean up recent signals - keep only last 50
+            if len(self.recent_signals) > 50:
+                self.recent_signals = self.recent_signals[-50:]
+
+            # Clean up session trades - keep only last 200
+            if len(self.session_trades) > 200:
+                self.session_trades = self.session_trades[-200:]
+
+            # Clean up closed trades - keep only last 100 in memory, rest in file
+            if len(self.closed_trades) > 100:
+                self.closed_trades = self.closed_trades[-100:]
+
+            # Clean up current prices - remove old symbols not in config
+            current_symbols = set(self.config.symbols) if self.config.symbols else {'ETHUSDT'}
+            symbols_to_remove = []
+            for symbol in self.current_prices:
+                if symbol not in current_symbols:
+                    symbols_to_remove.append(symbol)
+            for symbol in symbols_to_remove:
+                del self.current_prices[symbol]
+
+            # Clean up RL training queue - keep only last 500
+            if len(self.rl_training_queue) > 500:
+                # Convert to list, slice, then back to deque
+                old_queue = list(self.rl_training_queue)
+                self.rl_training_queue.clear()
+                self.rl_training_queue.extend(old_queue[-500:])
+
+            # Tick infrastructure removed - no cleanup needed
+
+            cleanup_time = (time.time() - cleanup_start) * 1000
+            logger.info(f"[CLEANUP] Data cleanup completed in {cleanup_time:.1f}ms - "
+                        f"Decisions: {len(self.recent_decisions)}, "
+                        f"Signals: {len(self.recent_signals)}, "
+                        f"Trades: {len(self.session_trades)}, "
+                        f"Closed: {len(self.closed_trades)}")
+
+        except Exception as e:
+            logger.error(f"Error during data cleanup: {e}")
+
    def _create_training_metrics(self) -> List:
        """Create comprehensive model training metrics display with enhanced RL integration"""
        try:
            training_items = []

            # Enhanced Training Data Streaming Status
-            tick_cache_size = len(self.tick_cache)
-            bars_cache_size = len(self.one_second_bars)
+            ws_updates = getattr(self, 'ws_update_count', 0)
            enhanced_data_available = self.training_data_available and self.enhanced_rl_training_enabled

            training_items.append(
                html.Div([
                    html.H6([
                        html.I(className="fas fa-database me-2 text-info"),
-                        "Enhanced Training Data Stream"
+                        "Real-Time Data & Training Stream"
                    ], className="mb-2"),
                    html.Div([
                        html.Small([
-                            html.Strong("Tick Cache: "),
-                            html.Span(f"{tick_cache_size:,} ticks", className="text-success" if tick_cache_size > 1000 else "text-warning")
+                            html.Strong("WebSocket Updates: "),
+                            html.Span(f"{ws_updates:,} price updates", className="text-success" if ws_updates > 100 else "text-warning")
                        ], className="d-block"),
                        html.Small([
-                            html.Strong("1s Bars: "),
-                            html.Span(f"{bars_cache_size} bars", className="text-success" if bars_cache_size > 100 else "text-warning")
-                        ], className="d-block"),
-                        html.Small([
-                            html.Strong("Stream: "),
+                            html.Strong("Stream Status: "),
                            html.Span("LIVE" if self.is_streaming else "OFFLINE",
                                      className="text-success" if self.is_streaming else "text-danger")
                        ], className="d-block"),
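Side note on `_cleanup_old_data` above: the RL queue is trimmed by round-tripping the deque through a list, which is only needed if the deque was created unbounded. A bounded `collections.deque` discards old entries on append by itself:

```python
from collections import deque

# With maxlen set, appends beyond 500 silently drop the oldest entries,
# so no periodic trimming pass is needed for this structure
rl_training_queue = deque(maxlen=500)
for i in range(1200):
    rl_training_queue.append(i)
assert len(rl_training_queue) == 500 and rl_training_queue[0] == 700
```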
@@ -3632,9 +3609,14 @@ class TradingDashboard:
                                      className="text-success" if self.enhanced_rl_training_enabled else "text-warning")
                        ], className="d-block"),
                        html.Small([
-                            html.Strong("Comprehensive Data: "),
+                            html.Strong("Training Data: "),
                            html.Span("AVAILABLE" if enhanced_data_available else "WAITING",
                                      className="text-success" if enhanced_data_available else "text-warning")
+                        ], className="d-block"),
+                        html.Small([
+                            html.Strong("Cached Data: "),
+                            html.Span("READY" if len(self.current_prices) > 0 else "LOADING",
+                                      className="text-success" if len(self.current_prices) > 0 else "text-warning")
                        ], className="d-block")
                    ])
                ], className="mb-3 p-2 border border-info rounded")
@@ -4500,12 +4482,9 @@ class TradingDashboard:
            logger.warning(f"Error updating training metrics: {e}")

    def get_tick_cache_for_training(self) -> List[Dict]:
-        """Get tick cache data for external training systems"""
-        try:
-            return list(self.tick_cache)
-        except Exception as e:
-            logger.error(f"Error getting tick cache for training: {e}")
-            return []
+        """Get tick cache data for external training systems - removed for performance optimization"""
+        logger.debug("Tick cache removed for performance - using cached OHLCV data for training instead")
+        return []  # Empty since we removed tick infrastructure

    def start_continuous_training(self):
        """Start continuous training in background thread"""
@@ -5243,10 +5222,7 @@ class TradingDashboard:
            try:
                # Get data for this timeframe
                if timeframe == '1s':
-                    # For 1s data, use our tick aggregation
-                    df = self.get_one_second_bars(count=60)  # Last 60 seconds
-                    if df.empty:
-                        # Fallback to 1m data
-                        df = self.data_provider.get_historical_data(
+                    # For 1s data, fallback directly to 1m data (no tick aggregation)
+                    df = self.data_provider.get_historical_data(
                        symbol=target_symbol,
                        timeframe='1m',
@@ -5279,11 +5255,9 @@ class TradingDashboard:
                # BTC: 1 timeframe * 50 = 50 features
                # Total expected: 200 features (150 ETH + 50 BTC)

-                # Add BTC 1s data if we're processing ETH (for correlation analysis)
+                # Add BTC 1m data if we're processing ETH (for correlation analysis)
                if symbol.startswith('ETH'):
                    try:
-                        btc_1s_df = self.get_one_second_bars(count=60, symbol='BTC/USDT')
-                        if btc_1s_df.empty:
-                            btc_1s_df = self.data_provider.get_historical_data(
+                        btc_1s_df = self.data_provider.get_historical_data(
                            symbol='BTC/USDT',
                            timeframe='1m',
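For the feature accounting in the comments above (ETH: 3 timeframes × 50 = 150, BTC: 1 timeframe × 50 = 50, total 200), a toy illustration of how such a state vector concatenates; the extractor and dummy frames are hypothetical stand-ins for the real pipeline:

```python
import numpy as np
import pandas as pd

def extract_features(df: pd.DataFrame, n: int = 50) -> np.ndarray:
    """Hypothetical per-timeframe extractor: last n closes, left-padded."""
    closes = df['close'].to_numpy()[-n:]
    return np.pad(closes, (n - len(closes), 0))

# Dummy frames standing in for ETH on 3 timeframes plus the BTC correlation series
frames = [pd.DataFrame({'close': np.random.rand(60)}) for _ in range(4)]

# ETH: 3 timeframes * 50 + BTC: 1 timeframe * 50 = 200 features
state = np.concatenate([extract_features(df) for df in frames])
assert state.shape == (200,)
```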