Reorganize folder structure

Dobromir Popov
2025-06-25 11:42:12 +03:00
parent 61b31a3089
commit 03fa28a12d
127 changed files with 3108 additions and 1774 deletions


@@ -1,5 +1,20 @@
"""
Clean Trading Dashboard - Modular Implementation
This dashboard is fully integrated with the Universal Data Stream architecture
and receives the standardized 5 timeseries format:
UNIVERSAL DATA FORMAT (The Sacred 5):
1. ETH/USDT Ticks (1s) - Primary trading pair real-time data
2. ETH/USDT 1m - Short-term price action and patterns
3. ETH/USDT 1h - Medium-term trends and momentum
4. ETH/USDT 1d - Long-term market structure
5. BTC/USDT Ticks (1s) - Reference asset for correlation analysis
The dashboard subscribes to the UnifiedDataStream as a consumer and receives
real-time updates for all 5 timeseries through a standardized callback.
This ensures consistent data across all models and components.
Uses layout and component managers to reduce file size and improve maintainability
"""
@@ -54,6 +69,15 @@ except ImportError:
    COB_INTEGRATION_AVAILABLE = False
    logger.warning("COB integration not available")

# Add Universal Data Stream imports
try:
    from core.unified_data_stream import UnifiedDataStream
    from core.universal_data_adapter import UniversalDataAdapter, UniversalDataStream as UDS
    UNIFIED_STREAM_AVAILABLE = True
except ImportError:
    UNIFIED_STREAM_AVAILABLE = False
    logger.warning("Unified Data Stream not available")

# Import RL COB trader for 1B parameter model integration
from core.realtime_rl_cob_trader import RealtimeRLCOBTrader, PredictionResult
@@ -84,6 +108,21 @@ class CleanTradingDashboard:
        )
        self.component_manager = DashboardComponentManager()

        # Initialize Universal Data Stream for the 5 timeseries architecture
        if UNIFIED_STREAM_AVAILABLE:
            self.unified_stream = UnifiedDataStream(self.data_provider, self.orchestrator)
            self.stream_consumer_id = self.unified_stream.register_consumer(
                consumer_name="CleanTradingDashboard",
                callback=self._handle_unified_stream_data,
                data_types=['ticks', 'ohlcv', 'training_data', 'ui_data']
            )
            logger.info(f"🔗 Universal Data Stream initialized with consumer ID: {self.stream_consumer_id}")
            logger.info("📊 Subscribed to Universal 5 Timeseries: ETH(ticks,1m,1h,1d) + BTC(ticks)")
        else:
            self.unified_stream = None
            self.stream_consumer_id = None
            logger.warning("⚠️ Universal Data Stream not available - falling back to direct data access")

        # Dashboard state
        self.recent_decisions = []
        self.closed_trades = []
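The register_consumer call above is the generic subscription point, so additional consumers can attach the same way. A sketch of a second subscriber that only wants the tick series; the "TickLogger" name and its callback are hypothetical, and only the register_consumer signature is taken from the code above:

# Hypothetical additional consumer; assumes `unified_stream` is an
# already-constructed UnifiedDataStream like the one created in __init__.
def handle_ticks(data_packet):
    # Log each tick from packets this consumer receives.
    for tick in data_packet.get('ticks', []):
        print(tick)

tick_consumer_id = unified_stream.register_consumer(
    consumer_name="TickLogger",   # hypothetical consumer name
    callback=handle_ticks,
    data_types=['ticks'],         # subscribe to the tick series only
)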
@@ -133,6 +172,12 @@ class CleanTradingDashboard:
        # Initialize COB integration
        self._initialize_cob_integration()

        # Start Universal Data Stream
        if self.unified_stream:
            import threading
            threading.Thread(target=self._start_unified_stream, daemon=True).start()
            logger.info("🚀 Universal Data Stream starting...")

        logger.info("Clean Trading Dashboard initialized with COB RL integration")

    def load_model_dynamically(self, model_name: str, model_type: str, model_path: str = None) -> bool:
@@ -1595,6 +1640,69 @@ class CleanTradingDashboard:
        except Exception as e:
            logger.error(f"Error stopping dashboard: {e}")

    def _start_unified_stream(self):
        """Start the unified data stream in the background"""
        try:
            import asyncio
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            loop.run_until_complete(self.unified_stream.start_streaming())
        except Exception as e:
            logger.error(f"Error starting unified stream: {e}")
    def _handle_unified_stream_data(self, data_packet: Dict[str, Any]):
        """Handle incoming data from the Universal Data Stream (5 timeseries)"""
        try:
            # Extract the universal 5 timeseries data
            if 'ticks' in data_packet and data_packet['ticks']:
                # Update tick cache with real-time data
                self.tick_cache.extend(data_packet['ticks'][-50:])  # Last 50 ticks
                if len(self.tick_cache) > 1000:
                    self.tick_cache = self.tick_cache[-1000:]

            if 'ohlcv' in data_packet:
                # Update multi-timeframe data
                multi_tf_data = data_packet.get('multi_timeframe', {})
                for symbol in ['ETH/USDT', 'BTC/USDT']:
                    if symbol in multi_tf_data:
                        for timeframe in ['1s', '1m', '1h', '1d']:
                            if timeframe in multi_tf_data[symbol]:
                                # Update internal cache with universal data
                                tf_data = multi_tf_data[symbol][timeframe]
                                if tf_data:
                                    # Update current prices from the universal stream
                                    latest_bar = tf_data[-1]
                                    if 'close' in latest_bar:
                                        self.current_prices[symbol] = latest_bar['close']
                                        self.ws_price_cache[symbol.replace('/', '')] = latest_bar['close']

            if 'ui_data' in data_packet and data_packet['ui_data']:
                # Process UI-specific data updates
                ui_data = data_packet['ui_data']
                # This could include formatted data specifically for dashboard display
                pass

            if 'training_data' in data_packet and data_packet['training_data']:
                # Process training data for real-time model updates
                training_data = data_packet['training_data']
                # This includes market state and model features
                pass

            # Log periodic universal data stream stats
            consumer_name = data_packet.get('consumer_name', 'unknown')
            if hasattr(self, '_stream_update_count'):
                self._stream_update_count += 1
            else:
                self._stream_update_count = 1

            if self._stream_update_count % 100 == 0:  # Every 100 updates
                logger.info(f"📈 Universal Stream: {self._stream_update_count} updates processed for {consumer_name}")
                logger.debug(f"📊 Current data: ticks={len(data_packet.get('ticks', []))}, "
                             f"tf_symbols={len(data_packet.get('multi_timeframe', {}))}")
        except Exception as e:
            logger.error(f"Error handling universal stream data: {e}")
# Factory function for easy creation
def create_clean_dashboard(data_provider=None, orchestrator=None, trading_executor=None):
    """Create a clean trading dashboard instance"""