Optimize updates, remove FIFO queues in favor of a simple cache

This commit is contained in:
Dobromir Popov
2025-07-26 22:17:29 +03:00
parent c349ff6f30
commit 9576c52039
4 changed files with 965 additions and 51 deletions

View File

@ -179,22 +179,12 @@ class TradingOrchestrator:
self.fusion_decisions_count: int = 0
self.fusion_training_data: List[Any] = [] # Store training examples for decision model
# FIFO Data Queues - Ensure consistent data availability across different refresh rates
self.data_queues = {
'ohlcv_1s': {symbol: deque(maxlen=500) for symbol in [self.symbol] + self.ref_symbols},
'ohlcv_1m': {symbol: deque(maxlen=300) for symbol in [self.symbol] + self.ref_symbols},
'ohlcv_1h': {symbol: deque(maxlen=300) for symbol in [self.symbol] + self.ref_symbols},
'ohlcv_1d': {symbol: deque(maxlen=300) for symbol in [self.symbol] + self.ref_symbols},
'technical_indicators': {symbol: deque(maxlen=100) for symbol in [self.symbol] + self.ref_symbols},
'cob_data': {symbol: deque(maxlen=50) for symbol in [self.symbol]}, # COB only for primary symbol
'model_predictions': {symbol: deque(maxlen=20) for symbol in [self.symbol]}
}
# Data queue locks for thread safety
self.data_queue_locks = {
data_type: {symbol: threading.Lock() for symbol in queue_dict.keys()}
for data_type, queue_dict in self.data_queues.items()
}
# Simplified Data Integration - Replace complex FIFO queues with efficient cache
from core.simplified_data_integration import SimplifiedDataIntegration
self.data_integration = SimplifiedDataIntegration(
data_provider=self.data_provider,
symbols=[self.symbol] + self.ref_symbols
)
# COB Integration - Real-time market microstructure data
self.cob_integration = None # Will be set to COBIntegration instance if available
@ -259,12 +249,12 @@ class TradingOrchestrator:
self.data_provider.start_centralized_data_collection()
logger.info("Centralized data collection started - all models and dashboard will receive data")
# Initialize FIFO data queue integration
self._initialize_data_queue_integration()
# Initialize simplified data integration
self._initialize_simplified_data_integration()
# Log initial queue status
logger.info("FIFO data queues initialized")
self.log_queue_status(detailed=False)
# Log initial data status
logger.info("Simplified data integration initialized")
self._log_data_status()
# Initialize database cleanup task
self._schedule_database_cleanup()
@ -3699,37 +3689,56 @@ class TradingOrchestrator:
"""
return self.db_manager.get_best_checkpoint_metadata(model_name)
# === FIFO DATA QUEUE MANAGEMENT ===
# === SIMPLIFIED DATA MANAGEMENT ===
def update_data_queue(self, data_type: str, symbol: str, data: Any) -> bool:
def _initialize_simplified_data_integration(self):
"""Initialize the simplified data integration system"""
try:
# Start the data integration system
self.data_integration.start()
logger.info("Simplified data integration started successfully")
except Exception as e:
logger.error(f"Error starting simplified data integration: {e}")
def _log_data_status(self):
"""Log current data status"""
try:
status = self.data_integration.get_cache_status()
cache_status = status.get('cache_status', {})
logger.info("=== Data Cache Status ===")
for data_type, symbols_data in cache_status.items():
symbol_info = []
for symbol, info in symbols_data.items():
age = info.get('age_seconds', 0)
has_data = info.get('has_data', False)
if has_data and age < 300: # Recent data
symbol_info.append(f"{symbol}:✅")
else:
symbol_info.append(f"{symbol}:❌")
if symbol_info:
logger.info(f"{data_type}: {', '.join(symbol_info)}")
except Exception as e:
logger.error(f"Error logging data status: {e}")
def update_data_cache(self, data_type: str, symbol: str, data: Any, source: str = "orchestrator") -> bool:
"""
Update FIFO data queue with new data
Update data cache with new data (simplified approach)
Args:
data_type: Type of data ('ohlcv_1s', 'ohlcv_1m', etc.)
data_type: Type of data ('ohlcv_1s', 'technical_indicators', etc.)
symbol: Trading symbol
data: New data to add
data: New data to store
source: Source of the data
Returns:
bool: True if successful
"""
try:
if data_type not in self.data_queues:
logger.warning(f"Unknown data type: {data_type}")
return False
if symbol not in self.data_queues[data_type]:
logger.warning(f"Unknown symbol for {data_type}: {symbol}")
return False
# Thread-safe queue update
with self.data_queue_locks[data_type][symbol]:
self.data_queues[data_type][symbol].append(data)
return True
return self.data_integration.cache.update(data_type, symbol, data, source)
except Exception as e:
logger.error(f"Error updating data queue {data_type}/{symbol}: {e}")
logger.error(f"Error updating data cache {data_type}/{symbol}: {e}")
return False
def get_latest_data(self, data_type: str, symbol: str, count: int = 1) -> List[Any]:
@ -3887,7 +3896,7 @@ class TradingOrchestrator:
def build_base_data_input(self, symbol: str) -> Optional[Any]:
"""
Build BaseDataInput from FIFO queues with consistent data
Build BaseDataInput using simplified data integration
Args:
symbol: Trading symbol
@ -3896,15 +3905,8 @@ class TradingOrchestrator:
BaseDataInput with consistent data structure
"""
try:
from core.data_models import BaseDataInput
# Check minimum data requirements
min_requirements = {
'ohlcv_1s': 100,
'ohlcv_1m': 50,
'ohlcv_1h': 20,
'ohlcv_1d': 10
}
# Use simplified data integration to build BaseDataInput
return self.data_integration.build_base_data_input(symbol)
# Verify we have minimum data for all timeframes with fallback strategy
missing_data = []