stability

This commit is contained in:
Dobromir Popov
2025-07-28 12:10:52 +03:00
parent 9219b78241
commit fb72c93743
8 changed files with 207 additions and 53 deletions

View File

@ -83,6 +83,13 @@ class PivotBounds:
distances = [abs(current_price - r) for r in self.pivot_resistance_levels]
return min(distances) / self.get_price_range()
@dataclass
class SimplePivotLevel:
    """Simple pivot level structure for fallback pivot detection.

    Lightweight, properly-typed replacement for the ad-hoc ``type('MockLevel', ...)``
    objects previously built in the simple pivot-detection path; exposes the same
    attributes consumers read from a full pivot level.
    """
    # Detected swing points; element type is not fixed here (List[Any]) —
    # presumably project swing-point objects, TODO confirm against callers.
    swing_points: List[Any] = field(default_factory=list)
    # De-duplicated support price levels (built from pivot lows).
    support_levels: List[float] = field(default_factory=list)
    # De-duplicated resistance price levels (built from pivot highs).
    resistance_levels: List[float] = field(default_factory=list)
@dataclass
class MarketTick:
"""Standardized market tick data structure"""
@ -127,6 +134,10 @@ class DataProvider:
self.real_time_data = {} # {symbol: {timeframe: deque}}
self.current_prices = {} # {symbol: float}
# Live price cache for low-latency price updates
self.live_price_cache: Dict[str, Tuple[float, datetime]] = {}
self.live_price_cache_ttl = timedelta(milliseconds=500)
# Initialize cached data structure
for symbol in self.symbols:
self.cached_data[symbol] = {}
@ -1839,14 +1850,14 @@ class DataProvider:
low_pivots = monthly_data[lows == rolling_min]['low'].tolist()
pivot_lows.extend(low_pivots)
# Create mock level structure
mock_level = type('MockLevel', (), {
'swing_points': [],
'support_levels': list(set(pivot_lows)),
'resistance_levels': list(set(pivot_highs))
})()
# Create proper pivot level structure
pivot_level = SimplePivotLevel(
swing_points=[],
support_levels=list(set(pivot_lows)),
resistance_levels=list(set(pivot_highs))
)
return {'level_0': mock_level}
return {'level_0': pivot_level}
except Exception as e:
logger.error(f"Error in simple pivot detection: {e}")

View File

@ -1062,10 +1062,11 @@ class MultiExchangeCOBProvider:
consolidated_bids[price].exchange_breakdown[exchange_name] = level
# Update dominant exchange based on volume
if level.volume_usd > consolidated_bids[price].exchange_breakdown.get(
consolidated_bids[price].dominant_exchange,
type('obj', (object,), {'volume_usd': 0})()
).volume_usd:
current_dominant = consolidated_bids[price].exchange_breakdown.get(
consolidated_bids[price].dominant_exchange
)
current_volume = current_dominant.volume_usd if current_dominant else 0
if level.volume_usd > current_volume:
consolidated_bids[price].dominant_exchange = exchange_name
# Process merged asks (similar logic)
@ -1088,10 +1089,11 @@ class MultiExchangeCOBProvider:
consolidated_asks[price].total_orders += level.orders_count
consolidated_asks[price].exchange_breakdown[exchange_name] = level
if level.volume_usd > consolidated_asks[price].exchange_breakdown.get(
consolidated_asks[price].dominant_exchange,
type('obj', (object,), {'volume_usd': 0})()
).volume_usd:
current_dominant = consolidated_asks[price].exchange_breakdown.get(
consolidated_asks[price].dominant_exchange
)
current_volume = current_dominant.volume_usd if current_dominant else 0
if level.volume_usd > current_volume:
consolidated_asks[price].dominant_exchange = exchange_name
logger.debug(f"Consolidated {len(consolidated_bids)} bids and {len(consolidated_asks)} asks for {symbol}")

View File

@ -1493,6 +1493,17 @@ class TradingOrchestrator:
if not base_data:
logger.warning(f"Cannot build BaseDataInput for predictions: {symbol}")
return predictions
# Validate base_data has proper feature vector
if hasattr(base_data, 'get_feature_vector'):
try:
feature_vector = base_data.get_feature_vector()
if feature_vector is None or (isinstance(feature_vector, np.ndarray) and feature_vector.size == 0):
logger.warning(f"BaseDataInput has empty feature vector for {symbol}")
return predictions
except Exception as e:
logger.warning(f"Error getting feature vector from BaseDataInput for {symbol}: {e}")
return predictions
# log all registered models
logger.debug(f"inferencing registered models: {self.model_registry.models}")
@ -1691,6 +1702,15 @@ class TradingOrchestrator:
try:
logger.debug(f"Storing inference for {model_name}: {prediction.action} (confidence: {prediction.confidence:.3f})")
# Validate model_input before storing
if model_input is None:
logger.warning(f"Skipping inference storage for {model_name}: model_input is None")
return
if isinstance(model_input, dict) and not model_input:
logger.warning(f"Skipping inference storage for {model_name}: model_input is empty dict")
return
# Extract symbol from prediction if not provided
if symbol is None:
symbol = getattr(prediction, 'symbol', 'ETH/USDT') # Default to ETH/USDT if not available
@ -2569,6 +2589,25 @@ class TradingOrchestrator:
# Method 3: Dictionary with feature data
if isinstance(model_input, dict):
# Check if dictionary is empty
if not model_input:
logger.warning(f"Empty dictionary passed as model_input for {model_name}, using fallback")
# Try to use data provider to build state as fallback
if hasattr(self, 'data_provider'):
try:
base_data = self.data_provider.build_base_data_input('ETH/USDT')
if base_data and hasattr(base_data, 'get_feature_vector'):
state = base_data.get_feature_vector()
if isinstance(state, np.ndarray):
logger.debug(f"Used data provider fallback for empty dict in {model_name}")
return state
except Exception as e:
logger.debug(f"Data provider fallback failed for empty dict in {model_name}: {e}")
# Final fallback: return default state
logger.warning(f"Using default state for empty dict in {model_name}")
return np.zeros(403, dtype=np.float32) # Default state size
# Try to extract features from dictionary
if 'features' in model_input:
features = model_input['features']
@ -2589,6 +2628,8 @@ class TradingOrchestrator:
if feature_list:
return np.array(feature_list, dtype=np.float32)
else:
logger.warning(f"No numerical features found in dictionary for {model_name}, using fallback")
# Method 4: List or tuple
if isinstance(model_input, (list, tuple)):