Added 5-minute BOM data to the CNN input; respects the configured port.
This commit is contained in:
@ -197,6 +197,14 @@ class EnhancedTradingOrchestrator(TradingOrchestrator):
|
||||
self.latest_cob_state: Dict[str, np.ndarray] = {}
|
||||
self.cob_feature_history: Dict[str, deque] = {symbol: deque(maxlen=100) for symbol in self.symbols}
|
||||
|
||||
# Start BOM cache updates in data provider
|
||||
if hasattr(self.data_provider, 'start_bom_cache_updates'):
|
||||
try:
|
||||
self.data_provider.start_bom_cache_updates(self.cob_integration)
|
||||
logger.info("Started BOM cache updates in data provider")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to start BOM cache updates: {e}")
|
||||
|
||||
logger.info("COB Integration: Deferred initialization to prevent sync/async conflicts")
|
||||
|
||||
# Williams integration
|
||||
@ -759,70 +767,56 @@ class EnhancedTradingOrchestrator(TradingOrchestrator):
|
||||
|
||||
def _get_bom_matrix_for_cnn(self, symbol: str) -> Optional[np.ndarray]:
    """
    Get cached BOM (Book of Market) matrix for CNN input from the data provider.

    Uses 1s cached BOM data from the data provider for proper temporal analysis.
    Falls back to a synthetic matrix when no cache is available.

    Args:
        symbol: Trading symbol to fetch BOM data for.

    Returns:
        np.ndarray: BOM matrix of shape (sequence_length, 120) from cached 1s data,
        or a synthetic fallback matrix of the same shape.
    """
    # Hoisted out of the try-block so the except-path call to
    # _generate_fallback_bom_matrix can never hit an unbound local.
    sequence_length = 50  # Match standard CNN sequence length
    try:
        # Preferred path: cached BOM matrix maintained by the data provider
        if hasattr(self.data_provider, 'get_bom_matrix_for_cnn'):
            bom_matrix = self.data_provider.get_bom_matrix_for_cnn(symbol, sequence_length)
            if bom_matrix is not None:
                logger.debug(f"Retrieved cached BOM matrix for {symbol}: shape={bom_matrix.shape}")
                return bom_matrix

        # Fallback to generating synthetic BOM matrix if no cache available
        logger.warning(f"No cached BOM data available for {symbol}, generating synthetic")
        return self._generate_fallback_bom_matrix(symbol, sequence_length)

    except Exception as e:
        # Best-effort: never propagate — degrade to the synthetic matrix
        logger.warning(f"Error getting BOM matrix for {symbol}: {e}")
        return self._generate_fallback_bom_matrix(symbol, sequence_length)
|
||||
def _generate_fallback_bom_matrix(self, symbol: str, sequence_length: int) -> np.ndarray:
    """
    Generate a fallback BOM matrix when the cache is not available.

    Builds one synthetic feature vector (from the data provider when it
    supports it, zeros otherwise) and repeats it across the sequence with
    small random per-row variations to simulate temporal order-book changes.

    Args:
        symbol: Trading symbol the features are generated for.
        sequence_length: Number of time steps (rows) in the returned matrix.

    Returns:
        np.ndarray: Matrix of shape (sequence_length, 120), dtype float32.
        All-zeros on any internal failure (absolute fallback).
    """
    try:
        # Synthetic BOM features for the current timestamp, if the provider offers them
        if hasattr(self.data_provider, 'generate_synthetic_bom_features'):
            current_features = self.data_provider.generate_synthetic_bom_features(symbol)
        else:
            current_features = [0.0] * 120

        # Normalize to exactly 120 features so row assignment can never
        # fail on a provider that returns a different-length vector.
        base = np.asarray(current_features, dtype=np.float32).ravel()
        if base.size > 120:
            base = base[:120]
        elif base.size < 120:
            base = np.pad(base, (0, 120 - base.size))

        # Vectorized per-row variation: each row scaled by a factor in
        # [0.95, 1.05) — i.e. +/-5% (a 10% total range) — to simulate
        # temporal changes. NOTE: non-deterministic (np.random), matching
        # the original behavior.
        factors = (0.95 + 0.1 * np.random.random(sequence_length)).astype(np.float32)
        bom_matrix = factors[:, None] * base[None, :]

        logger.debug(f"Generated fallback BOM matrix for {symbol}: shape={bom_matrix.shape}")
        return bom_matrix

    except Exception as e:
        logger.error(f"Error generating fallback BOM matrix for {symbol}: {e}")
        # Return zeros as absolute fallback
        return np.zeros((sequence_length, 120), dtype=np.float32)
||||
def _get_cob_bom_features(self, symbol: str) -> Optional[List[float]]:
|
||||
"""Extract COB features for BOM matrix (40 features)"""
|
||||
@ -1203,33 +1197,77 @@ class EnhancedTradingOrchestrator(TradingOrchestrator):
|
||||
Combine traditional market features with BOM matrix features
|
||||
|
||||
Args:
|
||||
market_matrix: Traditional market data features (sequence_length, market_features)
|
||||
bom_matrix: BOM matrix features (sequence_length, bom_features)
|
||||
market_matrix: Traditional market data features (timeframes, sequence_length, market_features) - 3D
|
||||
bom_matrix: BOM matrix features (sequence_length, bom_features) - 2D
|
||||
symbol: Trading symbol
|
||||
|
||||
Returns:
|
||||
Combined feature matrix (sequence_length, market_features + bom_features)
|
||||
Combined feature matrix reshaped for CNN input
|
||||
"""
|
||||
try:
|
||||
# Ensure both matrices have the same sequence length
|
||||
min_length = min(market_matrix.shape[0], bom_matrix.shape[0])
|
||||
logger.debug(f"Combining features for {symbol}: market={market_matrix.shape}, bom={bom_matrix.shape}")
|
||||
|
||||
market_trimmed = market_matrix[:min_length]
|
||||
bom_trimmed = bom_matrix[:min_length]
|
||||
|
||||
# Combine horizontally
|
||||
combined_matrix = np.concatenate([market_trimmed, bom_trimmed], axis=1)
|
||||
|
||||
logger.debug(f"Combined market and BOM features for {symbol}: "
|
||||
f"market={market_trimmed.shape}, bom={bom_trimmed.shape}, "
|
||||
f"combined={combined_matrix.shape}")
|
||||
|
||||
return combined_matrix.astype(np.float32)
|
||||
# Handle dimensional mismatch
|
||||
if market_matrix.ndim == 3 and bom_matrix.ndim == 2:
|
||||
# Market matrix is (timeframes, sequence_length, features)
|
||||
# BOM matrix is (sequence_length, bom_features)
|
||||
|
||||
# Reshape market matrix to 2D by flattening timeframes dimension
|
||||
timeframes, sequence_length, market_features = market_matrix.shape
|
||||
|
||||
# Option 1: Take the last timeframe (most recent data)
|
||||
market_2d = market_matrix[-1] # Shape: (sequence_length, market_features)
|
||||
|
||||
# Ensure sequence lengths match
|
||||
min_length = min(market_2d.shape[0], bom_matrix.shape[0])
|
||||
market_trimmed = market_2d[:min_length]
|
||||
bom_trimmed = bom_matrix[:min_length]
|
||||
|
||||
# Combine horizontally
|
||||
combined_matrix = np.concatenate([market_trimmed, bom_trimmed], axis=1)
|
||||
|
||||
logger.debug(f"Combined features for {symbol}: "
|
||||
f"market_2d={market_trimmed.shape}, bom={bom_trimmed.shape}, "
|
||||
f"combined={combined_matrix.shape}")
|
||||
|
||||
return combined_matrix.astype(np.float32)
|
||||
|
||||
elif market_matrix.ndim == 2 and bom_matrix.ndim == 2:
|
||||
# Both are 2D - can combine directly
|
||||
min_length = min(market_matrix.shape[0], bom_matrix.shape[0])
|
||||
market_trimmed = market_matrix[:min_length]
|
||||
bom_trimmed = bom_matrix[:min_length]
|
||||
|
||||
combined_matrix = np.concatenate([market_trimmed, bom_trimmed], axis=1)
|
||||
|
||||
logger.debug(f"Combined 2D features for {symbol}: "
|
||||
f"market={market_trimmed.shape}, bom={bom_trimmed.shape}, "
|
||||
f"combined={combined_matrix.shape}")
|
||||
|
||||
return combined_matrix.astype(np.float32)
|
||||
|
||||
else:
|
||||
logger.warning(f"Unsupported matrix dimensions for {symbol}: "
|
||||
f"market={market_matrix.shape}, bom={bom_matrix.shape}")
|
||||
# Fallback: reshape market matrix to 2D if needed
|
||||
if market_matrix.ndim == 3:
|
||||
market_2d = market_matrix.reshape(-1, market_matrix.shape[-1])
|
||||
else:
|
||||
market_2d = market_matrix
|
||||
|
||||
return market_2d.astype(np.float32)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error combining market and BOM features for {symbol}: {e}")
|
||||
# Fallback to market features only
|
||||
return market_matrix
|
||||
# Fallback to reshaped market features only
|
||||
try:
|
||||
if market_matrix.ndim == 3:
|
||||
return market_matrix[-1].astype(np.float32) # Last timeframe
|
||||
else:
|
||||
return market_matrix.astype(np.float32)
|
||||
except:
|
||||
logger.error(f"Fallback failed for {symbol}, returning zeros")
|
||||
return np.zeros((50, 5), dtype=np.float32) # Basic fallback
|
||||
|
||||
def _get_latest_price_from_universal(self, symbol: str, timeframe: str, universal_stream: UniversalDataStream) -> Optional[float]:
|
||||
"""Get latest price for symbol and timeframe from universal data stream"""
|
||||
@ -1353,18 +1391,25 @@ class EnhancedTradingOrchestrator(TradingOrchestrator):
|
||||
|
||||
# Extract predictions (action probabilities)
|
||||
if isinstance(prediction_result, dict):
|
||||
predictions = prediction_result.get('probabilities', np.array([0.33, 0.33, 0.34]))
|
||||
predictions = prediction_result.get('probabilities', [0.33, 0.33, 0.34])
|
||||
confidence = prediction_result.get('confidence', 0.7)
|
||||
# Ensure predictions is a flat numpy array
|
||||
if isinstance(predictions, (list, tuple)):
|
||||
predictions = np.array(predictions, dtype=np.float32)
|
||||
predictions = np.append(predictions.flatten(), confidence)
|
||||
# Ensure predictions is a flat list first
|
||||
if isinstance(predictions, np.ndarray):
|
||||
predictions = predictions.flatten().tolist()
|
||||
elif not isinstance(predictions, list):
|
||||
predictions = [float(predictions)]
|
||||
# Add confidence as a single float
|
||||
predictions.append(float(confidence))
|
||||
# Convert to flat numpy array
|
||||
predictions = np.array(predictions, dtype=np.float32)
|
||||
else:
|
||||
# Ensure prediction_result is a flat numpy array
|
||||
if isinstance(prediction_result, (list, tuple)):
|
||||
# Handle direct prediction result
|
||||
if isinstance(prediction_result, np.ndarray):
|
||||
predictions = prediction_result.flatten()
|
||||
elif isinstance(prediction_result, (list, tuple)):
|
||||
predictions = np.array(prediction_result, dtype=np.float32).flatten()
|
||||
else:
|
||||
predictions = np.array(prediction_result).flatten()
|
||||
predictions = np.array([float(prediction_result)], dtype=np.float32)
|
||||
|
||||
# Extract hidden features if model supports it
|
||||
hidden_features = None
|
||||
|
Reference in New Issue
Block a user