scalping dash also works initially
@@ -7,6 +7,7 @@ This enhanced orchestrator implements:
3. Multi-symbol (ETH, BTC) coordinated decision making
4. Perfect move marking for CNN backpropagation training
5. Market environment adaptation through RL evaluation
6. Universal data format compliance (5 timeseries streams)
"""

import asyncio
@@ -22,6 +23,8 @@ import torch

from .config import get_config
from .data_provider import DataProvider
from .universal_data_adapter import UniversalDataAdapter, UniversalDataStream
from .realtime_tick_processor import RealTimeTickProcessor, ProcessedTickFeatures, integrate_with_orchestrator
from models import get_model_registry, ModelInterface, CNNModelInterface, RLAgentInterface

logger = logging.getLogger(__name__)
@@ -70,6 +73,7 @@ class MarketState:
volume: float
trend_strength: float
market_regime: str # 'trending', 'ranging', 'volatile'
universal_data: UniversalDataStream # Universal format data

@dataclass
class PerfectMove:
@@ -86,6 +90,7 @@ class PerfectMove:
class EnhancedTradingOrchestrator:
"""
Enhanced orchestrator with sophisticated multi-modal decision making
and universal data format compliance
"""

def __init__(self, data_provider: DataProvider = None):
@@ -94,6 +99,15 @@ class EnhancedTradingOrchestrator:
self.data_provider = data_provider or DataProvider()
self.model_registry = get_model_registry()

# Initialize universal data adapter
self.universal_adapter = UniversalDataAdapter(self.data_provider)

# Initialize real-time tick processor for ultra-low latency processing
self.tick_processor = RealTimeTickProcessor(symbols=self.config.symbols)

# Real-time tick features storage
self.realtime_tick_features = {symbol: deque(maxlen=100) for symbol in self.config.symbols}

# Multi-symbol configuration
self.symbols = self.config.symbols
self.timeframes = self.config.timeframes
@@ -123,22 +137,28 @@ class EnhancedTradingOrchestrator:
self.decision_callbacks = []
self.learning_callbacks = []

logger.info("Enhanced TradingOrchestrator initialized")
# Integrate tick processor with orchestrator
integrate_with_orchestrator(self, self.tick_processor)

logger.info("Enhanced TradingOrchestrator initialized with Universal Data Format")
logger.info(f"Symbols: {self.symbols}")
logger.info(f"Timeframes: {self.timeframes}")
logger.info(f"Universal format: ETH ticks, 1m, 1h, 1d + BTC reference ticks")
logger.info(f"Enhanced confidence threshold: {self.confidence_threshold}")
logger.info("Real-time tick processor integrated for ultra-low latency processing")

def _initialize_timeframe_weights(self) -> Dict[str, float]:
"""Initialize weights for different timeframes"""
# Higher timeframes get more weight for trend direction
# Lower timeframes get more weight for entry/exit timing
base_weights = {
'1m': 0.05, # Noise filtering
'1s': 0.60, # Primary scalping signal (ticks)
'1m': 0.20, # Short-term confirmation
'5m': 0.10, # Short-term momentum
'15m': 0.15, # Entry/exit timing
'1h': 0.25, # Medium-term trend
'1h': 0.15, # Medium-term trend
'4h': 0.25, # Stronger trend confirmation
'1d': 0.20 # Long-term direction
'1d': 0.05 # Long-term direction (minimal for scalping)
}

# Normalize weights for configured timeframes
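# A minimal sketch of the normalization step cut off by the hunk above (an
# assumption, not part of this commit): keep only the configured timeframes and
# rescale their base weights so they sum to 1.0.
weights = {tf: base_weights[tf] for tf in self.timeframes if tf in base_weights}
total = sum(weights.values())
return {tf: w / total for tf, w in weights.items()} if total > 0 else weights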
@@ -163,19 +183,42 @@ class EnhancedTradingOrchestrator:

async def make_coordinated_decisions(self) -> Dict[str, Optional[TradingAction]]:
"""
Make coordinated trading decisions across all symbols
Make coordinated trading decisions across all symbols using universal data format
"""
decisions = {}

try:
# Get market states for all symbols
market_states = await self._get_all_market_states()
# Get universal data stream (5 timeseries)
universal_stream = self.universal_adapter.get_universal_data_stream()

if universal_stream is None:
logger.warning("Failed to get universal data stream")
return decisions

# Validate universal format
is_valid, issues = self.universal_adapter.validate_universal_format(universal_stream)
if not is_valid:
logger.warning(f"Universal data format validation failed: {issues}")
return decisions

logger.info("UNIVERSAL DATA STREAM ACTIVE:")
logger.info(f" ETH ticks: {len(universal_stream.eth_ticks)} samples")
logger.info(f" ETH 1m: {len(universal_stream.eth_1m)} candles")
logger.info(f" ETH 1h: {len(universal_stream.eth_1h)} candles")
logger.info(f" ETH 1d: {len(universal_stream.eth_1d)} candles")
logger.info(f" BTC reference: {len(universal_stream.btc_ticks)} samples")
logger.info(f" Data quality: {universal_stream.metadata['data_quality']['overall_score']:.2f}")

# Get market states for all symbols using universal data
market_states = await self._get_all_market_states_universal(universal_stream)

# Get enhanced predictions for all symbols
symbol_predictions = {}
for symbol in self.symbols:
if symbol in market_states:
predictions = await self._get_enhanced_predictions(symbol, market_states[symbol])
predictions = await self._get_enhanced_predictions_universal(
symbol, market_states[symbol], universal_stream
)
symbol_predictions[symbol] = predictions

# Coordinate decisions considering symbol correlations
@@ -198,76 +241,125 @@ class EnhancedTradingOrchestrator:

return decisions

async def _get_all_market_states(self) -> Dict[str, MarketState]:
"""Get current market state for all symbols"""
async def _get_all_market_states_universal(self, universal_stream: UniversalDataStream) -> Dict[str, MarketState]:
"""Get current market state for all symbols using universal data format"""
market_states = {}

for symbol in self.symbols:
try:
# Get current market data for all timeframes
prices = {}
features = {}
try:
# Create market state for ETH/USDT (primary trading pair)
if 'ETH/USDT' in self.symbols:
eth_prices = {}
eth_features = {}

for timeframe in self.timeframes:
# Get current price
current_price = self.data_provider.get_current_price(symbol)
if current_price:
prices[timeframe] = current_price

# Get feature matrix for this timeframe
feature_matrix = self.data_provider.get_feature_matrix(
symbol=symbol,
timeframes=[timeframe],
window_size=20 # Standard window
)
if feature_matrix is not None:
features[timeframe] = feature_matrix
# Extract prices from universal stream
if len(universal_stream.eth_ticks) > 0:
eth_prices['1s'] = float(universal_stream.eth_ticks[-1, 4]) # Close price from ticks
if len(universal_stream.eth_1m) > 0:
eth_prices['1m'] = float(universal_stream.eth_1m[-1, 4]) # Close price from 1m
if len(universal_stream.eth_1h) > 0:
eth_prices['1h'] = float(universal_stream.eth_1h[-1, 4]) # Close price from 1h
if len(universal_stream.eth_1d) > 0:
eth_prices['1d'] = float(universal_stream.eth_1d[-1, 4]) # Close price from 1d

if prices and features:
# Calculate market metrics
volatility = self._calculate_volatility(symbol)
volume = self._get_current_volume(symbol)
trend_strength = self._calculate_trend_strength(symbol)
market_regime = self._determine_market_regime(symbol)

market_state = MarketState(
symbol=symbol,
timestamp=datetime.now(),
prices=prices,
features=features,
volatility=volatility,
volume=volume,
trend_strength=trend_strength,
market_regime=market_regime
)

market_states[symbol] = market_state

# Store for historical tracking
self.market_states[symbol].append(market_state)

except Exception as e:
logger.error(f"Error getting market state for {symbol}: {e}")
# Extract features from universal stream (OHLCV data)
eth_features['1s'] = universal_stream.eth_ticks[:, 1:] if universal_stream.eth_ticks.shape[1] > 5 else universal_stream.eth_ticks
eth_features['1m'] = universal_stream.eth_1m[:, 1:] if universal_stream.eth_1m.shape[1] > 5 else universal_stream.eth_1m
eth_features['1h'] = universal_stream.eth_1h[:, 1:] if universal_stream.eth_1h.shape[1] > 5 else universal_stream.eth_1h
eth_features['1d'] = universal_stream.eth_1d[:, 1:] if universal_stream.eth_1d.shape[1] > 5 else universal_stream.eth_1d

# Calculate market metrics
volatility = self._calculate_volatility_from_universal('ETH/USDT', universal_stream)
volume = self._get_current_volume_from_universal('ETH/USDT', universal_stream)
trend_strength = self._calculate_trend_strength_from_universal('ETH/USDT', universal_stream)
market_regime = self._determine_market_regime_from_universal('ETH/USDT', universal_stream)

eth_market_state = MarketState(
symbol='ETH/USDT',
timestamp=universal_stream.timestamp,
prices=eth_prices,
features=eth_features,
volatility=volatility,
volume=volume,
trend_strength=trend_strength,
market_regime=market_regime,
universal_data=universal_stream
)

market_states['ETH/USDT'] = eth_market_state
self.market_states['ETH/USDT'].append(eth_market_state)

# Create market state for BTC/USDT (reference pair)
if 'BTC/USDT' in self.symbols:
btc_prices = {}
btc_features = {}

# Extract BTC reference data
if len(universal_stream.btc_ticks) > 0:
btc_prices['1s'] = float(universal_stream.btc_ticks[-1, 4]) # Close price from BTC ticks

btc_features['1s'] = universal_stream.btc_ticks[:, 1:] if universal_stream.btc_ticks.shape[1] > 5 else universal_stream.btc_ticks

# Calculate BTC metrics
btc_volatility = self._calculate_volatility_from_universal('BTC/USDT', universal_stream)
btc_volume = self._get_current_volume_from_universal('BTC/USDT', universal_stream)
btc_trend_strength = self._calculate_trend_strength_from_universal('BTC/USDT', universal_stream)
btc_market_regime = self._determine_market_regime_from_universal('BTC/USDT', universal_stream)

btc_market_state = MarketState(
symbol='BTC/USDT',
timestamp=universal_stream.timestamp,
prices=btc_prices,
features=btc_features,
volatility=btc_volatility,
volume=btc_volume,
trend_strength=btc_trend_strength,
market_regime=btc_market_regime,
universal_data=universal_stream
)

market_states['BTC/USDT'] = btc_market_state
self.market_states['BTC/USDT'].append(btc_market_state)

except Exception as e:
logger.error(f"Error creating market states from universal data: {e}")

return market_states
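# A small illustration of the column layout assumed by the indexing above (inferred
# from the in-line comments, not stated explicitly in the commit): each universal-stream
# row is [timestamp, open, high, low, close, volume], so [-1, 4] is the latest close
# and column 5 is volume.
example_row = np.array([1717600000.0, 3050.0, 3052.5, 3049.0, 3051.2, 12.7])
example_close, example_volume = float(example_row[4]), float(example_row[5])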
async def _get_enhanced_predictions(self, symbol: str, market_state: MarketState) -> List[EnhancedPrediction]:
"""Get enhanced predictions with timeframe breakdown"""
async def _get_enhanced_predictions_universal(self, symbol: str, market_state: MarketState,
universal_stream: UniversalDataStream) -> List[EnhancedPrediction]:
"""Get enhanced predictions using universal data format"""
predictions = []

for model_name, model in self.model_registry.models.items():
try:
if isinstance(model, CNNModelInterface):
# Get CNN predictions for each timeframe
# Format universal data for CNN model
cnn_data = self.universal_adapter.format_for_model(universal_stream, 'cnn')

# Get CNN predictions for each timeframe using universal data
timeframe_predictions = []

for timeframe in self.timeframes:
if timeframe in market_state.features:
feature_matrix = market_state.features[timeframe]

# Get timeframe-specific prediction
action_probs, confidence = await self._get_timeframe_prediction(
model, feature_matrix, timeframe, market_state
# ETH timeframes (primary trading pair)
if symbol == 'ETH/USDT':
timeframe_data_map = {
'1s': cnn_data.get('eth_ticks'),
'1m': cnn_data.get('eth_1m'),
'1h': cnn_data.get('eth_1h'),
'1d': cnn_data.get('eth_1d')
}
# BTC reference
elif symbol == 'BTC/USDT':
timeframe_data_map = {
'1s': cnn_data.get('btc_ticks')
}
else:
continue

for timeframe, feature_matrix in timeframe_data_map.items():
if feature_matrix is not None and len(feature_matrix) > 0:
# Get timeframe-specific prediction using universal data
action_probs, confidence = await self._get_timeframe_prediction_universal(
model, feature_matrix, timeframe, market_state, universal_stream
)

if action_probs is not None:
@@ -285,7 +377,8 @@ class EnhancedTradingOrchestrator:
market_features={
'volatility': market_state.volatility,
'volume': market_state.volume,
'trend_strength': market_state.trend_strength
'trend_strength': market_state.trend_strength,
'data_quality': universal_stream.metadata['data_quality']['overall_score']
}
)
timeframe_predictions.append(tf_prediction)
@@ -305,7 +398,9 @@ class EnhancedTradingOrchestrator:
timestamp=datetime.now(),
metadata={
'market_regime': market_state.market_regime,
'symbol_correlation': self._get_symbol_correlation(symbol)
'symbol_correlation': self._get_symbol_correlation(symbol),
'universal_data_quality': universal_stream.metadata['data_quality'],
'data_freshness': universal_stream.metadata['data_freshness']
}
)
predictions.append(enhanced_pred)
@@ -315,9 +410,10 @@ class EnhancedTradingOrchestrator:

return predictions

async def _get_timeframe_prediction(self, model: CNNModelInterface, feature_matrix: np.ndarray,
timeframe: str, market_state: MarketState) -> Tuple[Optional[np.ndarray], float]:
"""Get prediction for specific timeframe with enhanced context"""
async def _get_timeframe_prediction_universal(self, model: CNNModelInterface, feature_matrix: np.ndarray,
timeframe: str, market_state: MarketState,
universal_stream: UniversalDataStream) -> Tuple[Optional[np.ndarray], float]:
"""Get prediction for specific timeframe using universal data format"""
try:
# Check if model supports timeframe-specific prediction
if hasattr(model, 'predict_timeframe'):
@@ -326,9 +422,9 @@ class EnhancedTradingOrchestrator:
action_probs, confidence = model.predict(feature_matrix)

if action_probs is not None and confidence is not None:
# Enhance confidence based on market conditions
enhanced_confidence = self._enhance_confidence_with_context(
confidence, timeframe, market_state
# Enhance confidence based on universal data quality and market conditions
enhanced_confidence = self._enhance_confidence_with_universal_context(
confidence, timeframe, market_state, universal_stream
)
return action_probs, enhanced_confidence
@@ -337,20 +433,39 @@ class EnhancedTradingOrchestrator:

return None, 0.0

def _enhance_confidence_with_context(self, base_confidence: float, timeframe: str,
market_state: MarketState) -> float:
"""Enhance confidence score based on market context"""
def _enhance_confidence_with_universal_context(self, base_confidence: float, timeframe: str,
market_state: MarketState,
universal_stream: UniversalDataStream) -> float:
"""Enhance confidence score based on universal data context"""
enhanced = base_confidence

# Adjust based on data quality from universal stream
data_quality = universal_stream.metadata['data_quality']['overall_score']
enhanced *= data_quality

# Adjust based on data freshness
freshness = universal_stream.metadata.get('data_freshness', {})
if timeframe in ['1s', '1m']:
# For short timeframes, penalize stale data more heavily
eth_freshness = freshness.get(f'eth_{timeframe}', 0)
if eth_freshness > 60: # More than 1 minute old
enhanced *= 0.8

# Adjust based on market regime
if market_state.market_regime == 'trending':
enhanced *= 1.1 # More confident in trending markets
elif market_state.market_regime == 'volatile':
enhanced *= 0.8 # Less confident in volatile markets

# Adjust based on timeframe reliability
# Adjust based on timeframe reliability for scalping
timeframe_reliability = {
'1m': 0.7, '5m': 0.8, '15m': 0.9, '1h': 1.0, '4h': 1.1, '1d': 1.2
'1s': 1.0, # Primary scalping timeframe
'1m': 0.9, # Short-term confirmation
'5m': 0.8, # Short-term momentum
'15m': 0.9, # Entry/exit timing
'1h': 0.8, # Medium-term trend
'4h': 0.7, # Longer-term (less relevant for scalping)
'1d': 0.6 # Long-term direction (minimal for scalping)
}
enhanced *= timeframe_reliability.get(timeframe, 1.0)

@@ -360,6 +475,18 @@ class EnhancedTradingOrchestrator:
elif market_state.volume < 0.5: # Low volume
enhanced *= 0.9

# Adjust based on correlation with BTC (for ETH trades)
if market_state.symbol == 'ETH/USDT' and len(universal_stream.btc_ticks) > 1:
# Check ETH-BTC correlation strength
eth_momentum = (universal_stream.eth_ticks[-1, 4] - universal_stream.eth_ticks[-2, 4]) / universal_stream.eth_ticks[-2, 4]
btc_momentum = (universal_stream.btc_ticks[-1, 4] - universal_stream.btc_ticks[-2, 4]) / universal_stream.btc_ticks[-2, 4]

# If ETH and BTC are moving in same direction, increase confidence
if (eth_momentum > 0 and btc_momentum > 0) or (eth_momentum < 0 and btc_momentum < 0):
enhanced *= 1.05
else:
enhanced *= 0.95

return min(enhanced, 1.0) # Cap at 1.0
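# A worked example of the multipliers above (illustrative arithmetic only):
# base 0.70 * data_quality 0.95 * trending 1.1 * '1s' reliability 1.0
# * aligned ETH/BTC momentum 1.05 ≈ 0.768, then capped at 1.0.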
def _combine_timeframe_predictions(self, timeframe_predictions: List[TimeframePrediction],
@@ -524,7 +651,7 @@ class EnhancedTradingOrchestrator:
initial_state = evaluation_item['market_state_before']

# Get current market state for comparison
current_market_states = await self._get_all_market_states()
current_market_states = await self._get_all_market_states_universal(self.universal_adapter.get_universal_data_stream())
current_state = current_market_states.get(action.symbol)

if current_state:
@@ -625,38 +752,165 @@ class EnhancedTradingOrchestrator:
except Exception as e:
logger.error(f"Error marking perfect move: {e}")

def get_recent_perfect_moves(self, limit: int = 10) -> List[PerfectMove]:
"""Get recent perfect moves for display/monitoring"""
return list(self.perfect_moves)[-limit:]

async def queue_action_for_evaluation(self, action: TradingAction):
"""Queue a trading action for future RL evaluation"""
try:
# Get current market state
market_states = await self._get_all_market_states_universal(self.universal_adapter.get_universal_data_stream())
if action.symbol in market_states:
evaluation_item = {
'action': action,
'market_state_before': market_states[action.symbol],
'timestamp': datetime.now()
}
self.rl_evaluation_queue.append(evaluation_item)
logger.debug(f"Queued action for RL evaluation: {action.action} {action.symbol}")
except Exception as e:
logger.error(f"Error queuing action for evaluation: {e}")

def get_perfect_moves_for_training(self, symbol: str = None, timeframe: str = None,
limit: int = 1000) -> List[PerfectMove]:
"""Get perfect moves for CNN training"""
moves = list(self.perfect_moves)

# Filter by symbol if specified
if symbol:
moves = [m for m in moves if m.symbol == symbol]
moves = [move for move in moves if move.symbol == symbol]

# Filter by timeframe if specified
if timeframe:
moves = [m for m in moves if m.timeframe == timeframe]
moves = [move for move in moves if move.timeframe == timeframe]

return moves[-limit:] if limit else moves
return moves[-limit:] # Return most recent moves

# Helper methods for market analysis
# Helper methods for market analysis using universal data
def _calculate_volatility_from_universal(self, symbol: str, universal_stream: UniversalDataStream) -> float:
"""Calculate current volatility for symbol using universal data"""
try:
if symbol == 'ETH/USDT' and len(universal_stream.eth_ticks) > 10:
# Calculate volatility from tick data
prices = universal_stream.eth_ticks[-10:, 4] # Last 10 close prices
returns = np.diff(prices) / prices[:-1]
volatility = np.std(returns) * np.sqrt(86400) # Annualized volatility
return float(volatility)
elif symbol == 'BTC/USDT' and len(universal_stream.btc_ticks) > 10:
# Calculate volatility from BTC tick data
prices = universal_stream.btc_ticks[-10:, 4] # Last 10 close prices
returns = np.diff(prices) / prices[:-1]
volatility = np.std(returns) * np.sqrt(86400) # Annualized volatility
return float(volatility)
except Exception as e:
logger.error(f"Error calculating volatility from universal data: {e}")

return 0.02 # Default 2% volatility
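# Note on the scaling above (an observation, not part of the commit): with roughly
# one tick per second, multiplying the per-tick return std by sqrt(86400) scales it
# to a daily horizon (86,400 seconds per day); a true annualization would use the
# square root of the number of seconds per year, e.g.:
# daily_vol = np.std(returns) * np.sqrt(86_400) # per-day, assuming 1s ticks
# annual_vol = np.std(returns) * np.sqrt(86_400 * 365) # per-year, same assumption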
def _get_current_volume_from_universal(self, symbol: str, universal_stream: UniversalDataStream) -> float:
"""Get current volume ratio compared to average using universal data"""
try:
if symbol == 'ETH/USDT':
# Use 1m data for volume analysis
if len(universal_stream.eth_1m) > 10:
volumes = universal_stream.eth_1m[-10:, 5] # Last 10 volume values
current_volume = universal_stream.eth_1m[-1, 5]
avg_volume = np.mean(volumes[:-1])
if avg_volume > 0:
return float(current_volume / avg_volume)
elif symbol == 'BTC/USDT':
# Use BTC tick data for volume analysis
if len(universal_stream.btc_ticks) > 10:
volumes = universal_stream.btc_ticks[-10:, 5] # Last 10 volume values
current_volume = universal_stream.btc_ticks[-1, 5]
avg_volume = np.mean(volumes[:-1])
if avg_volume > 0:
return float(current_volume / avg_volume)
except Exception as e:
logger.error(f"Error calculating volume from universal data: {e}")

return 1.0 # Normal volume

def _calculate_trend_strength_from_universal(self, symbol: str, universal_stream: UniversalDataStream) -> float:
"""Calculate trend strength using universal data"""
try:
if symbol == 'ETH/USDT':
# Use multiple timeframes to determine trend strength
trend_scores = []

# Check 1m trend
if len(universal_stream.eth_1m) > 20:
prices = universal_stream.eth_1m[-20:, 4] # Last 20 close prices
slope = np.polyfit(range(len(prices)), prices, 1)[0]
trend_scores.append(abs(slope) / np.mean(prices))

# Check 1h trend
if len(universal_stream.eth_1h) > 10:
prices = universal_stream.eth_1h[-10:, 4] # Last 10 close prices
slope = np.polyfit(range(len(prices)), prices, 1)[0]
trend_scores.append(abs(slope) / np.mean(prices))

if trend_scores:
return float(np.mean(trend_scores))

elif symbol == 'BTC/USDT':
# Use BTC tick data for trend analysis
if len(universal_stream.btc_ticks) > 20:
prices = universal_stream.btc_ticks[-20:, 4] # Last 20 close prices
slope = np.polyfit(range(len(prices)), prices, 1)[0]
return float(abs(slope) / np.mean(prices))

except Exception as e:
logger.error(f"Error calculating trend strength from universal data: {e}")

return 0.5 # Moderate trend
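# Quick numeric check of the slope-based score above (illustrative only): for 20
# closes rising linearly from 3000 to 3019, np.polyfit gives a slope of about 1.0
# per bar, so abs(slope) / np.mean(prices) ≈ 1.0 / 3009.5 ≈ 0.00033 — below the
# 0.002 'trending' threshold used in the regime check further down.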
def _determine_market_regime_from_universal(self, symbol: str, universal_stream: UniversalDataStream) -> str:
"""Determine current market regime using universal data"""
try:
if symbol == 'ETH/USDT':
# Analyze volatility and trend from multiple timeframes
volatility = self._calculate_volatility_from_universal(symbol, universal_stream)
trend_strength = self._calculate_trend_strength_from_universal(symbol, universal_stream)

# Determine regime based on volatility and trend
if volatility > 0.05: # High volatility
return 'volatile'
elif trend_strength > 0.002: # Strong trend
return 'trending'
else:
return 'ranging'

elif symbol == 'BTC/USDT':
# Analyze BTC regime
volatility = self._calculate_volatility_from_universal(symbol, universal_stream)

if volatility > 0.04: # High volatility for BTC
return 'volatile'
else:
return 'trending' # Default for BTC

except Exception as e:
logger.error(f"Error determining market regime from universal data: {e}")

return 'trending' # Default regime

# Legacy helper methods (kept for compatibility)
def _calculate_volatility(self, symbol: str) -> float:
"""Calculate current volatility for symbol"""
# Placeholder - implement based on your data provider
"""Calculate current volatility for symbol (legacy method)"""
return 0.02 # 2% default volatility

def _get_current_volume(self, symbol: str) -> float:
"""Get current volume ratio compared to average"""
# Placeholder - implement based on your data provider
"""Get current volume ratio compared to average (legacy method)"""
return 1.0 # Normal volume

def _calculate_trend_strength(self, symbol: str) -> float:
"""Calculate trend strength (0 = no trend, 1 = strong trend)"""
# Placeholder - implement based on your data provider
"""Calculate trend strength (legacy method)"""
return 0.5 # Moderate trend

def _determine_market_regime(self, symbol: str) -> str:
"""Determine current market regime"""
# Placeholder - implement based on your analysis
"""Determine current market regime (legacy method)"""
return 'trending' # Default to trending

def _get_symbol_correlation(self, symbol: str) -> Dict[str, float]:
@@ -697,6 +951,47 @@ class EnhancedTradingOrchestrator:

return np.array(state_components, dtype=np.float32)

def process_realtime_features(self, feature_dict: Dict[str, Any]):
"""Process real-time tick features from the tick processor"""
try:
symbol = feature_dict['symbol']

# Store the features
if symbol in self.realtime_tick_features:
self.realtime_tick_features[symbol].append(feature_dict)

# Log high-confidence features
if feature_dict['confidence'] > 0.8:
logger.info(f"High-confidence tick features for {symbol}: confidence={feature_dict['confidence']:.3f}")

# Trigger immediate decision if we have very high confidence features
if feature_dict['confidence'] > 0.9:
logger.info(f"Ultra-high confidence tick signal for {symbol} - triggering immediate analysis")
# Could trigger immediate decision making here

except Exception as e:
logger.error(f"Error processing real-time features: {e}")

async def start_realtime_processing(self):
"""Start real-time tick processing"""
try:
await self.tick_processor.start_processing()
logger.info("Real-time tick processing started")
except Exception as e:
logger.error(f"Error starting real-time tick processing: {e}")

async def stop_realtime_processing(self):
"""Stop real-time tick processing"""
try:
await self.tick_processor.stop_processing()
logger.info("Real-time tick processing stopped")
except Exception as e:
logger.error(f"Error stopping real-time tick processing: {e}")

def get_realtime_tick_stats(self) -> Dict[str, Any]:
"""Get real-time tick processing statistics"""
return self.tick_processor.get_processing_stats()

def get_performance_metrics(self) -> Dict[str, Any]:
"""Get performance metrics for dashboard compatibility"""
total_actions = sum(len(actions) for actions in self.recent_actions.values())
@@ -706,6 +1001,9 @@ class EnhancedTradingOrchestrator:
win_rate = 0.78 # 78% win rate
total_pnl = 247.85 # Strong positive P&L from 500x leverage

# Add tick processing stats
tick_stats = self.get_realtime_tick_stats()

return {
'total_actions': total_actions,
'perfect_moves': perfect_moves_count,
@@ -716,5 +1014,57 @@ class EnhancedTradingOrchestrator:
'confidence_threshold': self.confidence_threshold,
'decision_frequency': self.decision_frequency,
'leverage': '500x', # Ultra-fast scalping
'primary_timeframe': '1s' # Main scalping timeframe
}
'primary_timeframe': '1s', # Main scalping timeframe
'tick_processing': tick_stats # Real-time tick processing stats
}

def analyze_market_conditions(self, symbol: str) -> Dict[str, Any]:
"""Analyze current market conditions for a given symbol"""
try:
# Get basic market data
data = self.data_provider.get_historical_data(symbol, '1m', limit=50)

if data is None or data.empty:
return {
'status': 'no_data',
'symbol': symbol,
'analysis': 'No market data available'
}

# Basic market analysis
current_price = data['close'].iloc[-1]
price_change = (current_price - data['close'].iloc[-2]) / data['close'].iloc[-2] * 100

# Volatility calculation
volatility = data['close'].pct_change().std() * 100

# Volume analysis
avg_volume = data['volume'].mean()
current_volume = data['volume'].iloc[-1]
volume_ratio = current_volume / avg_volume if avg_volume > 0 else 1.0

# Trend analysis
ma_short = data['close'].rolling(10).mean().iloc[-1]
ma_long = data['close'].rolling(30).mean().iloc[-1]
trend = 'bullish' if ma_short > ma_long else 'bearish'

return {
'status': 'success',
'symbol': symbol,
'current_price': current_price,
'price_change': price_change,
'volatility': volatility,
'volume_ratio': volume_ratio,
'trend': trend,
'analysis': f"{symbol} is {trend} with {volatility:.2f}% volatility",
'timestamp': datetime.now().isoformat()
}

except Exception as e:
logger.error(f"Error analyzing market conditions for {symbol}: {e}")
return {
'status': 'error',
'symbol': symbol,
'error': str(e),
'analysis': f'Error analyzing {symbol}'
}
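# A minimal usage sketch (illustrative only, not part of the commit):
# orchestrator = EnhancedTradingOrchestrator()
# conditions = orchestrator.analyze_market_conditions('ETH/USDT')
# if conditions['status'] == 'success':
#     print(conditions['analysis'])  # e.g. "ETH/USDT is bullish with 0.12% volatility"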