t perd viz - wip

Author: Dobromir Popov
Date: 2025-11-19 18:46:09 +02:00
parent df5f9b47f2
commit feb6cec275
8 changed files with 919 additions and 9 deletions


@@ -107,7 +107,7 @@ class BacktestRunner:
         self.lock = threading.Lock()
 
     def start_backtest(self, backtest_id: str, model, data_provider, symbol: str, timeframe: str,
-                       start_time: Optional[str] = None, end_time: Optional[str] = None):
+                       orchestrator=None, start_time: Optional[str] = None, end_time: Optional[str] = None):
         """Start backtest in background thread"""
         # Initialize backtest state
@@ -122,22 +122,34 @@ class BacktestRunner:
             'new_predictions': [],
             'position': None,  # {'type': 'long/short', 'entry_price': float, 'entry_time': str}
             'error': None,
-            'stop_requested': False
+            'stop_requested': False,
+            'orchestrator': orchestrator,
+            'symbol': symbol
         }
 
+        # Clear previous predictions from orchestrator
+        if orchestrator and hasattr(orchestrator, 'recent_transformer_predictions'):
+            if symbol in orchestrator.recent_transformer_predictions:
+                orchestrator.recent_transformer_predictions[symbol].clear()
+            if symbol in orchestrator.recent_cnn_predictions:
+                orchestrator.recent_cnn_predictions[symbol].clear()
+            if symbol in orchestrator.recent_dqn_predictions:
+                orchestrator.recent_dqn_predictions[symbol].clear()
+            logger.info(f"Cleared previous predictions for backtest on {symbol}")
+
         with self.lock:
             self.active_backtests[backtest_id] = state
 
         # Run backtest in background thread
         thread = threading.Thread(
             target=self._run_backtest,
-            args=(backtest_id, model, data_provider, symbol, timeframe, start_time, end_time)
+            args=(backtest_id, model, data_provider, symbol, timeframe, orchestrator, start_time, end_time)
         )
         thread.daemon = True
         thread.start()
 
     def _run_backtest(self, backtest_id: str, model, data_provider, symbol: str, timeframe: str,
-                      start_time: Optional[str] = None, end_time: Optional[str] = None):
+                      orchestrator=None, start_time: Optional[str] = None, end_time: Optional[str] = None):
         """Execute backtest candle-by-candle"""
         try:
             state = self.active_backtests[backtest_id]
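Note that the prediction-clearing block in this hunk guards only on recent_transformer_predictions before also touching the CNN and DQN collections. A minimal defensive sketch of the same idea, assuming each collection is a plain dict of per-symbol deques as the diff suggests (the helper name is illustrative, not part of this commit):

def clear_symbol_predictions(orchestrator, symbol: str) -> None:
    """Illustrative helper: clear cached per-symbol prediction deques.

    Mirrors the clearing block in the diff, but checks each collection
    independently so a missing attribute is skipped instead of raising
    AttributeError.
    """
    for attr in ("recent_transformer_predictions",
                 "recent_cnn_predictions",
                 "recent_dqn_predictions"):
        collection = getattr(orchestrator, attr, None)
        if collection and symbol in collection:
            collection[symbol].clear()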
@@ -203,10 +215,51 @@ class BacktestRunner:
                     'price': current_price,
                     'action': prediction['action'],
                     'confidence': prediction['confidence'],
-                    'timeframe': timeframe
+                    'timeframe': timeframe,
+                    'current_price': current_price
                 }
                 state['new_predictions'].append(pred_data)
 
+                # Store in orchestrator for visualization
+                if orchestrator and hasattr(orchestrator, 'store_transformer_prediction'):
+                    # Determine model type from model class name
+                    model_type = model.__class__.__name__.lower()
+
+                    # Store in appropriate prediction collection
+                    if 'transformer' in model_type:
+                        orchestrator.store_transformer_prediction(symbol, {
+                            'timestamp': current_time,
+                            'current_price': current_price,
+                            'predicted_price': current_price * (1.01 if prediction['action'] == 'BUY' else 0.99),
+                            'price_change': 1.0 if prediction['action'] == 'BUY' else -1.0,
+                            'confidence': prediction['confidence'],
+                            'action': prediction['action'],
+                            'horizon_minutes': 10
+                        })
+                    elif 'cnn' in model_type:
+                        if hasattr(orchestrator, 'recent_cnn_predictions'):
+                            if symbol not in orchestrator.recent_cnn_predictions:
+                                from collections import deque
+                                orchestrator.recent_cnn_predictions[symbol] = deque(maxlen=50)
+                            orchestrator.recent_cnn_predictions[symbol].append({
+                                'timestamp': current_time,
+                                'current_price': current_price,
+                                'predicted_price': current_price * (1.01 if prediction['action'] == 'BUY' else 0.99),
+                                'confidence': prediction['confidence'],
+                                'direction': 2 if prediction['action'] == 'BUY' else 0
+                            })
+                    elif 'dqn' in model_type or 'rl' in model_type:
+                        if hasattr(orchestrator, 'recent_dqn_predictions'):
+                            if symbol not in orchestrator.recent_dqn_predictions:
+                                from collections import deque
+                                orchestrator.recent_dqn_predictions[symbol] = deque(maxlen=100)
+                            orchestrator.recent_dqn_predictions[symbol].append({
+                                'timestamp': current_time,
+                                'current_price': current_price,
+                                'action': prediction['action'],
+                                'confidence': prediction['confidence']
+                            })
+
                 # Execute trade logic
                 self._execute_trade_logic(state, prediction, current_price, current_time)
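The storage block in this hunk routes each backtest prediction by model class name and builds a slightly different payload per model family. A condensed sketch of that routing, assuming the orchestrator attributes used in the diff exist as dicts of per-symbol deques (the helper and its consolidated payload are illustrative, not part of this commit):

from collections import deque


def store_backtest_prediction(orchestrator, model, symbol, prediction,
                              current_time, current_price):
    """Illustrative sketch of the per-model routing used in the diff."""
    model_type = model.__class__.__name__.lower()
    is_buy = prediction['action'] == 'BUY'

    # Fields shared by every model family.
    record = {
        'timestamp': current_time,
        'current_price': current_price,
        'confidence': prediction['confidence'],
        'action': prediction['action'],
    }

    if 'transformer' in model_type and hasattr(orchestrator, 'store_transformer_prediction'):
        record.update({
            'predicted_price': current_price * (1.01 if is_buy else 0.99),
            'price_change': 1.0 if is_buy else -1.0,
            'horizon_minutes': 10,
        })
        orchestrator.store_transformer_prediction(symbol, record)
        return

    # CNN and DQN records go straight into per-symbol deques on the orchestrator.
    if 'cnn' in model_type:
        attr, maxlen = 'recent_cnn_predictions', 50
        record.update({
            'predicted_price': current_price * (1.01 if is_buy else 0.99),
            'direction': 2 if is_buy else 0,
        })
    elif 'dqn' in model_type or 'rl' in model_type:
        attr, maxlen = 'recent_dqn_predictions', 100
    else:
        return

    store = getattr(orchestrator, attr, None)
    if store is not None:
        store.setdefault(symbol, deque(maxlen=maxlen)).append(record)

The transformer path goes through store_transformer_prediction, which presumably feeds recent_transformer_predictions; the CNN and DQN paths append directly to their deques, which is what the new /api/live-updates route reads back out.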
@@ -1678,6 +1731,7 @@ class AnnotationDashboard:
                 data_provider=self.data_provider,
                 symbol=symbol,
                 timeframe=timeframe,
+                orchestrator=self.orchestrator,
                 start_time=start_time,
                 end_time=end_time
             )
@@ -2024,6 +2078,81 @@ class AnnotationDashboard:
                     }
                 })
 
+        @self.server.route('/api/live-updates', methods=['POST'])
+        def get_live_updates():
+            """Get live chart and prediction updates (polling endpoint)"""
+            try:
+                data = request.get_json()
+                symbol = data.get('symbol', 'ETH/USDT')
+                timeframe = data.get('timeframe', '1m')
+
+                response = {
+                    'success': True,
+                    'chart_update': None,
+                    'prediction': None
+                }
+
+                # Get latest candle for the requested timeframe
+                if self.orchestrator and self.orchestrator.data_provider:
+                    try:
+                        # Get latest candle
+                        ohlcv_data = self.orchestrator.data_provider.get_ohlcv_data(symbol, timeframe, limit=1)
+                        if ohlcv_data and len(ohlcv_data) > 0:
+                            latest_candle = ohlcv_data[-1]
+                            response['chart_update'] = {
+                                'symbol': symbol,
+                                'timeframe': timeframe,
+                                'candle': {
+                                    'timestamp': latest_candle[0],
+                                    'open': float(latest_candle[1]),
+                                    'high': float(latest_candle[2]),
+                                    'low': float(latest_candle[3]),
+                                    'close': float(latest_candle[4]),
+                                    'volume': float(latest_candle[5])
+                                }
+                            }
+                    except Exception as e:
+                        logger.debug(f"Error getting latest candle: {e}")
+
+                # Get latest model predictions
+                if self.orchestrator:
+                    try:
+                        # Get latest predictions from orchestrator
+                        predictions = {}
+
+                        # DQN predictions
+                        if hasattr(self.orchestrator, 'recent_dqn_predictions') and symbol in self.orchestrator.recent_dqn_predictions:
+                            dqn_preds = list(self.orchestrator.recent_dqn_predictions[symbol])
+                            if dqn_preds:
+                                predictions['dqn'] = dqn_preds[-1]
+
+                        # CNN predictions
+                        if hasattr(self.orchestrator, 'recent_cnn_predictions') and symbol in self.orchestrator.recent_cnn_predictions:
+                            cnn_preds = list(self.orchestrator.recent_cnn_predictions[symbol])
+                            if cnn_preds:
+                                predictions['cnn'] = cnn_preds[-1]
+
+                        # Transformer predictions
+                        if hasattr(self.orchestrator, 'recent_transformer_predictions') and symbol in self.orchestrator.recent_transformer_predictions:
+                            transformer_preds = list(self.orchestrator.recent_transformer_predictions[symbol])
+                            if transformer_preds:
+                                predictions['transformer'] = transformer_preds[-1]
+
+                        if predictions:
+                            response['prediction'] = predictions
+                    except Exception as e:
+                        logger.debug(f"Error getting predictions: {e}")
+
+                return jsonify(response)
+
+            except Exception as e:
+                logger.error(f"Error in live updates: {e}")
+                return jsonify({
+                    'success': False,
+                    'error': str(e)
+                })
+
         @self.server.route('/api/realtime-inference/signals', methods=['GET'])
         def get_realtime_signals():
             """Get latest real-time inference signals"""