listen to all IPs

Author: Dobromir Popov
Date: 2025-12-08 21:36:07 +02:00
parent 81a7f27d2d
commit 1ab1c02889
3 changed files with 82 additions and 8 deletions

View File

@@ -3207,6 +3207,12 @@ class RealTrainingAdapter:
                 if 'trend_vector' in outputs:
                     result_dict['trend_vector'] = outputs['trend_vector']

+                # DEBUG: Log if we have predicted candles
+                if predicted_candles_denorm:
+                    logger.info(f"🔮 Generated prediction with {len(predicted_candles_denorm)} timeframe candles: {list(predicted_candles_denorm.keys())}")
+                else:
+                    logger.warning("⚠️ No predicted candles in model output!")
+
                 return result_dict

             return None
@@ -3993,6 +3999,7 @@ class RealTrainingAdapter:
                         predicted_candle_clean[tf] = candle_data

                     prediction_data['predicted_candle'] = predicted_candle_clean
+                    logger.info(f"📊 Storing prediction with ghost candles for {len(predicted_candle_clean)} timeframes: {list(predicted_candle_clean.keys())}")

                 # Use actual predicted price from candle close (ensure it's a Python float)
                 predicted_price_val = None
@@ -4011,6 +4018,7 @@ class RealTrainingAdapter:
                     prediction_data['price_change'] = 1.0 if prediction['action'] == 'BUY' else -1.0
                 else:
                     # Fallback to estimated price if no candle prediction
+                    logger.warning(f"!!! No predicted_candle in prediction object - ghost candles will not appear!")
                     prediction_data['predicted_price'] = prediction.get('predicted_price', current_price * (1.01 if prediction['action'] == 'BUY' else 0.99))
                     prediction_data['price_change'] = 1.0 if prediction['action'] == 'BUY' else -1.0
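For orientation, a hypothetical sketch of the payload the two hunks above are building. The field names come from this diff; the timeframe keys, value layout, and numbers are illustrative assumptions, not the real schema.

    # Illustrative only - keys and shapes are assumptions based on this diff.
    predicted_candle_clean = {
        '1m': {'open': 3050.0, 'high': 3062.5, 'low': 3048.1, 'close': 3060.2, 'volume': 1250.0},
        '1h': {'open': 3010.0, 'high': 3075.0, 'low': 3005.0, 'close': 3060.2, 'volume': 18500.0},
    }

    prediction_data = {
        'predicted_candle': predicted_candle_clean,                # ghost candles per timeframe
        'predicted_price': predicted_candle_clean['1m']['close'],  # actual predicted close, as a Python float
        'price_change': 1.0,                                       # +1.0 for BUY, -1.0 for SELL (fallback path above)
    }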

View File

@@ -1384,6 +1384,61 @@ class AnnotationDashboard:
                     'error': {'code': 'RECALC_ERROR', 'message': str(e)}
                 })

+        @self.server.route('/api/chart-data', methods=['GET'])
+        def get_chart_data_get():
+            """GET endpoint for chart data (used by initial chart load)"""
+            try:
+                symbol = request.args.get('symbol', 'ETH/USDT')
+                timeframe = request.args.get('timeframe', '1m')
+                limit = int(request.args.get('limit', 2500))
+
+                webui_logger.info(f"Chart data GET request: {symbol} {timeframe} limit={limit}")
+
+                if not self.data_loader:
+                    return jsonify({
+                        'success': False,
+                        'error': {'code': 'DATA_LOADER_UNAVAILABLE', 'message': 'Data loader not available'}
+                    })
+
+                # Fetch data using data loader
+                df = self.data_loader.get_data(
+                    symbol=symbol,
+                    timeframe=timeframe,
+                    limit=limit,
+                    direction='latest'
+                )
+
+                if df is not None and not df.empty:
+                    webui_logger.info(f" {timeframe}: {len(df)} candles")
+
+                    # Get pivot points
+                    pivot_markers = {}
+                    if len(df) >= 50:
+                        pivot_markers = self._get_pivot_markers_for_timeframe(symbol, timeframe, df)
+
+                    chart_data = {
+                        timeframe: {
+                            'timestamps': df.index.strftime('%Y-%m-%d %H:%M:%S').tolist(),
+                            'open': df['open'].tolist(),
+                            'high': df['high'].tolist(),
+                            'low': df['low'].tolist(),
+                            'close': df['close'].tolist(),
+                            'volume': df['volume'].tolist(),
+                            'pivot_markers': pivot_markers
+                        }
+                    }
+
+                    return jsonify({'success': True, 'data': chart_data})
+                else:
+                    return jsonify({
+                        'success': False,
+                        'error': {'code': 'NO_DATA', 'message': f'No data available for {symbol} {timeframe}'}
+                    })
+
+            except Exception as e:
+                webui_logger.error(f"Error in chart-data GET: {e}")
+                return jsonify({'success': False, 'error': {'code': 'ERROR', 'message': str(e)}})
+
         @self.server.route('/api/chart-data', methods=['POST'])
         def get_chart_data():
             """Get chart data for specified symbol and timeframes with infinite scroll support"""
@@ -2405,13 +2460,17 @@ class AnnotationDashboard:
                     }
                 })

-        @self.server.route('/api/live-updates', methods=['POST'])
+        @self.server.route('/api/live-updates', methods=['GET', 'POST'])
         def get_live_updates():
             """Get live chart and prediction updates (polling endpoint)"""
             try:
-                data = request.get_json()
-                symbol = data.get('symbol', 'ETH/USDT')
-                timeframe = data.get('timeframe', '1m')
+                # Support both GET and POST
+                if request.method == 'POST':
+                    data = request.get_json() or {}
+                else:
+                    data = {}
+
+                symbol = data.get('symbol', request.args.get('symbol', 'ETH/USDT'))
+                timeframe = data.get('timeframe', request.args.get('timeframe', '1m'))

                 response = {
                     'success': True,
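With the route now accepting both methods, either call style below should work. This is a sketch under the same localhost:8051 assumption, not output from the real client code.

    import requests

    url = 'http://localhost:8051/api/live-updates'   # host/port assumed

    # New GET style: parameters travel in the query string (request.args).
    via_get = requests.get(url, params={'symbol': 'ETH/USDT', 'timeframe': '1m'}, timeout=10)

    # Original POST style: parameters travel in the JSON body (request.get_json()).
    via_post = requests.post(url, json={'symbol': 'ETH/USDT', 'timeframe': '1m'}, timeout=10)

    print(via_get.json()['success'], via_post.json()['success'])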
@@ -3243,9 +3302,11 @@ class AnnotationDashboard:
         except Exception as e:
             logger.debug(f"Error clearing old cached predictions: {e}")

-    def run(self, host='127.0.0.1', port=8051, debug=False):
-        """Run the application"""
+    def run(self, host='0.0.0.0', port=8051, debug=False):
+        """Run the application - binds to all interfaces by default"""
         logger.info(f"Starting Annotation Dashboard on http://{host}:{port}")
+        logger.info(f"Access locally at: http://localhost:{port}")
+        logger.info(f"Access from network at: http://<your-ip>:{port}")

         if self.has_socketio:
             logger.info("Running with WebSocket support (SocketIO)")

View File

@@ -599,10 +599,11 @@ class AdvancedTradingTransformer(nn.Module):
             batched_tfs = stacked_tfs.reshape(batch_size * num_tfs, seq_len, self.config.d_model)

             # Apply single cross-timeframe attention layer
-            batched_tfs = self.cross_timeframe_layer(batched_tfs)
+            # Use new variable to avoid inplace modification issues
+            cross_tf_encoded = self.cross_timeframe_layer(batched_tfs)

             # Reshape back: [batch*num_tfs, seq_len, d_model] -> [batch, num_tfs, seq_len, d_model]
-            cross_tf_output = batched_tfs.reshape(batch_size, num_tfs, seq_len, self.config.d_model)
+            cross_tf_output = cross_tf_encoded.reshape(batch_size, num_tfs, seq_len, self.config.d_model)

             # Average across timeframes to get unified representation
             # [batch, num_tfs, seq_len, d_model] -> [batch, seq_len, d_model]
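A self-contained sketch, separate from the model code, of the failure mode the comment above guards against: autograd saves tensors for the backward pass, an in-place write to a saved tensor bumps its version counter and makes backward() raise, while writing results to a fresh name leaves the saved tensor untouched.

    import torch

    x = torch.randn(4, 8, requires_grad=True)
    y = x * 2            # stand-in for the cross-timeframe layer output
    z = y.pow(2)         # autograd saves y to compute pow's backward

    # y.add_(1.0)        # an in-place write here would bump y's version counter and
                         # make the backward() call below fail with a RuntimeError

    y_shifted = y + 1.0  # out-of-place: a new tensor, the saved y is untouched
    (z.sum() + y_shifted.sum()).backward()
    print(x.grad.shape)  # torch.Size([4, 8])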
@@ -1347,6 +1348,10 @@ class TradingTransformerTrainer:
             if param.grad is not None:
                 param.grad = None

+        # Clear CUDA cache to prevent tensor version conflicts
+        if torch.cuda.is_available():
+            torch.cuda.empty_cache()
+
         # OPTIMIZATION: Only move batch to device if not already there
         # Check if first tensor is already on correct device
         needs_transfer = False
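As a recap of the pattern in this hunk, a hypothetical helper (names are not from the real trainer) that clears gradients by dropping them, empties the CUDA allocator cache, and skips device transfers for tensors that are already in place:

    import torch

    def prepare_step(model: torch.nn.Module, batch: dict, device: torch.device) -> dict:
        # Dropping .grad frees gradient memory without an in-place zeroing pass.
        for param in model.parameters():
            if param.grad is not None:
                param.grad = None

        # Release cached allocator blocks between steps (no-op without CUDA).
        if torch.cuda.is_available():
            torch.cuda.empty_cache()

        # Only move tensors that are not already on the target device.
        return {
            key: value.to(device)
            if isinstance(value, torch.Tensor) and value.device != device
            else value
            for key, value in batch.items()
        }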