More accuracy stats and prediction storage; chart optimisation

This commit is contained in:
Dobromir Popov
2025-12-10 11:02:36 +02:00
parent 2fea288f62
commit 1d49269301
4 changed files with 229 additions and 12 deletions

View File

@@ -2843,6 +2843,11 @@ class AnnotationDashboard:
metrics['loss'] = rt_metrics.get('last_loss', 0.0)
metrics['accuracy'] = rt_metrics.get('last_accuracy', 0.0)
metrics['steps'] = rt_metrics.get('total_steps', 0)
# Add best checkpoint metrics
metrics['best_loss'] = rt_metrics.get('best_loss', float('inf'))
metrics['best_accuracy'] = rt_metrics.get('best_accuracy', 0.0)
if metrics['best_loss'] == float('inf'):
metrics['best_loss'] = None
# Get incremental training metrics
if hasattr(self, '_incremental_training_steps'):
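Note: the best_loss conversion above exists because float('inf') (the sentinel for "no checkpoint yet") is not representable in standard JSON, so the API exposes it as None and the UI keeps its "--" placeholder. A minimal sketch of the idea; the sanitize_loss helper below is hypothetical and not part of the dashboard code:

import json
import math

def sanitize_loss(value):
    # Infinity means "no best checkpoint recorded yet"; expose it as null in the API.
    if isinstance(value, float) and math.isinf(value):
        return None
    return value

print(json.dumps({"best_loss": sanitize_loss(float("inf"))}))  # {"best_loss": null}
print(json.dumps({"best_loss": sanitize_loss(0.0123)}))        # {"best_loss": 0.0123}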
@@ -2877,6 +2882,8 @@ class AnnotationDashboard:
'error': str(e)
}), 500
@self.server.route('/api/realtime-inference/train-manual', methods=['POST'])
def train_manual():
"""Manually trigger training on current candle with specified action"""

View File

@@ -15,12 +15,17 @@ class ChartManager {
this.lastPredictionUpdate = {}; // Track last prediction update per timeframe
this.predictionUpdateThrottle = 500; // Min ms between prediction updates
this.lastPredictionHash = null; // Track if predictions actually changed
this.ghostCandleHistory = {}; // Store ghost candles per timeframe (max 50 each)
this.ghostCandleHistory = {}; // Store ghost candles per timeframe (max 150 each)
this.maxGhostCandles = 150; // Maximum number of ghost candles to keep
this.modelAccuracyMetrics = {}; // Track overall model accuracy per timeframe
this.predictionHistory = []; // Store last 20 predictions with fading
this.maxPredictions = 20; // Maximum number of predictions to display
// PERFORMANCE: Debounced updates and batching
this.pendingUpdates = {};
this.updateDebounceMs = 100; // 100ms debounce for chart updates
this.batchSize = 10; // Max traces to add in one batch
// Prediction display toggles (all enabled by default)
this.displayToggles = {
ghostCandles: true,
@@ -182,6 +187,14 @@ class ChartManager {
}, 1000);
}
// PERFORMANCE: Periodic cleanup every 30 seconds
this.cleanupTimer = setInterval(() => {
this._performPeriodicCleanup();
}, 30000); // 30 seconds
// PERFORMANCE: Optimize Plotly rendering
this._optimizePlotlyConfig();
console.log('Auto-update enabled for:', Object.keys(this.updateTimers));
}
@@ -199,9 +212,135 @@ class ChartManager {
Object.values(this.updateTimers).forEach(timer => clearInterval(timer));
this.updateTimers = {};
// Clear cleanup timer
if (this.cleanupTimer) {
clearInterval(this.cleanupTimer);
this.cleanupTimer = null;
}
console.log('Auto-update stopped');
}
/**
* Periodic cleanup to prevent memory bloat and chart lag
*/
_performPeriodicCleanup() {
console.log('[Cleanup] Starting periodic cleanup...');
// Clean up ghost candles
Object.keys(this.ghostCandleHistory).forEach(timeframe => {
if (this.ghostCandleHistory[timeframe]) {
const before = this.ghostCandleHistory[timeframe].length;
this.ghostCandleHistory[timeframe] = this.ghostCandleHistory[timeframe].slice(-this.maxGhostCandles);
const after = this.ghostCandleHistory[timeframe].length;
if (before > after) {
console.log(`[Cleanup] ${timeframe}: Removed ${before - after} old ghost candles`);
}
}
});
// Clean up prediction history
if (this.predictionHistory.length > this.maxPredictions) {
const before = this.predictionHistory.length;
this.predictionHistory = this.predictionHistory.slice(-this.maxPredictions);
console.log(`[Cleanup] Removed ${before - this.predictionHistory.length} old predictions`);
}
// Clean up chart traces (remove old prediction traces)
Object.keys(this.charts).forEach(timeframe => {
const chart = this.charts[timeframe];
if (chart && chart.element) {
const plotElement = document.getElementById(chart.plotId);
if (plotElement && plotElement.data) {
const traces = plotElement.data;
// Keep only first 2 traces (candlestick + volume) and last 10 prediction traces
if (traces.length > 12) {
const keepTraces = traces.slice(0, 2).concat(traces.slice(-10));
const removed = traces.length - keepTraces.length;
if (removed > 0) {
console.log(`[Cleanup] ${timeframe}: Removed ${removed} old chart traces`);
Plotly.react(chart.plotId, keepTraces, plotElement.layout, plotElement.config);
}
}
}
}
});
console.log('[Cleanup] Periodic cleanup completed');
}
/**
* PERFORMANCE: Debounced chart update to prevent excessive redraws
*/
_debouncedChartUpdate(timeframe, updateFn) {
// Clear existing timeout for this timeframe
if (this.pendingUpdates[timeframe]) {
clearTimeout(this.pendingUpdates[timeframe]);
}
// Set new timeout
this.pendingUpdates[timeframe] = setTimeout(() => {
updateFn();
delete this.pendingUpdates[timeframe];
}, this.updateDebounceMs);
}
/**
* PERFORMANCE: Batch trace operations to reduce Plotly calls
*/
_batchAddTraces(plotId, traces) {
if (traces.length === 0) return;
// Add traces in batches to prevent UI blocking
const batches = [];
for (let i = 0; i < traces.length; i += this.batchSize) {
batches.push(traces.slice(i, i + this.batchSize));
}
// Add batches with small delays to keep UI responsive
batches.forEach((batch, index) => {
setTimeout(() => {
Plotly.addTraces(plotId, batch);
}, index * 10); // 10ms delay between batches
});
}
/**
* PERFORMANCE: Optimize Plotly configuration for better performance
*/
_optimizePlotlyConfig() {
// Set global Plotly config for better performance
if (typeof Plotly !== 'undefined') {
Plotly.setPlotConfig({
// Lower the WebGL pixel ratio to reduce GPU work for gl-based traces
plotGlPixelRatio: 1,
// Keep the plot interactive (a static plot would disable zoom/hover entirely)
staticPlot: false,
// Resize charts with their container
responsive: true
});
}
}
/**
* PERFORMANCE: Check if element is visible in viewport
*/
_isElementVisible(element) {
if (!element) return false;
const rect = element.getBoundingClientRect();
const windowHeight = window.innerHeight || document.documentElement.clientHeight;
const windowWidth = window.innerWidth || document.documentElement.clientWidth;
// Element is visible if any part is in viewport
return (
rect.bottom > 0 &&
rect.right > 0 &&
rect.top < windowHeight &&
rect.left < windowWidth
);
}
/**
* Update a single chart with fresh data
*/
@@ -359,12 +498,12 @@ class ChartManager {
});
}
// CRITICAL: Preserve all historical candles - never truncate below 2500
// Only keep last 2500 candles if we exceed that limit (to prevent memory issues)
const maxCandles = 2500;
// PERFORMANCE: Limit to 1200 candles for responsive UI
// Keep only last 1200 candles to prevent memory issues and chart lag
const maxCandles = 1200;
if (chart.data.timestamps.length > maxCandles) {
const excess = chart.data.timestamps.length - maxCandles;
console.log(`[${timeframe}] Truncating ${excess} old candles (keeping last ${maxCandles})`);
console.log(`[${timeframe}] Truncating ${excess} old candles (keeping last ${maxCandles} for performance)`);
chart.data.timestamps = chart.data.timestamps.slice(-maxCandles);
chart.data.open = chart.data.open.slice(-maxCandles);
chart.data.high = chart.data.high.slice(-maxCandles);
@@ -2759,9 +2898,9 @@ class ChartManager {
Plotly.deleteTraces(plotId, indicesToRemove);
}
// Add updated traces
// PERFORMANCE: Use batched trace addition
if (predictionTraces.length > 0) {
Plotly.addTraces(plotId, predictionTraces);
this._batchAddTraces(plotId, predictionTraces);
console.log(`[${timeframe}] Refreshed ${predictionTraces.length} prediction candles with updated accuracy`);
}
}
@@ -2892,8 +3031,10 @@ class ChartManager {
console.log(`[updatePredictions] Drawing predictions on primary timeframe: ${primaryTimeframe}`);
// Update only the primary timeframe
// PERFORMANCE: Use debounced update to prevent excessive redraws
this._debouncedChartUpdate(primaryTimeframe, () => {
this._updatePredictionsForTimeframe(primaryTimeframe, predictions);
});
} catch (error) {
console.error('[updatePredictions] Error:', error);
@@ -2913,6 +3054,13 @@ class ChartManager {
return;
}
// PERFORMANCE: Only update visible charts
const plotElement = document.getElementById(chart.plotId);
if (!plotElement || !this._isElementVisible(plotElement)) {
console.debug(`[updatePredictions] Chart ${timeframe} not visible, skipping update`);
return;
}
// Throttle prediction updates to avoid flickering
const now = Date.now();
const lastUpdate = this.lastPredictionUpdate[timeframe] || 0;
@@ -3175,9 +3323,10 @@ class ChartManager {
return;
}
// Add new traces - these will overlay on top of real candles
// Plotly renders traces in order, so predictions added last appear on top
Plotly.addTraces(plotId, predictionTraces);
// PERFORMANCE: Use batched trace addition for better performance
if (predictionTraces.length > 0) {
this._batchAddTraces(plotId, predictionTraces);
}
// Ensure predictions are visible above real candles by setting z-order
// Update layout to ensure prediction traces are on top

View File

@@ -159,6 +159,11 @@
<div>Last Training: <span id="last-training-time">--</span></div>
</div>
</div>
<div class="mt-1 pt-1 border-top" style="font-size: 0.7rem;">
<div class="text-muted">Best Checkpoint:</div>
<div>Best Loss: <span id="best-checkpoint-loss" class="fw-bold text-warning">--</span></div>
<div>Best Accuracy: <span id="best-checkpoint-accuracy" class="fw-bold text-success">--</span></div>
</div>
</div>
</div>
@@ -1507,6 +1512,18 @@
}
}
// Update best checkpoint metrics
const bestLossEl = document.getElementById('best-checkpoint-loss');
if (bestLossEl && metrics.best_loss !== undefined && metrics.best_loss !== null) {
bestLossEl.textContent = metrics.best_loss.toFixed(4);
}
const bestAccuracyEl = document.getElementById('best-checkpoint-accuracy');
if (bestAccuracyEl && metrics.best_accuracy !== undefined && metrics.best_accuracy !== null) {
const accuracyPct = (metrics.best_accuracy * 100).toFixed(1);
bestAccuracyEl.textContent = accuracyPct + '%';
}
console.log(`[Online Learning] ${metrics.incremental_steps} incremental training steps completed`);
}
}

View File

@@ -2904,10 +2904,54 @@ class TradingOrchestrator:
prediction['timestamp'] = datetime.now()
self.recent_transformer_predictions[symbol].append(prediction)
# EFFICIENT: Store prediction in database at source (before sending to UI)
self._store_prediction_in_database(symbol, prediction, 'transformer')
logger.debug(f"Stored transformer prediction for {symbol}: {prediction.get('action', 'N/A')}")
except Exception as e:
logger.error(f"Error storing transformer prediction: {e}")
def _store_prediction_in_database(self, symbol: str, prediction: Dict, model_type: str):
"""Store prediction in database for later retrieval and training"""
try:
# Extract data from prediction
timestamp = prediction.get('timestamp')
if isinstance(timestamp, datetime):
timestamp_str = timestamp.isoformat()
else:
timestamp_str = str(timestamp)
action = prediction.get('action', 'HOLD')
confidence = prediction.get('confidence', 0.0)
predicted_candle = prediction.get('predicted_candle', {})
predicted_price = prediction.get('predicted_price')
primary_timeframe = prediction.get('primary_timeframe', '1m')
# Store in database if available
if hasattr(self, 'database_manager') and self.database_manager:
try:
prediction_id = self.database_manager.store_prediction(
symbol=symbol,
timeframe=primary_timeframe,
timestamp=timestamp_str,
prediction_type=model_type,
action=action,
confidence=confidence,
predicted_candle=predicted_candle,
predicted_price=predicted_price
)
logger.debug(f"Stored {model_type} prediction in database: {prediction_id}")
except Exception as db_error:
# Fallback: log the prediction (and the failure reason) if the database write fails
logger.info(f"[PREDICTION DB] {symbol} {primary_timeframe} {model_type} {action} {confidence:.2f} @ {timestamp_str} (db error: {db_error})")
else:
# Fallback: log prediction if no database manager
logger.info(f"[PREDICTION LOG] {symbol} {primary_timeframe} {model_type} {action} {confidence:.2f} @ {timestamp_str}")
except Exception as e:
logger.debug(f"Error storing prediction in database: {e}") # Debug level to avoid spam
def clear_predictions(self, symbol: str):
"""Clear all stored predictions for a symbol (useful for backtests)"""
try:
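Note on the storage call above: DatabaseManager.store_prediction is not part of this commit, so its implementation is not shown. A minimal sketch of a compatible method, assuming a plain SQLite backing store; the table name, schema, and UUID-based prediction_id are illustrative assumptions, not the repo's actual schema:

import json
import sqlite3
import uuid

class DatabaseManager:
    """Hypothetical minimal store matching the call signature used in the diff."""

    def __init__(self, db_path: str = "predictions.db"):
        self.conn = sqlite3.connect(db_path)
        self.conn.execute(
            """CREATE TABLE IF NOT EXISTS predictions (
                   id TEXT PRIMARY KEY,
                   symbol TEXT, timeframe TEXT, timestamp TEXT,
                   prediction_type TEXT, action TEXT, confidence REAL,
                   predicted_candle TEXT, predicted_price REAL)"""
        )

    def store_prediction(self, symbol, timeframe, timestamp, prediction_type,
                         action, confidence, predicted_candle, predicted_price):
        prediction_id = str(uuid.uuid4())
        self.conn.execute(
            "INSERT INTO predictions VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (prediction_id, symbol, timeframe, timestamp, prediction_type,
             action, confidence, json.dumps(predicted_candle), predicted_price),
        )
        self.conn.commit()
        return prediction_id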