T predictions WIP
@@ -65,8 +65,10 @@ class LivePivotTrainer:
         # Williams Market Structure for pivot detection
         try:
             from core.williams_market_structure import WilliamsMarketStructure
-            self.williams_1s = WilliamsMarketStructure(num_levels=5)
-            self.williams_1m = WilliamsMarketStructure(num_levels=5)
+            # Fix: WilliamsMarketStructure.__init__ does not accept num_levels
+            # It defaults to 5 levels internally
+            self.williams_1s = WilliamsMarketStructure()
+            self.williams_1m = WilliamsMarketStructure()
             logger.info("Williams Market Structure initialized for pivot detection")
         except Exception as e:
             logger.error(f"Failed to initialize Williams Market Structure: {e}")

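Reviewer note: if any call sites still pass num_levels, construction can tolerate both signatures instead of relying on every caller being updated. A minimal sketch, assuming the import path shown above; the helper name is illustrative and not part of this commit:

# Hypothetical helper, not part of this commit: construct WilliamsMarketStructure
# whether or not the installed version accepts a num_levels keyword.
def make_williams(num_levels=5):
    from core.williams_market_structure import WilliamsMarketStructure
    try:
        return WilliamsMarketStructure(num_levels=num_levels)
    except TypeError:
        # Current __init__ takes no num_levels argument and defaults to 5 levels internally
        return WilliamsMarketStructure()
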
@@ -1336,13 +1336,16 @@ class RealTrainingAdapter:
         if result_1s:
             price_data_1s, norm_params_dict['1s'] = result_1s
         else:
+            # Don't fail on missing 1s data, it's often unavailable in annotations
             price_data_1s = None

         result_1m = self._extract_timeframe_data(timeframes.get('1m', {}), target_seq_len) if '1m' in timeframes else None
         if result_1m:
             price_data_1m, norm_params_dict['1m'] = result_1m
         else:
-            price_data_1m = None
+            # Warning: 1m data is critical
+            logger.warning(f"Missing 1m data for transformer batch (sample: {training_sample.get('test_case_id')})")
+            return None

         result_1h = self._extract_timeframe_data(timeframes.get('1h', {}), target_seq_len) if '1h' in timeframes else None
         if result_1h:

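Reviewer note: the per-timeframe branches (1s optional, 1m and 1h required) could be expressed as one loop so the policy lives in a single place. A sketch only, assuming the same return convention as _extract_timeframe_data (a (price_data, norm_params) tuple or None); the function and constant names are illustrative:

import logging

logger = logging.getLogger(__name__)

OPTIONAL_TFS = ['1s']          # often unavailable in annotations; don't fail the batch
REQUIRED_TFS = ['1m', '1h']    # assumption for illustration: missing data aborts the batch

def extract_timeframes(extract_fn, timeframes, target_seq_len, sample_id):
    price_data, norm_params = {}, {}
    for tf in OPTIONAL_TFS + REQUIRED_TFS:
        result = extract_fn(timeframes.get(tf, {}), target_seq_len) if tf in timeframes else None
        if result:
            price_data[tf], norm_params[tf] = result
        elif tf in OPTIONAL_TFS:
            price_data[tf] = None  # tolerate missing optional timeframes
        else:
            logger.warning(f"Missing {tf} data for transformer batch (sample: {sample_id})")
            return None
    return price_data, norm_params
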
@@ -1558,6 +1561,12 @@ class RealTrainingAdapter:
         # Model predicts price change ratio, not absolute price
         exit_price = training_sample.get('exit_price')

+        # Handle 'expected_outcome' nesting from LivePivotTrainer
+        if exit_price is None:
+            expected_outcome = training_sample.get('expected_outcome', {})
+            if isinstance(expected_outcome, dict):
+                exit_price = expected_outcome.get('exit_price')
+
         if exit_price and current_price > 0:
             # Normalize: (exit_price - current_price) / current_price
             # This gives the expected price change as a ratio

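Reviewer note: the lookup plus normalization is the training target for the price head: the relative change (exit_price - current_price) / current_price. A minimal sketch of the same logic as a standalone helper; the function name is illustrative:

def resolve_price_target(training_sample: dict, current_price: float):
    """Return the expected price change as a ratio, or None if unavailable."""
    exit_price = training_sample.get('exit_price')
    if exit_price is None:
        # Handle 'expected_outcome' nesting from LivePivotTrainer
        expected_outcome = training_sample.get('expected_outcome', {})
        if isinstance(expected_outcome, dict):
            exit_price = expected_outcome.get('exit_price')
    if exit_price and current_price > 0:
        return (exit_price - current_price) / current_price
    return None

# Example: entry at 100.0 with an annotated exit at 102.5 gives a target ratio of 0.025
assert resolve_price_target({'expected_outcome': {'exit_price': 102.5}}, 100.0) == 0.025
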
@@ -2547,6 +2556,7 @@ class RealTrainingAdapter:
         if session['last_candle_time'] == latest_candle_time:
             return  # Same candle, no training needed

+        logger.debug(f"New candle detected: {latest_candle_time} (last: {session['last_candle_time']})")
         session['last_candle_time'] = latest_candle_time

         # Get the completed candle (second to last)

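Reviewer note: the dedup-and-select flow around this hunk is: skip when the newest candle timestamp is unchanged, remember the new timestamp, then train on the last completed candle (second to last). A sketch under the assumption that candles arrive as a list of dicts with a 'timestamp' key; the helper is illustrative, not the adapter's actual API:

def pick_completed_candle(session: dict, candles: list):
    """Return the most recent completed candle, or None when nothing new arrived."""
    if not candles or len(candles) < 2:
        return None
    latest_candle_time = candles[-1]['timestamp']
    if session.get('last_candle_time') == latest_candle_time:
        return None  # Same candle, no training needed
    session['last_candle_time'] = latest_candle_time
    return candles[-2]  # the second-to-last candle is the last fully closed one
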
@@ -2613,6 +2623,7 @@ class RealTrainingAdapter:
         # Convert to batch format
         batch = self._convert_annotation_to_transformer_batch(training_sample)
         if not batch:
+            logger.warning(f"Per-candle training failed: Could not convert sample to batch")
             return

         # Train on this batch

@@ -1916,8 +1916,14 @@ class AnnotationDashboard:
        def get_available_models():
            """Get list of available models with their load status"""
            try:
-                # Use self.available_models which is a simple list of strings
-                # Don't call training_adapter.get_available_models() as it may return objects
+                # Ensure self.available_models is a list
+                if not isinstance(self.available_models, list):
+                    logger.warning(f"self.available_models is not a list: {type(self.available_models)}. Resetting to default.")
+                    self.available_models = ['Transformer', 'COB_RL', 'CNN', 'DQN']
+
+                # Ensure self.loaded_models is a list/set
+                if not hasattr(self, 'loaded_models'):
+                    self.loaded_models = []

                # Build model state dict with checkpoint info
                logger.info(f"Building model states for {len(self.available_models)} models: {self.available_models}")

@@ -1973,12 +1979,16 @@ class AnnotationDashboard:
                logger.error(f"Error getting available models: {e}")
                import traceback
                logger.error(f"Traceback: {traceback.format_exc()}")
+                # Return a fallback list so the UI doesn't hang
                return jsonify({
-                    'success': False,
-                    'error': {
-                        'code': 'MODEL_LIST_ERROR',
-                        'message': str(e)
-                    }
+                    'success': True,
+                    'models': [
+                        {'name': 'Transformer', 'loaded': False, 'can_train': False, 'can_infer': False},
+                        {'name': 'COB_RL', 'loaded': False, 'can_train': False, 'can_infer': False}
+                    ],
+                    'loaded_count': 0,
+                    'available_count': 2,
+                    'error': str(e)
                })

        @self.server.route('/api/load-model', methods=['POST'])

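Reviewer note: the fallback response hard-codes two model entries while the default list above has four, so loaded_count/available_count can drift from the names shown. A sketch of deriving the payload from one list instead; plain dict, passed to jsonify by the route as above:

DEFAULT_MODELS = ['Transformer', 'COB_RL', 'CNN', 'DQN']  # same default used in the route

def fallback_models_payload(error, names=DEFAULT_MODELS):
    """Degraded response so the UI keeps rendering instead of hanging on an error shape."""
    return {
        'success': True,
        'models': [
            {'name': n, 'loaded': False, 'can_train': False, 'can_infer': False}
            for n in names
        ],
        'loaded_count': 0,
        'available_count': len(names),
        'error': str(error),
    }
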
@@ -160,22 +160,42 @@ class ChartManager {
                    marker: { color: [[volumeColor]] }
                }, [1]);
            } else {
-                // Update last candle using restyle
-                const lastIndex = candlestickTrace.x.length - 1;
+                // Update last candle using restyle - simpler approach for updating single point
+                // We need to get the full arrays, modify last element, and send back
+                // This is less efficient but more reliable for updates than complex index logic
+
+                const x = candlestickTrace.x;
+                const open = candlestickTrace.open;
+                const high = candlestickTrace.high;
+                const low = candlestickTrace.low;
+                const close = candlestickTrace.close;
+                const volume = volumeTrace.y;
+                const colors = volumeTrace.marker.color;
+
+                const lastIdx = x.length - 1;
+
+                // Update local arrays
+                x[lastIdx] = candleTimestamp;
+                open[lastIdx] = candle.open;
+                high[lastIdx] = candle.high;
+                low[lastIdx] = candle.low;
+                close[lastIdx] = candle.close;
+                volume[lastIdx] = candle.volume;
+                colors[lastIdx] = candle.close >= candle.open ? '#10b981' : '#ef4444';
+
+                // Push updates to Plotly
                Plotly.restyle(plotId, {
-                    'x': [[...candlestickTrace.x.slice(0, lastIndex), candleTimestamp]],
-                    'open': [[...candlestickTrace.open.slice(0, lastIndex), candle.open]],
-                    'high': [[...candlestickTrace.high.slice(0, lastIndex), candle.high]],
-                    'low': [[...candlestickTrace.low.slice(0, lastIndex), candle.low]],
-                    'close': [[...candlestickTrace.close.slice(0, lastIndex), candle.close]]
+                    x: [x],
+                    open: [open],
+                    high: [high],
+                    low: [low],
+                    close: [close]
                }, [0]);

-                // Update volume
-                const volumeColor = candle.close >= candle.open ? '#10b981' : '#ef4444';
                Plotly.restyle(plotId, {
-                    'x': [[...volumeTrace.x.slice(0, lastIndex), candleTimestamp]],
-                    'y': [[...volumeTrace.y.slice(0, lastIndex), candle.volume]],
-                    'marker.color': [[...volumeTrace.marker.color.slice(0, lastIndex), volumeColor]]
+                    x: [x],
+                    y: [volume],
+                    'marker.color': [colors]
                }, [1]);
            }

@@ -281,7 +281,14 @@
            .catch(error => {
                console.error('❌ Error loading models:', error);
                const modelSelect = document.getElementById('model-select');
-                modelSelect.innerHTML = '<option value="">Error loading models</option>';
+                modelSelect.innerHTML = '<option value="">Error loading models (Click to retry)</option>';
+
+                // Allow retry by clicking
+                modelSelect.addEventListener('click', function() {
+                    if (modelSelect.value === "") {
+                        loadAvailableModels();
+                    }
+                }, { once: true });

            });
        }

@@ -834,17 +841,29 @@

        // Display last 5 predictions (most recent first)
        const html = predictionHistory.slice(0, 5).map(pred => {
-            const time = new Date(pred.timestamp).toLocaleTimeString();
+            // Safely parse timestamp
+            let timeStr = '--:--:--';
+            try {
+                if (pred.timestamp) {
+                    const date = new Date(pred.timestamp);
+                    if (!isNaN(date.getTime())) {
+                        timeStr = date.toLocaleTimeString();
+                    }
+                }
+            } catch (e) {
+                console.error('Error parsing timestamp:', e);
+            }
            const actionColor = pred.action === 'BUY' ? 'text-success' :
                               pred.action === 'SELL' ? 'text-danger' : 'text-secondary';
            const confidence = (pred.confidence * 100).toFixed(1);
-            const price = pred.predicted_price ? pred.predicted_price.toFixed(2) : '--';
+            const price = (pred.predicted_price && !isNaN(pred.predicted_price)) ? pred.predicted_price.toFixed(2) : '--';

            return `
                <div class="d-flex justify-content-between align-items-center mb-1 pb-1 border-bottom">
                    <div>
                        <span class="${actionColor} fw-bold">${pred.action}</span>
-                        <span class="text-muted ms-1">${time}</span>
+                        <span class="text-muted ms-1">${timeStr}</span>
                    </div>
                    <div class="text-end">
                        <div>${confidence}%</div>

@@ -864,14 +883,46 @@
            .then(response => response.json())
            .then(data => {
                if (data.success && data.signals.length > 0) {
+                    // Update Accuracy Metrics in Header
+                    if (data.metrics) {
+                        document.getElementById('metric-accuracy').textContent = (data.metrics.accuracy * 100).toFixed(1) + '%';
+                        // If you want to show these in the live banner too:
+                        const banner = document.getElementById('inference-status');
+                        if (banner) {
+                            // Check if we already have a metrics div, if not create one
+                            let metricsDiv = document.getElementById('live-banner-metrics');
+                            if (!metricsDiv) {
+                                metricsDiv = document.createElement('div');
+                                metricsDiv.id = 'live-banner-metrics';
+                                metricsDiv.className = 'mt-1 pt-1 border-top small text-muted d-flex justify-content-between';
+                                banner.querySelector('.small').appendChild(metricsDiv);
+                            }
+                            metricsDiv.innerHTML = `
+                                <span>Acc: ${(data.metrics.accuracy * 100).toFixed(1)}%</span>
+                                <span>Loss: ${data.metrics.loss ? data.metrics.loss.toFixed(4) : '--'}</span>
+                            `;
+                        }
+                    }
+
                    const latest = data.signals[0];
                    document.getElementById('latest-signal').textContent = latest.action;
                    document.getElementById('latest-confidence').textContent =
                        (latest.confidence * 100).toFixed(1) + '%';

+                    // Handle prediction price and timestamp safely
+                    const predPrice = (latest.predicted_price && !isNaN(latest.predicted_price))
+                        ? latest.predicted_price.toFixed(2)
+                        : '--';
+
+                    // Format timestamp carefully
+                    let timestamp = new Date().toISOString();
+                    if (latest.timestamp) {
+                        timestamp = latest.timestamp;
+                    }
+
                    // Add to prediction history (keep last 5)
                    predictionHistory.unshift({
-                        timestamp: latest.timestamp || new Date().toISOString(),
+                        timestamp: timestamp,
                        action: latest.action,
                        confidence: latest.confidence,
                        predicted_price: latest.predicted_price

@@ -882,21 +933,20 @@
                    updatePredictionHistory();

                    // Update chart with signal markers
-                    if (appState.chartManager) {
+                    if (window.appState && window.appState.chartManager) {
                        displaySignalOnChart(latest);
                    }
                }
            })
            .catch(error => {
                console.error('Error polling signals:', error);
            });
+    }, 1000); // Poll every second
+}

-    // Update charts with latest data
-    updateChartsWithLiveData();
-    }, 1000); // Poll every second
-}
-
-function updateChartsWithLiveData() {
+// REMOVED: updateChartsWithLiveData() - Full chart refresh is deprecated in favor of incremental updates
+
+/* DEPRECATED: Old update logic
    // Fetch latest chart data
    fetch('/api/chart-data', {
        method: 'POST',

@@ -924,8 +974,7 @@
            console.error('Error updating charts:', error);
        });
    }
+    */
-    let liveUpdateCount = 0;
-

    function updateSingleChart(timeframe, newData) {
        const chart = appState.chartManager.charts[timeframe];

@@ -955,7 +1004,8 @@

        // Update counter
        liveUpdateCount++;
-        const counterEl = document.getElementById('live-update-count');
+        // Note: The element ID is 'live-updates-count' in the header, not 'live-update-count'
+        const counterEl = document.getElementById('live-updates-count') || document.getElementById('live-update-count');
        if (counterEl) {
            counterEl.textContent = liveUpdateCount + ' updates';
        }

@@ -20,7 +20,8 @@ import json
 from datetime import datetime

 # Configure logging
-logger = logging.getLogger(__name__)
+from utils.safe_logger import get_logger
+logger = get_logger(__name__)

 @dataclass
 class TradingTransformerConfig:

utils/safe_logger.py (new file, 67 lines)
@@ -0,0 +1,67 @@
+"""
+Safe Logging Wrapper
+
+This module provides a logging wrapper that handles Unicode encoding errors
+gracefully, preventing crashes when logging special characters on Windows consoles.
+"""
+
+import logging
+import sys
+import traceback
+
+class SafeLogger:
+    """Wrapper for logging.Logger to safely handle unicode characters"""
+
+    def __init__(self, name):
+        self.logger = logging.getLogger(name)
+
+    def info(self, msg, *args, **kwargs):
+        try:
+            self.logger.info(msg, *args, **kwargs)
+        except UnicodeEncodeError:
+            # Fallback: sanitize message or print safely
+            safe_msg = msg.encode('ascii', 'replace').decode('ascii')
+            self.logger.info(safe_msg, *args, **kwargs)
+        except Exception:
+            # Last resort fallback
+            pass
+
+    def warning(self, msg, *args, **kwargs):
+        try:
+            self.logger.warning(msg, *args, **kwargs)
+        except UnicodeEncodeError:
+            safe_msg = msg.encode('ascii', 'replace').decode('ascii')
+            self.logger.warning(safe_msg, *args, **kwargs)
+        except Exception:
+            pass
+
+    def error(self, msg, *args, **kwargs):
+        try:
+            self.logger.error(msg, *args, **kwargs)
+        except UnicodeEncodeError:
+            safe_msg = msg.encode('ascii', 'replace').decode('ascii')
+            self.logger.error(safe_msg, *args, **kwargs)
+        except Exception:
+            pass
+
+    def debug(self, msg, *args, **kwargs):
+        try:
+            self.logger.debug(msg, *args, **kwargs)
+        except UnicodeEncodeError:
+            safe_msg = msg.encode('ascii', 'replace').decode('ascii')
+            self.logger.debug(safe_msg, *args, **kwargs)
+        except Exception:
+            pass
+
+    def exception(self, msg, *args, **kwargs):
+        try:
+            self.logger.exception(msg, *args, **kwargs)
+        except UnicodeEncodeError:
+            safe_msg = msg.encode('ascii', 'replace').decode('ascii')
+            self.logger.exception(safe_msg, *args, **kwargs)
+        except Exception:
+            pass
+
+def get_logger(name):
+    return SafeLogger(name)
+

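Usage note: callers switch from logging.getLogger to the wrapper exactly as in the transformer-module hunk above. A short example of the behavior this buys; the emoji string is illustrative:

from utils.safe_logger import get_logger

logger = get_logger(__name__)

# On a console that cannot encode these characters (e.g. cp1252 on Windows),
# SafeLogger catches the UnicodeEncodeError and retries with an ASCII-sanitized
# message ("? Training complete ?") instead of crashing the caller.
logger.info("✅ Training complete 🎉")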