commit de2ad92602
parent 0225f4df58
Author: Dobromir Popov
Date:   2025-10-23 19:43:41 +03:00

5 changed files with 379 additions and 96 deletions

View File

@@ -343,9 +343,34 @@ class AnnotationDashboard:
                     'volume': df['volume'].tolist()
                 }

+            # Get pivot bounds for the symbol
+            pivot_bounds = None
+            if self.data_provider:
+                try:
+                    pivot_bounds = self.data_provider.get_pivot_bounds(symbol)
+                    if pivot_bounds:
+                        logger.info(f"Found pivot bounds for {symbol}: {len(pivot_bounds.pivot_support_levels)} support, {len(pivot_bounds.pivot_resistance_levels)} resistance")
+                except Exception as e:
+                    logger.error(f"Error getting pivot bounds: {e}")
+
             return jsonify({
                 'success': True,
-                'chart_data': chart_data
+                'chart_data': chart_data,
+                'pivot_bounds': {
+                    'support_levels': pivot_bounds.pivot_support_levels if pivot_bounds else [],
+                    'resistance_levels': pivot_bounds.pivot_resistance_levels if pivot_bounds else [],
+                    'price_range': {
+                        'min': pivot_bounds.price_min if pivot_bounds else None,
+                        'max': pivot_bounds.price_max if pivot_bounds else None
+                    },
+                    'volume_range': {
+                        'min': pivot_bounds.volume_min if pivot_bounds else None,
+                        'max': pivot_bounds.volume_max if pivot_bounds else None
+                    },
+                    'timeframe': '1m',  # Pivot bounds are calculated from 1m data
+                    'period': '30 days',  # Monthly data
+                    'total_levels': len(pivot_bounds.pivot_support_levels) + len(pivot_bounds.pivot_resistance_levels) if pivot_bounds else 0
+                } if pivot_bounds else None
             })
         except Exception as e:
@@ -559,9 +584,34 @@ class AnnotationDashboard:
                 except Exception as e:
                     logger.error(f"Error refreshing {timeframe} data: {e}")

+            # Get pivot bounds for the symbol
+            pivot_bounds = None
+            if self.data_provider:
+                try:
+                    pivot_bounds = self.data_provider.get_pivot_bounds(symbol)
+                    if pivot_bounds:
+                        logger.info(f"Found pivot bounds for {symbol}: {len(pivot_bounds.pivot_support_levels)} support, {len(pivot_bounds.pivot_resistance_levels)} resistance")
+                except Exception as e:
+                    logger.error(f"Error getting pivot bounds: {e}")
+
             return jsonify({
                 'success': True,
                 'chart_data': chart_data,
+                'pivot_bounds': {
+                    'support_levels': pivot_bounds.pivot_support_levels if pivot_bounds else [],
+                    'resistance_levels': pivot_bounds.pivot_resistance_levels if pivot_bounds else [],
+                    'price_range': {
+                        'min': pivot_bounds.price_min if pivot_bounds else None,
+                        'max': pivot_bounds.price_max if pivot_bounds else None
+                    },
+                    'volume_range': {
+                        'min': pivot_bounds.volume_min if pivot_bounds else None,
+                        'max': pivot_bounds.volume_max if pivot_bounds else None
+                    },
+                    'timeframe': '1m',  # Pivot bounds are calculated from 1m data
+                    'period': '30 days',  # Monthly data
+                    'total_levels': len(pivot_bounds.pivot_support_levels) + len(pivot_bounds.pivot_resistance_levels) if pivot_bounds else 0
+                } if pivot_bounds else None,
                 'message': f'Refreshed data for {symbol}'
             })
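
Both handlers above read the same attributes off the object returned by data_provider.get_pivot_bounds(). For reference, the shape they assume looks roughly like the following sketch (the attribute names come from the diff itself; the dataclass form and the types are assumptions):

    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class PivotBounds:
        # Attribute names taken from the handlers above; types are inferred.
        pivot_support_levels: List[float] = field(default_factory=list)
        pivot_resistance_levels: List[float] = field(default_factory=list)
        price_min: float = 0.0
        price_max: float = 0.0
        volume_min: float = 0.0
        volume_max: float = 0.0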

View File

@@ -14,14 +14,15 @@ class ChartManager {
    }

    /**
-    * Initialize charts for all timeframes
+    * Initialize charts for all timeframes with pivot bounds
     */
-   initializeCharts(chartData) {
+   initializeCharts(chartData, pivotBounds = null) {
        console.log('Initializing charts with data:', chartData);
+       console.log('Pivot bounds:', pivotBounds);

        this.timeframes.forEach(timeframe => {
            if (chartData[timeframe]) {
-               this.createChart(timeframe, chartData[timeframe]);
+               this.createChart(timeframe, chartData[timeframe], pivotBounds);
            }
        });
@@ -32,7 +33,7 @@ class ChartManager {
    /**
     * Create a single chart for a timeframe
     */
-   createChart(timeframe, data) {
+   createChart(timeframe, data, pivotBounds = null) {
        const plotId = `plot-${timeframe}`;
        const plotElement = document.getElementById(plotId);
@@ -129,7 +130,49 @@ class ChartManager {
            scrollZoom: true
        };

-       Plotly.newPlot(plotId, [candlestickTrace, volumeTrace], layout, config);
+       // Prepare chart data with pivot bounds
+       const chartData = [candlestickTrace, volumeTrace];
+
+       // Add pivot levels if available
+       if (pivotBounds && pivotBounds.support_levels && pivotBounds.resistance_levels) {
+           // Add support levels
+           pivotBounds.support_levels.forEach((level, index) => {
+               chartData.push({
+                   x: data.timestamps,
+                   y: Array(data.timestamps.length).fill(level),
+                   type: 'scatter',
+                   mode: 'lines',
+                   line: {
+                       color: '#28a745',
+                       width: 1,
+                       dash: 'dash'
+                   },
+                   name: `Support ${index + 1}`,
+                   showlegend: index === 0, // Only show legend for first support level
+                   hovertemplate: `Support: $%{y:.2f}<extra></extra>`
+               });
+           });
+
+           // Add resistance levels
+           pivotBounds.resistance_levels.forEach((level, index) => {
+               chartData.push({
+                   x: data.timestamps,
+                   y: Array(data.timestamps.length).fill(level),
+                   type: 'scatter',
+                   mode: 'lines',
+                   line: {
+                       color: '#dc3545',
+                       width: 1,
+                       dash: 'dash'
+                   },
+                   name: `Resistance ${index + 1}`,
+                   showlegend: index === 0, // Only show legend for first resistance level
+                   hovertemplate: `Resistance: $%{y:.2f}<extra></extra>`
+               });
+           });
+       }
+
+       Plotly.newPlot(plotId, chartData, layout, config);

        // Store chart reference
        this.charts[timeframe] = {
@@ -204,29 +247,74 @@ class ChartManager {
    }

    /**
-    * Update charts with new data
+    * Update charts with new data including pivot levels
     */
-   updateCharts(newData) {
+   updateCharts(newData, pivotBounds = null) {
        Object.keys(newData).forEach(timeframe => {
            if (this.charts[timeframe]) {
                const plotId = this.charts[timeframe].plotId;

-               Plotly.react(plotId, [
+               // Prepare chart data
+               const chartData = [
                    {
                        x: newData[timeframe].timestamps,
                        open: newData[timeframe].open,
                        high: newData[timeframe].high,
                        low: newData[timeframe].low,
                        close: newData[timeframe].close,
-                       type: 'candlestick'
+                       type: 'candlestick',
+                       name: 'Price'
                    },
                    {
                        x: newData[timeframe].timestamps,
                        y: newData[timeframe].volume,
                        type: 'bar',
-                       yaxis: 'y2'
+                       yaxis: 'y2',
+                       name: 'Volume',
+                       marker: { color: 'rgba(0, 123, 255, 0.3)' }
                    }
-               ]);
+               ];
+
+               // Add pivot levels if available
+               if (pivotBounds && pivotBounds.support_levels && pivotBounds.resistance_levels) {
+                   // Add support levels
+                   pivotBounds.support_levels.forEach((level, index) => {
+                       chartData.push({
+                           x: newData[timeframe].timestamps,
+                           y: Array(newData[timeframe].timestamps.length).fill(level),
+                           type: 'scatter',
+                           mode: 'lines',
+                           line: {
+                               color: '#28a745',
+                               width: 1,
+                               dash: 'dash'
+                           },
+                           name: `Support ${index + 1}`,
+                           showlegend: index === 0, // Only show legend for first support level
+                           hovertemplate: `Support: $%{y:.2f}<extra></extra>`
+                       });
+                   });
+
+                   // Add resistance levels
+                   pivotBounds.resistance_levels.forEach((level, index) => {
+                       chartData.push({
+                           x: newData[timeframe].timestamps,
+                           y: Array(newData[timeframe].timestamps.length).fill(level),
+                           type: 'scatter',
+                           mode: 'lines',
+                           line: {
+                               color: '#dc3545',
+                               width: 1,
+                               dash: 'dash'
+                           },
+                           name: `Resistance ${index + 1}`,
+                           showlegend: index === 0, // Only show legend for first resistance level
+                           hovertemplate: `Resistance: $%{y:.2f}<extra></extra>`
+                       });
+                   });
+               }
+
+               Plotly.react(plotId, chartData);
            }
        });
    }
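
Side note on the approach: drawing each level as a dashed scatter trace, rather than a layout shape, keeps the levels in the legend and hoverable. For readers more familiar with Plotly's Python API, a minimal equivalent sketch with illustrative values:

    import plotly.graph_objects as go

    timestamps = ["2025-10-23 19:41", "2025-10-23 19:42", "2025-10-23 19:43"]
    support_levels = [1800.0, 1750.0]  # illustrative values, not from the repo

    fig = go.Figure()
    for i, level in enumerate(support_levels):
        fig.add_trace(go.Scatter(
            x=timestamps,
            y=[level] * len(timestamps),  # horizontal line across the chart
            mode="lines",
            line=dict(color="#28a745", width=1, dash="dash"),
            name=f"Support {i + 1}",
            showlegend=(i == 0),  # one legend entry for the whole group
            hovertemplate="Support: $%{y:.2f}<extra></extra>",
        ))
    fig.show()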

View File

@@ -61,8 +61,8 @@
            // Initialize application state
            window.appState = {
                currentSymbol: '{{ current_symbol }}',
-               currentTimeframes: {{ timeframes | tojson }},
-               annotations: {{ annotations | tojson }},
+               currentTimeframes: '{{ timeframes | tojson }}',
+               annotations: '{{ annotations | tojson }}',
                pendingAnnotation: null,
                chartManager: null,
                annotationManager: null,
@@ -95,7 +95,87 @@
        });

        function loadInitialData() {
+           console.log('Loading initial chart data...');
+
            // Fetch initial chart data
+           fetch('/api/chart-data', {
+               method: 'POST',
+               headers: { 'Content-Type': 'application/json' },
+               body: JSON.stringify({
+                   symbol: appState.currentSymbol,
+                   timeframes: appState.currentTimeframes,
+                   start_time: null,
+                   end_time: null
+               })
+           })
+           .then(response => {
+               console.log('Chart data response status:', response.status);
+               return response.json();
+           })
+           .then(data => {
+               console.log('Chart data received:', data);
+               if (data.success) {
+                   console.log('Initializing charts with data...');
+                   window.appState.chartManager.initializeCharts(data.chart_data, data.pivot_bounds);
+
+                   // Show pivot bounds info if available
+                   if (data.pivot_bounds) {
+                       const pivotInfo = data.pivot_bounds;
+                       console.log(`Loaded ${pivotInfo.total_levels} pivot levels (${pivotInfo.support_levels.length} support, ${pivotInfo.resistance_levels.length} resistance) from ${pivotInfo.timeframe} data over ${pivotInfo.period}`);
+                   }
+
+                   // Load existing annotations
+                   console.log('Loading', window.appState.annotations.length, 'existing annotations');
+                   window.appState.annotations.forEach(annotation => {
+                       window.appState.chartManager.addAnnotation(annotation);
+                   });
+
+                   // Update annotation list
+                   if (typeof renderAnnotationsList === 'function') {
+                       renderAnnotationsList(window.appState.annotations);
+                   }
+
+                   // DISABLED: Live updates were causing data corruption (red wall issue)
+                   // Use manual refresh button instead
+                   // startLiveChartUpdates();
+
+                   console.log('Initial data load complete');
+               } else {
+                   console.error('Chart data load failed:', data.error);
+                   showError('Failed to load chart data: ' + data.error.message);
+               }
+           })
+           .catch(error => {
+               console.error('Chart data fetch error:', error);
+               showError('Network error: ' + error.message);
+           });
+       }
+
+       // Live chart update mechanism
+       let liveUpdateInterval = null;
+
+       function startLiveChartUpdates() {
+           // Clear any existing interval
+           if (liveUpdateInterval) {
+               clearInterval(liveUpdateInterval);
+           }
+
+           console.log('Starting live chart updates (1s interval)');
+
+           // Update every second for 1s chart
+           liveUpdateInterval = setInterval(() => {
+               updateLiveChartData();
+           }, 1000);
+       }
+
+       function updateLiveChartData() {
+           // Only update if we have a chart manager
+           if (!window.appState || !window.appState.chartManager) {
+               return;
+           }
+
+           // Fetch latest data
            fetch('/api/chart-data', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
@@ -108,28 +188,30 @@
            })
            .then(response => response.json())
            .then(data => {
-               if (data.success) {
-                   window.appState.chartManager.initializeCharts(data.chart_data);
-
-                   // Load existing annotations
-                   console.log('Loading', window.appState.annotations.length, 'existing annotations');
-                   window.appState.annotations.forEach(annotation => {
-                       window.appState.chartManager.addAnnotation(annotation);
-                   });
-
-                   // Update annotation list
-                   if (typeof renderAnnotationsList === 'function') {
-                       renderAnnotationsList(window.appState.annotations);
+               if (data.success && window.appState.chartManager) {
+                   // Update charts with new data and pivot bounds
+                   window.appState.chartManager.updateCharts(data.chart_data, data.pivot_bounds);
+
+                   // Show pivot bounds info if available
+                   if (data.pivot_bounds) {
+                       const pivotInfo = data.pivot_bounds;
+                       console.log(`Loaded ${pivotInfo.total_levels} pivot levels (${pivotInfo.support_levels.length} support, ${pivotInfo.resistance_levels.length} resistance) from ${pivotInfo.timeframe} data over ${pivotInfo.period}`);
                    }
-               } else {
-                   showError('Failed to load chart data: ' + data.error.message);
                }
            })
            .catch(error => {
-               showError('Network error: ' + error.message);
+               console.debug('Live update error:', error);
+               // Don't show error to user for live updates
            });
        }
+
+       // Clean up on page unload
+       window.addEventListener('beforeunload', function () {
+           if (liveUpdateInterval) {
+               clearInterval(liveUpdateInterval);
+           }
+       });

        function setupKeyboardShortcuts() {
            document.addEventListener('keydown', function (e) {
                // Arrow left - navigate backward
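
The fetch in loadInitialData above defines the full request/response contract for /api/chart-data. A minimal way to exercise it outside the browser, sketched with Python requests (the base URL, port, and symbol are hypothetical; the request body keys and response keys mirror the template code):

    import requests

    BASE_URL = "http://localhost:8050"  # hypothetical dashboard host/port

    resp = requests.post(
        f"{BASE_URL}/api/chart-data",
        json={
            "symbol": "ETH/USDT",   # example symbol
            "timeframes": ["1m"],   # example timeframe list
            "start_time": None,
            "end_time": None,
        },
        timeout=10,
    )
    data = resp.json()
    if data.get("success") and data.get("pivot_bounds"):
        pb = data["pivot_bounds"]
        print(f"{pb['total_levels']} pivot levels over {pb['period']}")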

View File

@@ -218,11 +218,18 @@
            .then(response => response.json())
            .then(data => {
                if (data.success) {
-                   // Update charts with new data
+                   // Update charts with new data and pivot bounds
                    if (appState.chartManager) {
-                       appState.chartManager.updateCharts(data.chart_data);
+                       appState.chartManager.updateCharts(data.chart_data, data.pivot_bounds);
+                   }
+
+                   // Show pivot bounds info if available
+                   if (data.pivot_bounds) {
+                       const pivotInfo = data.pivot_bounds;
+                       showSuccess(`Chart data refreshed successfully. Found ${pivotInfo.total_levels} pivot levels (${pivotInfo.support_levels.length} support, ${pivotInfo.resistance_levels.length} resistance) from ${pivotInfo.timeframe} data over ${pivotInfo.period}`);
+                   } else {
+                       showSuccess('Chart data refreshed successfully');
                    }
-                   showSuccess('Chart data refreshed successfully');
                } else {
                    showError('Failed to refresh data: ' + data.error.message);
                }

View File

@@ -156,6 +156,14 @@ class DataProvider:
        self.real_time_data = {}  # {symbol: {timeframe: deque}}
        self.current_prices = {}  # {symbol: float}

+       # Thread-safe data access with RLock (reentrant lock)
+       from threading import RLock
+       self.data_lock = RLock()
+
+       # Catch-up state tracking
+       self.catch_up_in_progress = False
+       self.catch_up_completed = False
+
        # Live price cache for low-latency price updates
        self.live_price_cache: Dict[str, Tuple[float, datetime]] = {}
        self.live_price_cache_ttl = timedelta(milliseconds=500)
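
The RLock added here is what the hunks below acquire around every cached_data read and write. A minimal standalone sketch of the intended discipline (copy shared state under the lock, do network I/O outside it, write back under the lock; all names here are illustrative, not from the repo):

    from threading import RLock

    class CandleCache:
        """Illustrative cache guarded by a reentrant lock. RLock lets the
        same thread re-acquire the lock, e.g. a helper calling a helper."""

        def __init__(self):
            self._lock = RLock()
            self._data = {}  # {(symbol, timeframe): DataFrame-like}

        def snapshot(self, key):
            # Copy out under the lock so callers can work on it lock-free.
            with self._lock:
                return self._data.get(key)

        def store(self, key, df):
            with self._lock:
                self._data[key] = df

    def refresh(cache: CandleCache, key, fetch):
        current = cache.snapshot(key)   # short critical section
        fresh = fetch(key)              # network I/O outside the lock
        if fresh is not None:
            cache.store(key, fresh)     # short critical section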
@@ -583,69 +591,114 @@ class DataProvider:
        logger.info("Initial data load completed")

-       # Catch up on missing candles if needed
-       self._catch_up_missing_candles()
+       # Start background candle catch-up with proper locking
+       self._start_background_catch_up()
+
+   def _start_background_catch_up(self):
+       """
+       Start background candle catch-up with proper thread safety
+       This runs in a separate thread and uses locks to prevent race conditions
+       """
+       import threading
+
+       def catch_up_worker():
+           # Wait a bit for initial data to settle
+           import time
+           time.sleep(2)
+
+           logger.info("Starting background candle catch-up with thread safety")
+           self._catch_up_missing_candles()
+           logger.info("Background candle catch-up completed")
+
+       catch_up_thread = threading.Thread(
+           target=catch_up_worker,
+           daemon=True,
+           name="CandleCatchUpWorker"
+       )
+       catch_up_thread.start()

    def _catch_up_missing_candles(self):
        """
-       Catch up on missing candles at startup
+       Catch up on missing candles at startup with thread-safe locking
        Fetches up to 1500 candles per timeframe if we're missing data
        """
-       logger.info("Checking for missing candles to catch up...")
-
-       target_candles = 1500  # Target number of candles per timeframe
-
-       for symbol in self.symbols:
-           for timeframe in self.timeframes:
-               try:
-                   # Check current candle count
-                   current_df = self.cached_data[symbol][timeframe]
-                   current_count = len(current_df) if not current_df.empty else 0
-
-                   if current_count >= target_candles:
-                       logger.debug(f"{symbol} {timeframe}: Already have {current_count} candles (target: {target_candles})")
-                       continue
-
-                   # Calculate how many candles we need
-                   needed = target_candles - current_count
-                   logger.info(f"{symbol} {timeframe}: Need {needed} more candles (have {current_count}/{target_candles})")
-
-                   # Fetch missing candles
-                   # Try Binance first (usually has better historical data)
-                   df = self._fetch_from_binance(symbol, timeframe, needed)
-
-                   if df is None or df.empty:
-                       # Fallback to MEXC
-                       logger.debug(f"Binance fetch failed for {symbol} {timeframe}, trying MEXC...")
-                       df = self._fetch_from_mexc(symbol, timeframe, needed)
-
-                   if df is not None and not df.empty:
-                       # Ensure proper datetime index
-                       df = self._ensure_datetime_index(df)
-
-                       # Merge with existing data
-                       if not current_df.empty:
-                           combined_df = pd.concat([current_df, df], ignore_index=False)
-                           combined_df = combined_df[~combined_df.index.duplicated(keep='last')]
-                           combined_df = combined_df.sort_index()
-                           self.cached_data[symbol][timeframe] = combined_df.tail(target_candles)
-                       else:
-                           self.cached_data[symbol][timeframe] = df.tail(target_candles)
-
-                       final_count = len(self.cached_data[symbol][timeframe])
-                       logger.info(f" {symbol} {timeframe}: Caught up! Now have {final_count} candles")
-                   else:
-                       logger.warning(f"{symbol} {timeframe}: Could not fetch historical data from any exchange")
-
-               except Exception as e:
-                   logger.error(f"Error catching up candles for {symbol} {timeframe}: {e}")
-
-       logger.info("Candle catch-up completed")
+       # Mark catch-up as in progress
+       with self.data_lock:
+           if self.catch_up_in_progress:
+               logger.warning("Catch-up already in progress, skipping")
+               return
+           self.catch_up_in_progress = True
+
+       try:
+           logger.info("Checking for missing candles to catch up...")
+           target_candles = 1500  # Target number of candles per timeframe
+
+           for symbol in self.symbols:
+               for timeframe in self.timeframes:
+                   try:
+                       # Read current count with lock
+                       with self.data_lock:
+                           current_df = self.cached_data[symbol][timeframe].copy()
+                           current_count = len(current_df) if not current_df.empty else 0
+
+                       if current_count >= target_candles:
+                           logger.debug(f"{symbol} {timeframe}: Already have {current_count} candles (target: {target_candles})")
+                           continue
+
+                       # Calculate how many candles we need
+                       needed = target_candles - current_count
+                       logger.info(f"{symbol} {timeframe}: Need {needed} more candles (have {current_count}/{target_candles})")
+
+                       # Fetch missing candles (outside lock - network I/O)
+                       # Try Binance first (usually has better historical data)
+                       df = self._fetch_from_binance(symbol, timeframe, needed)
+
+                       if df is None or df.empty:
+                           # Fallback to MEXC
+                           logger.debug(f"Binance fetch failed for {symbol} {timeframe}, trying MEXC...")
+                           df = self._fetch_from_mexc(symbol, timeframe, needed)
+
+                       if df is not None and not df.empty:
+                           # Ensure proper datetime index
+                           df = self._ensure_datetime_index(df)
+
+                           # Update cached data with lock
+                           with self.data_lock:
+                               current_df = self.cached_data[symbol][timeframe]
+
+                               # Merge with existing data
+                               if not current_df.empty:
+                                   combined_df = pd.concat([current_df, df], ignore_index=False)
+                                   combined_df = combined_df[~combined_df.index.duplicated(keep='last')]
+                                   combined_df = combined_df.sort_index()
+                                   self.cached_data[symbol][timeframe] = combined_df.tail(target_candles)
+                               else:
+                                   self.cached_data[symbol][timeframe] = df.tail(target_candles)
+
+                               final_count = len(self.cached_data[symbol][timeframe])
+                               logger.info(f"{symbol} {timeframe}: Caught up! Now have {final_count} candles")
+                       else:
+                           logger.warning(f"{symbol} {timeframe}: Could not fetch historical data from any exchange")
+
+                   except Exception as e:
+                       logger.error(f"Error catching up candles for {symbol} {timeframe}: {e}")
+
+           logger.info("Candle catch-up completed successfully")
+
+       except Exception as e:
+           logger.error(f"Fatal error in candle catch-up: {e}")
+       finally:
+           # Mark catch-up as complete
+           with self.data_lock:
+               self.catch_up_in_progress = False
+               self.catch_up_completed = True

    def _update_cached_data(self, symbol: str, timeframe: str):
-       """Update cached data by fetching last 2 candles"""
+       """Update cached data by fetching last 2 candles with thread-safe locking"""
        try:
-           # Fetch last 2 candles
+           # Fetch last 2 candles (outside lock - network I/O)
            df = self._fetch_from_binance(symbol, timeframe, 2)

            if df is None or df.empty:
@@ -655,21 +708,24 @@ class DataProvider:
            # Ensure proper datetime index
            df = self._ensure_datetime_index(df)

-           # Get existing cached data
-           existing_df = self.cached_data[symbol][timeframe]
-
-           if not existing_df.empty:
-               # Merge new data with existing, avoiding duplicates
-               combined_df = pd.concat([existing_df, df], ignore_index=False)
-               combined_df = combined_df[~combined_df.index.duplicated(keep='last')]
-               combined_df = combined_df.sort_index()
-               # Keep only last 1500 candles
-               self.cached_data[symbol][timeframe] = combined_df.tail(1500)
-           else:
-               self.cached_data[symbol][timeframe] = df
-
-           logger.debug(f"Updated cached data for {symbol} {timeframe}: {len(self.cached_data[symbol][timeframe])} candles")
+           # Update cached data with lock
+           with self.data_lock:
+               existing_df = self.cached_data[symbol][timeframe]
+
+               if not existing_df.empty:
+                   # Merge new data with existing, avoiding duplicates
+                   combined_df = pd.concat([existing_df, df], ignore_index=False)
+                   combined_df = combined_df[~combined_df.index.duplicated(keep='last')]
+                   combined_df = combined_df.sort_index()
+                   # Keep only last 1500 candles
+                   self.cached_data[symbol][timeframe] = combined_df.tail(1500)
+               else:
+                   self.cached_data[symbol][timeframe] = df
+
+               candle_count = len(self.cached_data[symbol][timeframe])
+               logger.debug(f"Updated cached data for {symbol} {timeframe}: {candle_count} candles")

        except Exception as e:
            logger.debug(f"Error updating cached data for {symbol} {timeframe}: {e}")