LIVE CHART WORKS ON ANNOTATE APP

Dobromir Popov
2025-11-22 01:26:25 +02:00
parent 6ad09acc19
commit a1d3d6a865
2 changed files with 169 additions and 27 deletions

View File

@@ -7,7 +7,7 @@ data quality and structure used by training and inference systems.
 import logging
 from typing import Dict, List, Optional, Tuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 import pandas as pd
 from pathlib import Path
 import pickle
@@ -73,7 +73,11 @@ class HistoricalDataLoader:
         elif timeframe == '1m':
             current_ttl = timedelta(seconds=5)
 
-        if cache_key in self.memory_cache and direction == 'latest':
+        # For 'after' direction (incremental updates), we should force a refresh if cache is stale
+        # or simply bypass cache for 1s/1m to ensure we get the absolute latest
+        bypass_cache = (direction == 'after' and timeframe in ['1s', '1m'])
+
+        if cache_key in self.memory_cache and direction == 'latest' and not bypass_cache:
             cached_data, cached_time = self.memory_cache[cache_key]
             if datetime.now() - cached_time < current_ttl:
                 # For 1s/1m, we want to return immediately if valid
@@ -83,8 +87,8 @@ class HistoricalDataLoader:
                     return cached_data
 
         try:
-            # FORCE refresh for 1s/1m if requesting latest data
-            force_refresh = (timeframe in ['1s', '1m'] and not start_time and not end_time)
+            # FORCE refresh for 1s/1m if requesting latest data OR incremental update
+            force_refresh = (timeframe in ['1s', '1m'] and (bypass_cache or (not start_time and not end_time)))
 
             # Try to get data from DataProvider's cached data first (most efficient)
             if hasattr(self.data_provider, 'cached_data'):
                 with self.data_provider.data_lock:
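
For orientation, here is a minimal sketch of the cache decision these two hunks introduce, assuming a simplified loader that keeps one (data, cached_time) tuple per key; the helper name and signature are illustrative, not part of the repository:

    from datetime import datetime, timedelta

    def should_use_cache(cache_entry, timeframe, direction, ttl=timedelta(seconds=5)):
        # Illustrative only: incremental ('after') requests on fast timeframes skip the cache entirely
        if direction == 'after' and timeframe in ('1s', '1m'):
            return False
        # Only 'latest' requests backed by a fresh entry may be served from memory
        if cache_entry is None or direction != 'latest':
            return False
        _, cached_time = cache_entry
        return datetime.now() - cached_time < ttl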
@@ -403,27 +407,53 @@ class HistoricalDataLoader:
         Returns:
             Filtered DataFrame
         """
-        if direction == 'before' and end_time:
-            # Get candles BEFORE end_time
-            df = df[df.index < end_time]
-            # Return the most recent N candles before end_time
-            df = df.tail(limit)
-        elif direction == 'after' and start_time:
-            # Get candles AFTER start_time
-            df = df[df.index > start_time]
-            # Return the oldest N candles after start_time
-            df = df.head(limit)
-        else:
-            # Default: filter by range
-            if start_time:
-                df = df[df.index >= start_time]
-            if end_time:
-                df = df[df.index <= end_time]
-            # Return most recent candles
-            if len(df) > limit:
-                df = df.tail(limit)
-
-        return df
+        try:
+            # Ensure df index is datetime and timezone-aware (UTC)
+            if not isinstance(df.index, pd.DatetimeIndex):
+                df.index = pd.to_datetime(df.index, utc=True)
+            elif df.index.tz is None:
+                df.index = df.index.tz_localize('UTC')
+            else:
+                # If already aware but not UTC, convert
+                if str(df.index.tz) != 'UTC' and str(df.index.tz) != 'datetime.timezone.utc':
+                    df.index = df.index.tz_convert('UTC')
+
+            # Ensure start_time/end_time are UTC
+            if start_time and start_time.tzinfo is None:
+                start_time = start_time.replace(tzinfo=timezone.utc)
+            elif start_time:
+                start_time = start_time.astimezone(timezone.utc)
+
+            if end_time and end_time.tzinfo is None:
+                end_time = end_time.replace(tzinfo=timezone.utc)
+            elif end_time:
+                end_time = end_time.astimezone(timezone.utc)
+
+            if direction == 'before' and end_time:
+                # Get candles BEFORE end_time
+                df = df[df.index < end_time]
+                # Return the most recent N candles before end_time
+                df = df.tail(limit)
+            elif direction == 'after' and start_time:
+                # Get candles AFTER start_time
+                df = df[df.index > start_time]
+                # Return the oldest N candles after start_time
+                df = df.head(limit)
+            else:
+                # Default: filter by range
+                if start_time:
+                    df = df[df.index >= start_time]
+                if end_time:
+                    df = df[df.index <= end_time]
+                # Return most recent candles
+                if len(df) > limit:
+                    df = df.tail(limit)
+
+            return df
+        except Exception as e:
+            logger.error(f"Error filtering data: {e}")
+            # Fallback: return original or empty
+            return df if not df.empty else pd.DataFrame()
 
     def get_multi_timeframe_data(self, symbol: str,
                                  timeframes: List[str],

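As a rough illustration of the new filtering semantics (a standalone sketch, not the project's code): direction='before' keeps the newest candles before a cutoff, direction='after' keeps the oldest candles after it, and all comparisons happen on a UTC-aware DatetimeIndex:

    import pandas as pd

    # Toy frame with a timezone-aware index standing in for cached OHLCV data
    idx = pd.date_range('2025-11-22 00:00', periods=10, freq='1min', tz='UTC')
    df = pd.DataFrame({'close': range(10)}, index=idx)

    cutoff = idx[5]
    before = df[df.index < cutoff].tail(3)  # direction='before': newest 3 candles before cutoff
    after = df[df.index > cutoff].head(3)   # direction='after': oldest 3 candles after cutoff
    print(before.index[-1] < cutoff < after.index[0])  # True
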
View File

@@ -30,7 +30,7 @@ class ChartManager {
         // Update 1s chart every 2 seconds (was 20s)
         if (this.timeframes.includes('1s')) {
             this.updateTimers['1s'] = setInterval(() => {
-                this.updateChart('1s');
+                this.updateChartIncremental('1s');
             }, 2000); // 2 seconds
         }
@@ -42,11 +42,11 @@ class ChartManager {
             // Update on next whole minute
             setTimeout(() => {
-                this.updateChart('1m');
+                this.updateChartIncremental('1m');
 
                 // Then update every 5s
                 this.updateTimers['1m'] = setInterval(() => {
-                    this.updateChart('1m');
+                    this.updateChartIncremental('1m');
                 }, 5000); // 5 seconds
             }, msUntilNextMinute);
         }
@@ -111,6 +111,118 @@ class ChartManager {
         }
     }
 
+    /**
+     * Update chart incrementally by appending only new data
+     * This is much lighter than full chart refresh
+     */
+    async updateChartIncremental(timeframe) {
+        const chart = this.charts[timeframe];
+        if (!chart || !chart.data || !chart.data.timestamps || chart.data.timestamps.length === 0) {
+            // Fallback to full update if no existing data
+            return this.updateChart(timeframe);
+        }
+
+        try {
+            // Get last timestamp from current data
+            const lastTimestamp = chart.data.timestamps[chart.data.timestamps.length - 1];
+
+            // Fetch only data AFTER last timestamp
+            const response = await fetch('/api/chart-data', {
+                method: 'POST',
+                headers: { 'Content-Type': 'application/json' },
+                body: JSON.stringify({
+                    symbol: window.appState?.currentSymbol || 'ETH/USDT',
+                    timeframes: [timeframe],
+                    start_time: lastTimestamp,
+                    limit: 50, // Small limit for incremental update
+                    direction: 'after'
+                })
+            });
+
+            if (!response.ok) {
+                throw new Error(`HTTP ${response.status}`);
+            }
+
+            const result = await response.json();
+
+            if (result.success && result.chart_data && result.chart_data[timeframe]) {
+                const newData = result.chart_data[timeframe];
+
+                // If we got new data
+                if (newData.timestamps.length > 0) {
+                    // Filter out duplicates just in case
+                    const uniqueIndices = [];
+                    const lastTime = new Date(lastTimestamp).getTime();
+
+                    newData.timestamps.forEach((ts, i) => {
+                        if (new Date(ts).getTime() > lastTime) {
+                            uniqueIndices.push(i);
+                        }
+                    });
+
+                    if (uniqueIndices.length === 0) return;
+
+                    const uniqueData = {
+                        timestamps: uniqueIndices.map(i => newData.timestamps[i]),
+                        open: uniqueIndices.map(i => newData.open[i]),
+                        high: uniqueIndices.map(i => newData.high[i]),
+                        low: uniqueIndices.map(i => newData.low[i]),
+                        close: uniqueIndices.map(i => newData.close[i]),
+                        volume: uniqueIndices.map(i => newData.volume[i])
+                    };
+
+                    // Update chart using extendTraces
+                    const plotId = chart.plotId;
+                    Plotly.extendTraces(plotId, {
+                        x: [uniqueData.timestamps],
+                        open: [uniqueData.open],
+                        high: [uniqueData.high],
+                        low: [uniqueData.low],
+                        close: [uniqueData.close]
+                    }, [0]);
+
+                    // Update volume
+                    const volumeColors = uniqueData.close.map((close, i) => {
+                        return close >= uniqueData.open[i] ? '#10b981' : '#ef4444';
+                    });
+                    Plotly.extendTraces(plotId, {
+                        x: [uniqueData.timestamps],
+                        y: [uniqueData.volume],
+                        'marker.color': [volumeColors]
+                    }, [1]);
+
+                    // Update local data cache
+                    chart.data.timestamps.push(...uniqueData.timestamps);
+                    chart.data.open.push(...uniqueData.open);
+                    chart.data.high.push(...uniqueData.high);
+                    chart.data.low.push(...uniqueData.low);
+                    chart.data.close.push(...uniqueData.close);
+                    chart.data.volume.push(...uniqueData.volume);
+
+                    // Keep memory usage in check (limit to 5000 candles)
+                    const MAX_CANDLES = 5000;
+                    if (chart.data.timestamps.length > MAX_CANDLES) {
+                        const dropCount = chart.data.timestamps.length - MAX_CANDLES;
+                        chart.data.timestamps.splice(0, dropCount);
+                        chart.data.open.splice(0, dropCount);
+                        chart.data.high.splice(0, dropCount);
+                        chart.data.low.splice(0, dropCount);
+                        chart.data.close.splice(0, dropCount);
+                        chart.data.volume.splice(0, dropCount);
+                        // Note: Plotly.relayout could be used to shift window, but extending is fine for visual updates
+                    }
+
+                    console.log(`Appended ${uniqueData.timestamps.length} new candles to ${timeframe} chart`);
+                }
+            }
+        } catch (error) {
+            console.error(`Error updating ${timeframe} chart incrementally:`, error);
+        }
+    }
+
     /**
      * Update latest candle on chart (for live updates)
      * Efficiently updates only the last candle or adds a new one