try to fix chart updates - wip

Dobromir Popov
2025-12-10 11:58:53 +02:00
parent 1d49269301
commit c7a37bf5f0
9 changed files with 364 additions and 170 deletions


@@ -3775,10 +3775,22 @@ class DataProvider:
             logger.error(f"Error emitting pivot event: {e}", exc_info=True)

     def get_latest_candles(self, symbol: str, timeframe: str, limit: int = 100) -> pd.DataFrame:
-        """Get the latest candles from cached data only"""
+        """Get the latest candles combining cached data with real-time data"""
         try:
-            # Get cached data
-            cached_df = self.get_historical_data(symbol, timeframe, limit=limit)
+            # Check for real-time data first
+            has_real_time_data = False
+            with self.data_lock:
+                if symbol in self.real_time_data and timeframe in self.real_time_data[symbol]:
+                    real_time_candles = list(self.real_time_data[symbol][timeframe])
+                    has_real_time_data = bool(real_time_candles)
+
+            # If no real-time data available, force refresh from API for live updates
+            if not has_real_time_data and limit <= 10:  # Small limit suggests live update request
+                logger.debug(f"No real-time data for {symbol} {timeframe}, forcing API refresh for live update")
+                cached_df = self.get_historical_data(symbol, timeframe, limit=limit, refresh=True)
+            else:
+                # Get cached data normally
+                cached_df = self.get_historical_data(symbol, timeframe, limit=limit)

             # Get real-time data if available
             with self.data_lock:
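
The first hunk changes how the base frame is obtained: under the provider's data lock it checks whether any real-time candles are buffered for the symbol/timeframe, and, per the diff's own comment, treats a small limit as a live-update request that forces an API refresh instead of serving a possibly stale cache. Below is a minimal standalone sketch of that decision, assuming a provider object exposing the data_lock, real_time_data, and get_historical_data(..., refresh=...) members referenced in the diff; the helper name load_base_candles is hypothetical, not part of the repository.

    import pandas as pd

    def load_base_candles(provider, symbol: str, timeframe: str, limit: int) -> pd.DataFrame:
        """Hypothetical helper mirroring the cached-vs-refresh decision in the diff above."""
        # Check for buffered real-time candles under the provider's lock.
        with provider.data_lock:
            has_real_time = (
                symbol in provider.real_time_data
                and timeframe in provider.real_time_data[symbol]
                and len(provider.real_time_data[symbol][timeframe]) > 0
            )
        # A small limit suggests a live-chart poll; with no fresh real-time candles
        # the cache is likely stale, so force an API refresh instead of reusing it.
        if not has_real_time and limit <= 10:
            return provider.get_historical_data(symbol, timeframe, limit=limit, refresh=True)
        return provider.get_historical_data(symbol, timeframe, limit=limit)
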
@@ -3786,24 +3798,29 @@ class DataProvider:
                     real_time_candles = list(self.real_time_data[symbol][timeframe])
                     if real_time_candles:
-                        # Convert to DataFrame
+                        # Convert to DataFrame and ensure proper format
                         rt_df = pd.DataFrame(real_time_candles)
+                        rt_df = self._ensure_datetime_index(rt_df)

                         if cached_df is not None and not cached_df.empty:
                             # Combine cached and real-time
                             # Remove overlapping candles from cached data
                             if not rt_df.empty:
-                                cutoff_time = rt_df['timestamp'].min()
+                                cutoff_time = rt_df.index.min()
                                 cached_df = cached_df[cached_df.index < cutoff_time]

-                            # Concatenate
-                            combined_df = pd.concat([cached_df, rt_df], ignore_index=True)
+                            # Concatenate and sort by index
+                            combined_df = pd.concat([cached_df, rt_df])
+                            combined_df = combined_df.sort_index()
+                            combined_df = combined_df[~combined_df.index.duplicated(keep='last')]
                         else:
                             combined_df = rt_df

+                        logger.debug(f"Combined data for {symbol} {timeframe}: {len(cached_df) if cached_df is not None else 0} cached + {len(rt_df)} real-time")
                         return combined_df.tail(limit)

             # Return just cached data if no real-time data
+            logger.debug(f"Returning cached data only for {symbol} {timeframe}: {len(cached_df) if cached_df is not None else 0} candles")
             return cached_df.tail(limit) if cached_df is not None else pd.DataFrame()
         except Exception as e:
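
The second hunk replaces the positional ignore_index=True concatenation with an index-aware merge: cached rows at or after the first real-time timestamp are dropped, both frames are concatenated on their datetime index, sorted, and de-duplicated keeping the last occurrence. A self-contained sketch of that merge follows, assuming both frames already carry a DatetimeIndex; the function name merge_cached_and_realtime is illustrative only.

    from typing import Optional

    import pandas as pd

    def merge_cached_and_realtime(cached_df: Optional[pd.DataFrame], rt_df: pd.DataFrame, limit: int) -> pd.DataFrame:
        """Illustrative index-based merge of cached and real-time candles."""
        if cached_df is None or cached_df.empty:
            return rt_df.tail(limit)
        if rt_df.empty:
            return cached_df.tail(limit)
        # Drop cached candles that overlap the real-time window, then stitch the frames.
        cutoff = rt_df.index.min()
        combined = pd.concat([cached_df[cached_df.index < cutoff], rt_df])
        combined = combined.sort_index()
        # When the same timestamp appears in both frames, keep the real-time row.
        combined = combined[~combined.index.duplicated(keep='last')]
        return combined.tail(limit)

Using keep='last' after the sort means an in-progress real-time candle supersedes a cached candle with the same open time, which is presumably why the diff sorts by index before de-duplicating rather than relying on concatenation order alone.
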