wip wip wip

This commit is contained in:
Dobromir Popov
2025-10-23 18:57:07 +03:00
parent b0771ff34e
commit 0225f4df58
17 changed files with 2739 additions and 756 deletions

View File

@@ -582,6 +582,65 @@ class DataProvider:
logger.error(f"Error loading initial data for {symbol} {timeframe}: {e}")
logger.info("Initial data load completed")
# Catch up on missing candles if needed
self._catch_up_missing_candles()
def _catch_up_missing_candles(self):
    """
    Catch up on missing candles at startup.

    For every (symbol, timeframe) pair, ensure the in-memory cache holds up
    to ``target_candles`` (1500) of the most recent candles. Missing history
    is fetched from Binance first (usually better historical coverage),
    falling back to MEXC, then merged into ``self.cached_data`` with
    duplicate timestamps dropped (freshly fetched rows win) and the result
    trimmed to the newest ``target_candles`` rows.

    Best-effort: a failure for one pair is logged (with traceback) and the
    loop continues with the next pair; startup is never aborted.
    """
    logger.info("Checking for missing candles to catch up...")
    target_candles = 1500  # Target number of candles per timeframe
    for symbol in self.symbols:
        for timeframe in self.timeframes:
            try:
                # How many candles do we already hold for this pair?
                current_df = self.cached_data[symbol][timeframe]
                current_count = len(current_df) if not current_df.empty else 0
                if current_count >= target_candles:
                    logger.debug(f"{symbol} {timeframe}: Already have {current_count} candles (target: {target_candles})")
                    continue
                # Calculate how many candles we need
                needed = target_candles - current_count
                logger.info(f"{symbol} {timeframe}: Need {needed} more candles (have {current_count}/{target_candles})")
                # Try Binance first (usually has better historical data)
                df = self._fetch_from_binance(symbol, timeframe, needed)
                if df is None or df.empty:
                    # Fallback to MEXC
                    logger.debug(f"Binance fetch failed for {symbol} {timeframe}, trying MEXC...")
                    df = self._fetch_from_mexc(symbol, timeframe, needed)
                if df is not None and not df.empty:
                    # Normalize the index before merging so concat/dedup align
                    # on timestamps rather than positional labels.
                    df = self._ensure_datetime_index(df)
                    if not current_df.empty:
                        # Merge with existing data: drop duplicate timestamps
                        # (keep='last' prefers the freshly fetched rows), sort
                        # chronologically, and keep only the newest rows.
                        combined_df = pd.concat([current_df, df], ignore_index=False)
                        combined_df = combined_df[~combined_df.index.duplicated(keep='last')]
                        combined_df = combined_df.sort_index()
                        self.cached_data[symbol][timeframe] = combined_df.tail(target_candles)
                    else:
                        self.cached_data[symbol][timeframe] = df.tail(target_candles)
                    final_count = len(self.cached_data[symbol][timeframe])
                    logger.info(f"{symbol} {timeframe}: Caught up! Now have {final_count} candles")
                else:
                    logger.warning(f"{symbol} {timeframe}: Could not fetch historical data from any exchange")
            except Exception as e:
                # logger.exception preserves the traceback (logger.error was
                # dropping it); the catch-up still continues with the next pair.
                logger.exception(f"Error catching up candles for {symbol} {timeframe}: {e}")
    logger.info("Candle catch-up completed")
def _update_cached_data(self, symbol: str, timeframe: str):
"""Update cached data by fetching last 2 candles"""