infinite scroll fix

commit 07b82f0a1f (parent 6e58f4d88f)
Author: Dobromir Popov
Date:   2025-10-24 22:46:44 +03:00
6 changed files with 352 additions and 95 deletions


@@ -44,7 +44,8 @@ class HistoricalDataLoader:
     def get_data(self, symbol: str, timeframe: str,
                  start_time: Optional[datetime] = None,
                  end_time: Optional[datetime] = None,
-                 limit: int = 500) -> Optional[pd.DataFrame]:
+                 limit: int = 500,
+                 direction: str = 'latest') -> Optional[pd.DataFrame]:
         """
         Get historical data for symbol and timeframe
@@ -54,12 +55,13 @@ class HistoricalDataLoader:
             start_time: Start time for data range
             end_time: End time for data range
             limit: Maximum number of candles to return
+            direction: 'latest' (most recent), 'before' (older data), 'after' (newer data)
 
         Returns:
             DataFrame with OHLCV data or None if unavailable
         """
         # Check memory cache first
-        cache_key = f"{symbol}_{timeframe}_{start_time}_{end_time}_{limit}"
+        cache_key = f"{symbol}_{timeframe}_{start_time}_{end_time}_{limit}_{direction}"
         if cache_key in self.memory_cache:
             cached_data, cached_time = self.memory_cache[cache_key]
             if datetime.now() - cached_time < self.cache_ttl:
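
For context: the new direction argument is what the chart's infinite scroll relies on. When the user scrolls back past the loaded history, the frontend asks for candles before the oldest timestamp it already has instead of another "latest" window. A minimal sketch of that call pattern (the load_older_page helper and the example symbol are illustrative, not part of this diff):

    def load_older_page(loader, symbol: str, timeframe: str, oldest_visible, page_size: int = 300):
        # Ask the loader for the candles immediately preceding what the chart
        # already shows; 'before' is the direction value added in this commit.
        return loader.get_data(
            symbol=symbol,
            timeframe=timeframe,
            end_time=oldest_visible,   # upper bound: page backwards from here
            limit=page_size,
            direction='before',
        )

    # e.g. older = load_older_page(loader, 'ETH/USDT', '1m', oldest_visible_timestamp)

Including direction in the cache key matters here: a 'before' page and a 'latest' refresh with otherwise identical arguments no longer collide in the memory cache.
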
@@ -152,8 +154,7 @@ class HistoricalDataLoader:
                 logger.info(f"Loaded {len(df)} candles for {symbol} {timeframe}")
                 return df
 
-        # Fallback: fetch from DataProvider's historical data method
-        # During startup, allow stale cache to avoid slow API calls
+        # Fallback: Try DuckDB first, then fetch from API if needed
         if self.startup_mode:
             logger.info(f"Loading data for {symbol} {timeframe} (startup mode: allow stale cache)")
             df = self.data_provider.get_historical_data(
@@ -163,11 +164,33 @@ class HistoricalDataLoader:
                 allow_stale_cache=True
             )
         else:
-            logger.info(f"Fetching fresh data for {symbol} {timeframe}")
+            # Check DuckDB first for historical data
+            if self.data_provider.duckdb_storage and (start_time or end_time):
+                logger.info(f"Checking DuckDB for {symbol} {timeframe} historical data (direction={direction})")
+                df = self.data_provider.duckdb_storage.get_ohlcv_data(
+                    symbol=symbol,
+                    timeframe=timeframe,
+                    start_time=start_time,
+                    end_time=end_time,
+                    limit=limit,
+                    direction=direction
+                )
+                if df is not None and not df.empty:
+                    logger.info(f"✅ Loaded {len(df)} candles from DuckDB for {symbol} {timeframe}")
+                    # Cache in memory
+                    self.memory_cache[cache_key] = (df.copy(), datetime.now())
+                    return df
+                else:
+                    logger.info(f"No data in DuckDB, fetching from API for {symbol} {timeframe}")
+
+            # Fetch from API and store in DuckDB
+            logger.info(f"Fetching data from API for {symbol} {timeframe}")
             df = self.data_provider.get_historical_data(
                 symbol=symbol,
                 timeframe=timeframe,
-                limit=limit
+                limit=limit,
+                refresh=True  # Force API fetch
             )
 
         if df is not None and not df.empty:
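
On the storage side, direction is forwarded to duckdb_storage.get_ohlcv_data so a backwards page can be answered with one bounded query instead of re-reading history from the start. The project's storage code is not shown in this diff; purely as an illustration of the selection that 'before' implies (the ohlcv table, its columns, and the helper name are assumptions), a DuckDB query could look like:

    import duckdb

    def fetch_page_before(conn: duckdb.DuckDBPyConnection, symbol: str,
                          timeframe: str, end_time, limit: int):
        # Illustrative sketch only: take the newest `limit` rows older than
        # end_time, then flip them back into chronological order for the chart.
        query = """
            SELECT * FROM (
                SELECT timestamp, open, high, low, close, volume
                FROM ohlcv
                WHERE symbol = ? AND timeframe = ? AND timestamp < ?
                ORDER BY timestamp DESC
                LIMIT ?
            )
            ORDER BY timestamp ASC
        """
        return conn.execute(query, [symbol, timeframe, end_time, limit]).fetchdf()

Bounding on end_time rather than using offset paging keeps each scroll step roughly constant-cost no matter how far back the user has scrolled.
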