remove ws, fix predictions
@@ -89,14 +89,32 @@ class HistoricalDataLoader:
     try:
         # FORCE refresh for 1s/1m if requesting latest data OR incremental update
         force_refresh = (timeframe in ['1s', '1m'] and (bypass_cache or (not start_time and not end_time)))

         # Try to get data from DataProvider's cached data first (most efficient)
         if hasattr(self.data_provider, 'cached_data'):
             with self.data_provider.data_lock:
                 cached_df = self.data_provider.cached_data.get(symbol, {}).get(timeframe)

             if cached_df is not None and not cached_df.empty:
                 # Use cached data if we have enough candles
-                if len(cached_df) >= min(limit, 100):  # Use cached if we have at least 100 candles
+                # If time range is specified, check if cached data covers it
+                use_cached_data = True
+                if start_time or end_time:
+                    if isinstance(cached_df.index, pd.DatetimeIndex):
+                        cache_start = cached_df.index.min()
+                        cache_end = cached_df.index.max()
+
+                        # Check if requested range is within cached range
+                        if start_time and start_time < cache_start:
+                            use_cached_data = False
+                        elif end_time and end_time > cache_end:
+                            use_cached_data = False
+                        elif start_time and end_time:
+                            # Both specified - check if range overlaps
+                            if end_time < cache_start or start_time > cache_end:
+                                use_cached_data = False
+
+                # Use cached data if we have enough candles and it covers the range
+                if use_cached_data and len(cached_df) >= min(limit, 100):  # Use cached if we have at least 100 candles
                     elapsed_ms = (time.time() - start_time_ms) * 1000
                     logger.debug(f" DataProvider cache hit for {symbol} {timeframe} ({len(cached_df)} candles, {elapsed_ms:.1f}ms)")
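The added guard is purely negative: it only flips `use_cached_data` to False when the requested window clearly falls outside the cached index, and it is skipped entirely when the index is not a `pd.DatetimeIndex`. The final `elif` (overlap check) is defensive rather than load-bearing, since any range that passes the first two checks already overlaps the cache; the sketch below omits it. A minimal standalone version of the same logic; the name `covers_range` and the sample timestamps are illustrative, not from the repo:

```python
import pandas as pd

def covers_range(index: pd.DatetimeIndex, start_time=None, end_time=None) -> bool:
    """Return False only when the requested window clearly exceeds the cached index."""
    if index.empty:
        return False
    cache_start, cache_end = index.min(), index.max()
    if start_time is not None and start_time < cache_start:
        return False  # need older data than we have cached
    if end_time is not None and end_time > cache_end:
        return False  # need newer data than we have cached
    return True

idx = pd.date_range("2024-01-01", periods=100, freq="1min")
assert covers_range(idx, start_time=idx[10], end_time=idx[50])
assert not covers_range(idx, start_time=idx[0] - pd.Timedelta(minutes=5))
```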
@@ -109,9 +127,12 @@ class HistoricalDataLoader:
                         limit
                     )

-                    # Cache in memory
-                    self.memory_cache[cache_key] = (filtered_df, datetime.now())
-                    return filtered_df
+                    # Only return cached data if filter produced results
+                    if filtered_df is not None and not filtered_df.empty:
+                        # Cache in memory
+                        self.memory_cache[cache_key] = (filtered_df, datetime.now())
+                        return filtered_df
+                    # If filter returned empty, fall through to fetch from DuckDB/API

         # Try unified storage first if available
         if hasattr(self.data_provider, 'is_unified_storage_enabled') and \
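This hunk is the substantive fix behind the commit title: the old code cached and returned `filtered_df` unconditionally, so an empty filter result was served as if it were data, which plausibly fed the broken predictions. A sketch of the corrected pattern; `memory_cache`, `cache_key`, and `fetch_fallback` are hypothetical stand-ins for the loader's internals:

```python
from datetime import datetime

def cached_or_fallback(memory_cache: dict, cache_key, filtered_df, fetch_fallback):
    # Short-circuit only when the filter actually produced rows; an empty
    # frame means the cached slice cannot satisfy this request.
    if filtered_df is not None and not filtered_df.empty:
        memory_cache[cache_key] = (filtered_df, datetime.now())
        return filtered_df
    # Empty result: fall through to the slower but authoritative source.
    return fetch_fallback()
```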
@@ -156,28 +177,47 @@ class HistoricalDataLoader:
         except Exception as e:
             logger.debug(f"Unified storage not available, falling back to cached data: {e}")

-        # Fallback to existing cached data method
-        # Use DataProvider's cached data if available
+        # Fallback to existing cached data method (duplicate check - should not reach here if first check worked)
+        # This is kept for backward compatibility but should rarely execute
         if hasattr(self.data_provider, 'cached_data'):
             if symbol in self.data_provider.cached_data:
                 if timeframe in self.data_provider.cached_data[symbol]:
                     df = self.data_provider.cached_data[symbol][timeframe]

                     if df is not None and not df.empty:
-                        # Filter by time range with direction support
-                        df = self._filter_by_time_range(
-                            df.copy(),
-                            start_time,
-                            end_time,
-                            direction,
-                            limit
-                        )
+                        # Check if cached data covers the requested time range
+                        use_cached_data = True
+                        if start_time or end_time:
+                            if isinstance(df.index, pd.DatetimeIndex):
+                                cache_start = df.index.min()
+                                cache_end = df.index.max()
+
+                                if start_time and start_time < cache_start:
+                                    use_cached_data = False
+                                elif end_time and end_time > cache_end:
+                                    use_cached_data = False
+                                elif start_time and end_time:
+                                    if end_time < cache_start or start_time > cache_end:
+                                        use_cached_data = False

-                        # Cache in memory
-                        self.memory_cache[cache_key] = (df.copy(), datetime.now())
-
-                        logger.info(f"Loaded {len(df)} candles for {symbol} {timeframe}")
-                        return df
+                        if use_cached_data:
+                            # Filter by time range with direction support
+                            df = self._filter_by_time_range(
+                                df.copy(),
+                                start_time,
+                                end_time,
+                                direction,
+                                limit
+                            )
+
+                            # Only return if filter produced results
+                            if df is not None and not df.empty:
+                                # Cache in memory
+                                self.memory_cache[cache_key] = (df.copy(), datetime.now())
+
+                                logger.info(f"Loaded {len(df)} candles for {symbol} {timeframe}")
+                                return df
+                        # If filter returned empty or range not covered, fall through to fetch from DuckDB/API

         # Check DuckDB first for historical data (always check for infinite scroll)
         if self.data_provider.duckdb_storage and (start_time or end_time):
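`_filter_by_time_range` itself is not part of this diff. Judging only from its call sites (`df`, `start_time`, `end_time`, `direction`, `limit`), it plausibly looks like the sketch below; every detail here, including the `direction` values, is an assumption:

```python
import pandas as pd

def filter_by_time_range(df: pd.DataFrame, start_time=None, end_time=None,
                         direction: str = 'latest', limit: int = 500) -> pd.DataFrame:
    """Hypothetical reconstruction of the helper's shape, not the repo's code."""
    if start_time is not None:
        df = df[df.index >= start_time]
    if end_time is not None:
        df = df[df.index <= end_time]
    # Assumed semantics: 'oldest' keeps the first `limit` rows, anything else the last.
    return df.head(limit) if direction == 'oldest' else df.tail(limit)
```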
@@ -198,7 +238,7 @@ class HistoricalDataLoader:
                     self.memory_cache[cache_key] = (df.copy(), datetime.now())
                     return df
                 else:
-                    logger.info(f"📡 No data in DuckDB, fetching from exchange API for {symbol} {timeframe}")
+                    logger.info(f"No data in DuckDB, fetching from exchange API for {symbol} {timeframe}")

         # Fetch from exchange API with time range
         df = self._fetch_from_exchange_api(
@@ -212,7 +252,7 @@ class HistoricalDataLoader:

         if df is not None and not df.empty:
             elapsed_ms = (time.time() - start_time_ms) * 1000
-            logger.info(f"🌐 Exchange API hit for {symbol} {timeframe} ({len(df)} candles, {elapsed_ms:.1f}ms)")
+            logger.info(f"Exchange API hit for {symbol} {timeframe} ({len(df)} candles, {elapsed_ms:.1f}ms)")

             # Store in DuckDB for future use
             if self.data_provider.duckdb_storage:
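Taken together, the diff leaves the loader with a tiered lookup: memory cache and DataProvider cache first, then unified storage, DuckDB, and finally the exchange API, with each empty or non-covering tier falling through to the next. Schematically, under the assumption that each tier can be modeled as a callable returning a DataFrame or None:

```python
def load_first_hit(sources, symbol, timeframe, start_time=None, end_time=None):
    """Schematic only: `sources` is an ordered list of hypothetical tier callables."""
    for fetch in sources:
        df = fetch(symbol, timeframe, start_time, end_time)
        if df is not None and not df.empty:
            return df  # first tier that can satisfy the request wins
    return None
```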