adding model predictions to dash (wip)

Dobromir Popov
2025-06-25 02:59:16 +03:00
parent 11bbe8913a
commit 47173a8554
3 changed files with 268 additions and 9 deletions


@@ -1295,12 +1295,19 @@ class DataProvider:
         try:
             cache_file = self.cache_dir / f"{symbol.replace('/', '')}_{timeframe}.parquet"
             if cache_file.exists():
-                # Check if cache is recent (less than 1 hour old)
+                # Check if cache is recent - stricter rules for startup
                 cache_age = time.time() - cache_file.stat().st_mtime
-                if cache_age < 3600: # 1 hour
+                # For 1m data, use cache only if less than 5 minutes old to avoid gaps
+                if timeframe == '1m':
+                    max_age = 300 # 5 minutes
+                else:
+                    max_age = 3600 # 1 hour for other timeframes
+                if cache_age < max_age:
                     try:
                         df = pd.read_parquet(cache_file)
-                        logger.debug(f"Loaded {len(df)} rows from cache for {symbol} {timeframe}")
+                        logger.debug(f"Loaded {len(df)} rows from cache for {symbol} {timeframe} (age: {cache_age/60:.1f}min)")
                         return df
                     except Exception as parquet_e:
                         # Handle corrupted Parquet file
@@ -1314,7 +1321,7 @@ class DataProvider:
                     else:
                         raise parquet_e
                 else:
-                    logger.debug(f"Cache for {symbol} {timeframe} is too old ({cache_age/3600:.1f}h)")
+                    logger.debug(f"Cache for {symbol} {timeframe} is too old ({cache_age/60:.1f}min > {max_age/60:.1f}min)")
             return None
         except Exception as e:
             logger.warning(f"Error loading cache for {symbol} {timeframe}: {e}")