Clean up models; scale models up to 500M parameters

This commit is contained in:
Dobromir Popov
2025-05-24 23:22:34 +03:00
parent 01f0a2608f
commit d418f6ce59
10 changed files with 3918 additions and 730 deletions

View File

@ -292,74 +292,48 @@ class RealTimeScalpingDashboard:
time.sleep(5)
def _refresh_live_data(self):
    """Refresh live data for all charts with real-time streaming - NO CACHING.

    Forces a fresh candle fetch for every chart's symbol/timeframe pair,
    swaps the result into shared chart state under the data lock, then asks
    the orchestrator to re-evaluate using the newly fetched data.
    """
    logger.info("🔄 Refreshing LIVE data for all charts...")

    # Fetch fresh data for all charts - NO CACHING ALLOWED.
    # ETH drives the full timeframe set; BTC only feeds the 1s ticker chart.
    for symbol in ['ETH/USDT', 'BTC/USDT']:
        if symbol == 'ETH/USDT':
            timeframes = ['1s', '1m', '1h', '1d']
        else:
            timeframes = ['1s']

        for timeframe in timeframes:
            try:
                # Always fetch fresh candles for real-time updates
                fresh_data = self._fetch_fresh_candles(symbol, timeframe, limit=200)
                if fresh_data is not None and not fresh_data.empty:
                    # Guard shared chart state against concurrent readers
                    with self.data_lock:
                        self.chart_data[symbol][timeframe] = fresh_data
                    logger.info(f"✅ Updated {symbol} {timeframe} with {len(fresh_data)} LIVE candles")
                else:
                    logger.warning(f"❌ No fresh data for {symbol} {timeframe}")
            except Exception as e:
                # One failing symbol/timeframe must not abort the whole refresh cycle
                logger.warning(f"Error fetching fresh data for {symbol} {timeframe}: {e}")

    # Update orchestrator for fresh decisions
    self.orchestrator.update()
    logger.info("🔄 LIVE data refresh complete")
def _fetch_fresh_candles(self, symbol: str, timeframe: str, limit: int = 200) -> pd.DataFrame:
    """Fetch fresh candles with NO caching - always real data.

    Args:
        symbol: Trading pair, e.g. 'ETH/USDT'.
        timeframe: Candle interval, e.g. '1s', '1m', '1h', '1d'.
        limit: Maximum number of most-recent candles to return.

    Returns:
        DataFrame of at most ``limit`` candles, or an empty DataFrame when
        no fresh data is available or the fetch fails.
    """
    try:
        # Force fresh data fetch - NO CACHE
        df = self.data_provider.get_historical_data(
            symbol=symbol,
            timeframe=timeframe,
            limit=limit,
            refresh=True  # Force fresh data - critical for real-time
        )

        if df is None or df.empty:
            logger.warning(f"No fresh data available for {symbol} {timeframe}")
            return pd.DataFrame()

        logger.info(f"Fetched {len(df)} fresh candles for {symbol} {timeframe}")
        # Defensive trim in case the provider returns more rows than requested
        return df.tail(limit)

    except Exception as e:
        logger.error(f"Error fetching fresh candles for {symbol} {timeframe}: {e}")
        return pd.DataFrame()
def _create_live_chart(self, symbol: str, timeframe: str, main_chart: bool = False):