remove emojis from console

Dobromir Popov
2025-10-25 16:35:08 +03:00
parent 5aa4925cff
commit b8f54e61fa
75 changed files with 828 additions and 828 deletions
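
A sweep like this, with equal insertions and deletions across 75 files, is easy to script. The sketch below is a hypothetical way to run it, not the tool used for this commit; the emoji ranges are approximate and the file glob is an assumption:

import re
from pathlib import Path

# Approximate emoji ranges (assumption, not exhaustive)
EMOJI_RE = re.compile(
    "["
    "\U0001F300-\U0001FAFF"  # pictographs such as the 🔄 📊 🆕 📦 seen below
    "\u2600-\u27BF"          # dingbats and signs such as ⚠
    "\uFE0F"                 # variation selector that often trails an emoji
    "]"
)

for path in Path(".").rglob("*.py"):
    source = path.read_text(encoding="utf-8")
    cleaned = EMOJI_RE.sub("", source)
    if cleaned != source:
        path.write_text(cleaned, encoding="utf-8")
        print(f"stripped emojis from {path}")

Substituting just the emoji character leaves the space that followed it, which matches the leading space still visible inside the replacement strings in the hunks below.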


@@ -277,7 +277,7 @@ class DataProvider:
         if DUCKDB_STORAGE_AVAILABLE:
             try:
                 self.duckdb_storage = DuckDBStorage()
-                logger.info(" DuckDB storage initialized (unified Parquet + SQL)")
+                logger.info(" DuckDB storage initialized (unified Parquet + SQL)")
             except Exception as e:
                 logger.warning(f"Could not initialize DuckDB storage: {e}")
@@ -396,7 +396,7 @@ class DataProvider:
         if success:
             self._unified_storage_enabled = True
-            logger.info(" Unified storage system enabled successfully")
+            logger.info(" Unified storage system enabled successfully")
             return True
         else:
             logger.error("Failed to enable unified storage system")
@@ -550,7 +550,7 @@ class DataProvider:
         else:
             logger.info("Skipping initial data load (using DuckDB cache)")
-        logger.info(" Initial data load completed - stopping maintenance worker")
+        logger.info(" Initial data load completed - stopping maintenance worker")
         logger.info("📊 Data will be updated on-demand only (no continuous fetching)")
         # Stop the maintenance worker after initial load
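
The comment above about stopping the maintenance worker implies signalling a background thread; a minimal sketch assuming a threading.Event-based worker (the actual worker code is not part of this diff):

import threading
import time

stop_event = threading.Event()

def maintenance_worker() -> None:
    # Placeholder loop; the real worker would refresh cached data periodically
    while not stop_event.is_set():
        time.sleep(1.0)

worker = threading.Thread(target=maintenance_worker, daemon=True)
worker.start()
# Once the initial data load completes:
stop_event.set()
worker.join()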
@@ -602,7 +602,7 @@ class DataProvider:
                         # Cap at 1500 candles maximum
                         fetch_limit = min(estimated_missing + 10, 1500)
-                        logger.info(f"🔄 Fetching {fetch_limit} recent candles for {symbol} {timeframe} (since {last_timestamp})")
+                        logger.info(f" Fetching {fetch_limit} recent candles for {symbol} {timeframe} (since {last_timestamp})")

                         new_df = self._fetch_from_binance(symbol, timeframe, fetch_limit)
                         if new_df is None or new_df.empty:
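
The fetch_limit above caps a gap estimate at 1500 candles. A minimal sketch of how estimated_missing can be derived from the last cached timestamp; the helper name and the timeframe table are assumptions, not code from this diff:

from datetime import datetime, timezone

TIMEFRAME_SECONDS = {"1m": 60, "5m": 300, "1h": 3600, "1d": 86400}  # assumed set

def estimate_missing(last_timestamp: datetime, timeframe: str) -> int:
    """Candles that should have closed since the last cached one."""
    elapsed = (datetime.now(timezone.utc) - last_timestamp).total_seconds()
    return max(0, int(elapsed // TIMEFRAME_SECONDS[timeframe]))

last_timestamp = datetime(2025, 10, 25, 12, 0, tzinfo=timezone.utc)  # example value
fetch_limit = min(estimate_missing(last_timestamp, "1m") + 10, 1500)  # mirrors the hunk above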
@@ -622,9 +622,9 @@ class DataProvider:
                             combined_df = combined_df.sort_index()
                             self.cached_data[symbol][timeframe] = combined_df.tail(1500)
-                            logger.info(f" {symbol} {timeframe}: +{len(new_df)} new (total: {len(self.cached_data[symbol][timeframe])})")
+                            logger.info(f" {symbol} {timeframe}: +{len(new_df)} new (total: {len(self.cached_data[symbol][timeframe])})")
                         else:
-                            logger.info(f" {symbol} {timeframe}: Up to date ({len(existing_df)} candles)")
+                            logger.info(f" {symbol} {timeframe}: Up to date ({len(existing_df)} candles)")
                     else:
                         # No existing data - fetch initial 1500 candles
                         logger.info(f"🆕 No existing data, fetching 1500 candles for {symbol} {timeframe}")
@@ -643,7 +643,7 @@ class DataProvider:
                         with self.data_lock:
                             self.cached_data[symbol][timeframe] = df
-                        logger.info(f" Loaded {len(df)} candles for {symbol} {timeframe}")
+                        logger.info(f" Loaded {len(df)} candles for {symbol} {timeframe}")

                         # Small delay to avoid rate limits
                         time.sleep(0.1)
@@ -651,7 +651,7 @@ class DataProvider:
             except Exception as e:
                 logger.error(f"Error loading data for {symbol} {timeframe}: {e}")

-        logger.info(" Smart incremental data load completed")
+        logger.info(" Smart incremental data load completed")

     def _start_background_catch_up(self):
         """
@@ -737,9 +737,9 @@ class DataProvider:
                         final_count = len(self.cached_data[symbol][timeframe])
-                        logger.info(f" {symbol} {timeframe}: Caught up! Now have {final_count} candles")
+                        logger.info(f" {symbol} {timeframe}: Caught up! Now have {final_count} candles")
                     else:
-                        logger.warning(f" {symbol} {timeframe}: Could not fetch historical data from any exchange")
+                        logger.warning(f" {symbol} {timeframe}: Could not fetch historical data from any exchange")

                 except Exception as e:
                     logger.error(f"Error catching up candles for {symbol} {timeframe}: {e}")
@@ -775,7 +775,7 @@ class DataProvider:
                 # Cap at 1500 candles maximum
                 fetch_limit = min(estimated_missing + 5, 1500)
-                logger.info(f"🔄 Fetching {fetch_limit} recent candles for {symbol} {timeframe} (since {last_timestamp})")
+                logger.info(f" Fetching {fetch_limit} recent candles for {symbol} {timeframe} (since {last_timestamp})")

                 # Fetch missing candles
                 df = self._fetch_from_binance(symbol, timeframe, fetch_limit)
@@ -811,7 +811,7 @@ class DataProvider:
                     candle_count = len(self.cached_data[symbol][timeframe])
-                    logger.info(f" Updated {symbol} {timeframe}: +{len(df)} new (total: {candle_count})")
+                    logger.info(f" Updated {symbol} {timeframe}: +{len(df)} new (total: {candle_count})")
                 else:
                     logger.warning(f"Could not fetch new data for {symbol} {timeframe}")
             else:
@@ -827,17 +827,17 @@ class DataProvider:
         try:
             if symbol and timeframe:
                 # Refresh specific symbol/timeframe
-                logger.info(f"🔄 Manual refresh requested for {symbol} {timeframe}")
+                logger.info(f" Manual refresh requested for {symbol} {timeframe}")
                 self._update_cached_data(symbol, timeframe)
             else:
                 # Refresh all symbols/timeframes
-                logger.info("🔄 Manual refresh requested for all symbols/timeframes")
+                logger.info(" Manual refresh requested for all symbols/timeframes")
                 for sym in self.symbols:
                     for tf in self.timeframes:
                         self._update_cached_data(sym, tf)
                         time.sleep(0.1)  # Small delay to avoid rate limits
-                logger.info(" Manual refresh completed for all symbols/timeframes")
+                logger.info(" Manual refresh completed for all symbols/timeframes")
         except Exception as e:
             logger.error(f"Error in manual refresh: {e}")
@@ -3107,7 +3107,7 @@ class DataProvider:
     def _load_from_duckdb_sync(self):
         """Load all data from DuckDB synchronously for instant startup"""
         if not self.duckdb_storage:
-            logger.warning("⚠️ DuckDB storage not available - cannot load cached data")
+            logger.warning(" DuckDB storage not available - cannot load cached data")
             return

         logger.info("📦 Loading cached data from DuckDB...")
@@ -3125,18 +3125,18 @@ class DataProvider:
                     if df is not None and not df.empty:
                         with self.data_lock:
                             self.cached_data[symbol][timeframe] = df.tail(1500)
-                        logger.info(f" {symbol} {timeframe}: {len(df)} candles from DuckDB")
+                        logger.info(f" {symbol} {timeframe}: {len(df)} candles from DuckDB")
                         loaded_count += len(df)
                     else:
                         logger.debug(f"No data in DuckDB for {symbol} {timeframe} - will fetch from API")
                 except Exception as e:
-                    logger.error(f" Error loading {symbol} {timeframe}: {e}")
+                    logger.error(f" Error loading {symbol} {timeframe}: {e}")

         if loaded_count > 0:
-            logger.info(f" Loaded {loaded_count:,} candles total")
+            logger.info(f" Loaded {loaded_count:,} candles total")
         else:
-            logger.warning("⚠️ No cached data found - will fetch from API")
+            logger.warning(" No cached data found - will fetch from API")

     def _load_from_duckdb(self, symbol: str, timeframe: str, limit: int = 1500) -> Optional[pd.DataFrame]:
         """Load data from DuckDB storage
@@ -3338,7 +3338,7 @@ class DataProvider:
     async def _start_fallback_websocket_streaming(self):
         """Fallback to old WebSocket method if Enhanced COB WebSocket fails"""
         try:
-            logger.warning("⚠️ Starting fallback WebSocket streaming")
+            logger.warning(" Starting fallback WebSocket streaming")

             # Start old WebSocket for each symbol
             for symbol in self.symbols:
@@ -3346,7 +3346,7 @@ class DataProvider:
                 self.websocket_tasks[symbol] = task

         except Exception as e:
-            logger.error(f" Error starting fallback WebSocket: {e}")
+            logger.error(f" Error starting fallback WebSocket: {e}")

     def get_cob_websocket_status(self) -> Dict[str, Any]:
         """Get COB WebSocket status for dashboard"""