fix emojis
@@ -118,7 +118,7 @@ class COBIntegration:
     async def _on_enhanced_cob_update(self, symbol: str, cob_data: Dict):
         """Handle COB updates from Enhanced WebSocket"""
         try:
-            logger.debug(f"📊 Enhanced WebSocket COB update for {symbol}")
+            logger.debug(f"Enhanced WebSocket COB update for {symbol}")
 
             # Convert enhanced WebSocket data to COB format for existing callbacks
             # Notify CNN callbacks

@@ -29,7 +29,7 @@ async def test_bybit_balance():
         print("ERROR: Failed to connect to Bybit")
         return
 
-    print("✓ Connected to Bybit successfully")
+    print("Connected to Bybit successfully")
 
     # Test get_balance for USDT
    print("\nTesting get_balance('USDT')...")

@@ -97,7 +97,7 @@ def debug_interface():
     print(f"Manual signature: {manual_signature}")
 
     # Compare parameters
-    print(f"\n📊 COMPARISON:")
+    print(f"\nCOMPARISON:")
     print(f"symbol: Interface='{interface_params['symbol']}', Manual='{manual_params['symbol']}' {'' if interface_params['symbol'] == manual_params['symbol'] else ''}")
     print(f"side: Interface='{interface_params['side']}', Manual='{manual_params['side']}' {'' if interface_params['side'] == manual_params['side'] else ''}")
     print(f"type: Interface='{interface_params['type']}', Manual='{manual_params['type']}' {'' if interface_params['type'] == manual_params['type'] else ''}")

@@ -111,7 +111,7 @@ def debug_interface():
     print(f"timeInForce: Interface='{interface_params['timeInForce']}', Manual=None (EXTRA PARAMETER)")
 
     # Test without timeInForce
-    print(f"\n🔧 TESTING WITHOUT timeInForce:")
+    print(f"\nTESTING WITHOUT timeInForce:")
     interface_params_minimal = interface_params.copy()
     del interface_params_minimal['timeInForce']
 

@@ -817,7 +817,7 @@ class TradingOrchestrator:
                     'status': 'loaded'
                 }
 
-                logger.info(f"✅ Loaded transformer checkpoint: {os.path.basename(checkpoint_path)}")
+                logger.info(f"Loaded transformer checkpoint: {os.path.basename(checkpoint_path)}")
                 logger.info(f"   Epoch: {epoch}, Loss: {loss:.6f}, Accuracy: {accuracy:.2%}, LR: {learning_rate:.6f}")
                 checkpoint_loaded = True
             else:

@@ -1154,7 +1154,7 @@ class TradingOrchestrator:
 
             logger.info("Orchestrator session data cleared")
             logger.info("🧠 Model states preserved for continued training")
-            logger.info("📊 Prediction history cleared")
+            logger.info("Prediction history cleared")
             logger.info("💼 Position tracking reset")
 
         except Exception as e:

@@ -1711,10 +1711,10 @@ class TradingOrchestrator:
                 self.dashboard, "update_cob_data_from_orchestrator"
             ):
                 self.dashboard.update_cob_data_from_orchestrator(symbol, cob_data)
-                logger.debug(f"📊 Sent COB data for {symbol} to dashboard")
+                logger.debug(f"Sent COB data for {symbol} to dashboard")
             else:
                 logger.debug(
-                    f"📊 No dashboard connected to receive COB data for {symbol}"
+                    f"No dashboard connected to receive COB data for {symbol}"
                 )
 
         except Exception as e:

@@ -2811,6 +2811,25 @@ class TradingOrchestrator:
         self.trading_executor = trading_executor
         logger.info("Trading executor set for position tracking and P&L feedback")
 
+    def get_latest_transformer_prediction(self, symbol: str = 'ETH/USDT') -> Optional[Dict]:
+        """
+        Get latest transformer prediction with next_candles data for ghost candle display
+        Returns dict with predicted OHLCV for each timeframe
+        """
+        try:
+            if not self.primary_transformer:
+                return None
+
+            # Get recent predictions from storage
+            if symbol in self.recent_transformer_predictions and self.recent_transformer_predictions[symbol]:
+                return dict(self.recent_transformer_predictions[symbol][-1])
+
+            return None
+
+        except Exception as e:
+            logger.debug(f"Error getting latest transformer prediction: {e}")
+            return None
+
     def store_transformer_prediction(self, symbol: str, prediction: Dict):
         """Store a transformer prediction for visualization and tracking"""
         try:
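Note: the hunk above adds get_latest_transformer_prediction as the accessor a dashboard would poll to render ghost (predicted) candles. A minimal usage sketch follows; the helper name is hypothetical, and the shape of the 'next_candles' payload is inferred from the method's docstring rather than defined by this commit.

from typing import Any, Dict, Optional

def latest_ghost_candles(orchestrator: Any, symbol: str = 'ETH/USDT') -> Optional[Dict]:
    """Fetch the newest transformer prediction for ghost-candle rendering (illustrative helper)."""
    # get_latest_transformer_prediction() comes from this commit; it returns None
    # when no transformer is loaded or no prediction has been stored yet.
    prediction = orchestrator.get_latest_transformer_prediction(symbol)
    if not prediction:
        return None
    # 'next_candles' is named in the docstring; treating it as a per-timeframe
    # OHLCV mapping is an assumption for illustration.
    return prediction.get('next_candles')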
@@ -661,7 +661,7 @@ class OvernightTrainingCoordinator:
         logger.info("=" * 80)
 
         # Overall statistics
-        logger.info(f"📊 OVERALL STATISTICS:")
+        logger.info(f"OVERALL STATISTICS:")
         logger.info(f"   Total Signals Processed: {self.performance_stats['total_signals']}")
         logger.info(f"   Total Trades Executed: {self.performance_stats['total_trades']}")
         logger.info(f"   Successful Trades: {self.performance_stats['successful_trades']}")

@@ -679,7 +679,7 @@ class OvernightTrainingCoordinator:
         executed_trades = [r for r in recent_records if r.executed]
         successful_trades = [r for r in executed_trades if r.trade_pnl and r.trade_pnl > 0]
 
-        logger.info(f"📈 RECENT PERFORMANCE (Last 20 signals):")
+        logger.info(f"RECENT PERFORMANCE (Last 20 signals):")
         logger.info(f"   Signals: {len(recent_records)}")
         logger.info(f"   Executed: {len(executed_trades)}")
         logger.info(f"   Successful: {len(successful_trades)}")

@@ -75,7 +75,7 @@ class RealtimePredictionLoop:
                 new_candle_detected, timeframe = await self._detect_new_candle(symbol)
 
                 if new_candle_detected:
-                    logger.info(f"📊 New {timeframe} candle detected for {symbol} - running predictions")
+                    logger.info(f"New {timeframe} candle detected for {symbol} - running predictions")
                     await self._run_all_model_predictions(symbol, trigger=f"new_{timeframe}_candle")
 
                 # 2. Check for pivot point

@@ -803,7 +803,7 @@ class TradingExecutor:
                     self.max_profitability_multiplier,
                     self.profitability_reward_multiplier + self.profitability_adjustment_step
                 )
-                logger.info(f"🎯 SUCCESS RATE HIGH ({success_rate:.1%}) - Increased profitability multiplier: {old_multiplier:.1f} → {self.profitability_reward_multiplier:.1f}")
+                logger.info(f"SUCCESS RATE HIGH ({success_rate:.1%}) - Increased profitability multiplier: {old_multiplier:.1f} -> {self.profitability_reward_multiplier:.1f}")
 
             # Decrease multiplier if success rate < 51%
             elif success_rate < self.success_rate_decrease_threshold:

@@ -811,7 +811,7 @@ class TradingExecutor:
                     self.min_profitability_multiplier,
                     self.profitability_reward_multiplier - self.profitability_adjustment_step
                 )
-                logger.info(f" SUCCESS RATE LOW ({success_rate:.1%}) - Decreased profitability multiplier: {old_multiplier:.1f} → {self.profitability_reward_multiplier:.1f}")
+                logger.info(f" SUCCESS RATE LOW ({success_rate:.1%}) - Decreased profitability multiplier: {old_multiplier:.1f} -> {self.profitability_reward_multiplier:.1f}")
 
             else:
                 logger.debug(f"Success rate {success_rate:.1%} in acceptable range - keeping multiplier at {self.profitability_reward_multiplier:.1f}")
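Note: the two TradingExecutor hunks above step a profitability reward multiplier up when the recent success rate is high and down when it is low, clamping it between configured bounds. A standalone sketch of that clamped-step rule is below; the step, bound, and increase-threshold values are illustrative assumptions (only the 51% decrease threshold and the min()/max() clamping pattern come from the diff).

def adjust_profitability_multiplier(current: float, success_rate: float,
                                    step: float = 0.1, lower: float = 1.0, upper: float = 3.0,
                                    increase_at: float = 0.60, decrease_at: float = 0.51) -> float:
    """Step the multiplier within [lower, upper] based on the recent trade success rate."""
    if success_rate > increase_at:
        # Mirrors the increase branch: never exceed the configured maximum multiplier.
        return min(upper, current + step)
    if success_rate < decrease_at:
        # Mirrors the decrease branch: never drop below the configured minimum multiplier.
        return max(lower, current - step)
    # Acceptable range: keep the multiplier unchanged.
    return current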
@@ -2168,9 +2168,9 @@ class TradingExecutor:
             f.write(f"Export Time: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
             f.write(f"Data File: {filename}\n")
 
-        logger.info(f"📊 Trade history exported to: {filepath}")
-        logger.info(f"📈 Trade summary saved to: {summary_filepath}")
-        logger.info(f"📊 Total Trades: {total_trades} | Win Rate: {win_rate:.1f}% | Total P&L: ${total_pnl:.2f}")
+        logger.info(f"Trade history exported to: {filepath}")
+        logger.info(f"Trade summary saved to: {summary_filepath}")
+        logger.info(f"Total Trades: {total_trades} | Win Rate: {win_rate:.1f}% | Total P&L: ${total_pnl:.2f}")
 
         return str(filepath)

@@ -67,7 +67,7 @@ class UnifiedDataProviderExtension:
 
         # Initialize cache manager
        self.cache_manager = DataCacheManager(cache_duration_seconds=300)
-        logger.info("✓ Cache manager initialized")
+        logger.info("Cache manager initialized")
 
         # Initialize database connection
         self.db_connection = DatabaseConnectionManager(self.config)

@@ -77,11 +77,11 @@ class UnifiedDataProviderExtension:
             logger.error("Failed to initialize database connection")
             return False
 
-        logger.info("✓ Database connection initialized")
+        logger.info("Database connection initialized")
 
         # Initialize query manager
         self.db_query_manager = UnifiedDatabaseQueryManager(self.db_connection)
-        logger.info("✓ Query manager initialized")
+        logger.info("Query manager initialized")
 
         # Initialize ingestion pipeline
         self.ingestion_pipeline = DataIngestionPipeline(

@@ -93,7 +93,7 @@ class UnifiedDataProviderExtension:
 
         # Start ingestion pipeline
         self.ingestion_pipeline.start()
-        logger.info("✓ Ingestion pipeline started")
+        logger.info("Ingestion pipeline started")
 
         self._initialized = True
         logger.info(" Unified storage system initialized successfully")

@@ -112,11 +112,11 @@ class UnifiedDataProviderExtension:
 
         if self.ingestion_pipeline:
             await self.ingestion_pipeline.stop()
-            logger.info("✓ Ingestion pipeline stopped")
+            logger.info("Ingestion pipeline stopped")
 
         if self.db_connection:
             await self.db_connection.close()
-            logger.info("✓ Database connection closed")
+            logger.info("Database connection closed")
 
         self._initialized = False
         logger.info(" Unified storage system shutdown complete")