set logger level to warning
@@ -53,12 +53,12 @@ async def start_training_pipeline(orchestrator, trading_executor):
     try:
         # Start real-time processing
         await orchestrator.start_realtime_processing()
-        logger.info("✅ Real-time processing started")
+        logger.info("Real-time processing started")
 
         # Start COB integration
         if hasattr(orchestrator, 'start_cob_integration'):
            await orchestrator.start_cob_integration()
-            logger.info("✅ COB integration started")
+            logger.info("COB integration started")
 
         # Main training loop
         iteration = 0
@@ -43,6 +43,11 @@ from dataclasses import asdict
 # Setup logger
 logger = logging.getLogger(__name__)
 
+# Reduce Werkzeug/Dash logging noise
+logging.getLogger('werkzeug').setLevel(logging.WARNING)
+logging.getLogger('dash').setLevel(logging.WARNING)
+logging.getLogger('dash.dash').setLevel(logging.WARNING)
+
 # Import core components
 from core.config import get_config
 from core.data_provider import DataProvider
@@ -152,6 +157,9 @@ class CleanTradingDashboard:
             'https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css'
         ])
 
+        # Suppress Dash development mode logging
+        self.app.enable_dev_tools(debug=False, dev_tools_silence_routes_logging=True)
+
         # Setup layout and callbacks
         self._setup_layout()
         self._setup_callbacks()
@@ -1294,13 +1302,13 @@ class CleanTradingDashboard:
         """Start continuous signal generation loop"""
         try:
             def signal_worker():
-                logger.info("🚀 Starting continuous signal generation loop")
+                logger.info("Starting continuous signal generation loop")
 
                 # Initialize DQN if not available
                 if not hasattr(self.orchestrator, 'sensitivity_dqn_agent') or self.orchestrator.sensitivity_dqn_agent is None:
                     try:
                         self.orchestrator._initialize_sensitivity_dqn()
-                        logger.info("✅ DQN Agent initialized for signal generation")
+                        logger.info("DQN Agent initialized for signal generation")
                     except Exception as e:
                         logger.warning(f"Could not initialize DQN: {e}")
 
@@ -1337,7 +1345,7 @@ class CleanTradingDashboard:
             # Start signal generation thread
             signal_thread = threading.Thread(target=signal_worker, daemon=True)
             signal_thread.start()
-            logger.info("✅ Signal generation loop started")
+            logger.info("Signal generation loop started")
 
         except Exception as e:
             logger.error(f"Error starting signal generation loop: {e}")
@@ -1481,7 +1489,7 @@ class CleanTradingDashboard:
                 self.recent_decisions = self.recent_decisions[-20:]
 
             # Log signal generation
-            logger.info(f"📊 Generated {signal['action']} signal for {signal['symbol']} "
+            logger.info(f"Generated {signal['action']} signal for {signal['symbol']} "
                         f"(conf: {signal['confidence']:.2f}, model: {signal.get('model', 'UNKNOWN')})")
 
             # Trigger training if DQN agent is available
@@ -1580,7 +1588,7 @@ class CleanTradingDashboard:
                     result = self.trading_executor.execute_trade(symbol, action, 0.01)  # Small size for testing
                     if result:
                         decision['executed'] = True
-                        logger.info(f"✅ Manual {action} executed at ${current_price:.2f}")
+                        logger.info(f"Manual {action} executed at ${current_price:.2f}")
 
                         # Create a trade record for tracking
                         trade_record = {
@@ -1612,13 +1620,13 @@ class CleanTradingDashboard:
                         decision['executed'] = False
                         decision['blocked'] = True
                         decision['block_reason'] = "Trading executor returned False"
-                        logger.warning(f"❌ Manual {action} failed - executor returned False")
+                        logger.warning(f"Manual {action} failed - executor returned False")
 
                 except Exception as e:
                     decision['executed'] = False
                     decision['blocked'] = True
                     decision['block_reason'] = str(e)
-                    logger.error(f"❌ Manual {action} failed with error: {e}")
+                    logger.error(f"Manual {action} failed with error: {e}")
 
             # Add to recent decisions for display
             self.recent_decisions.append(decision)
@@ -1671,9 +1679,9 @@ class CleanTradingDashboard:
                 import threading
                 threading.Thread(target=self._start_cob_data_subscription, daemon=True).start()
 
-                logger.info("✅ COB RL integration initialized successfully")
-                logger.info("🧠 1B parameter model ready for inference")
-                logger.info("📊 COB data subscription started")
+                logger.info("COB RL integration initialized successfully")
+                logger.info("1B parameter model ready for inference")
+                logger.info("COB data subscription started")
 
         except Exception as e:
             logger.error(f"Failed to initialize COB integration: {e}")
@@ -2053,8 +2061,12 @@ class CleanTradingDashboard:
 
     def run_server(self, host='127.0.0.1', port=8051, debug=False):
         """Run the dashboard server"""
+        # Set logging level for Flask/Werkzeug to reduce noise
+        if not debug:
+            logging.getLogger('werkzeug').setLevel(logging.ERROR)
+
         logger.info(f"Starting Clean Trading Dashboard at http://{host}:{port}")
-        self.app.run(host=host, port=port, debug=debug)
+        self.app.run(host=host, port=port, debug=debug, dev_tools_silence_routes_logging=True)
 
     def stop(self):
         """Stop the dashboard and cleanup resources"""
@@ -2120,8 +2132,8 @@ class CleanTradingDashboard:
                 self._stream_update_count = 1
 
             if self._stream_update_count % 100 == 0:  # Every 100 updates
-                logger.info(f"📈 Universal Stream: {self._stream_update_count} updates processed for {consumer_name}")
-                logger.debug(f"📊 Current data: ticks={len(data_packet.get('ticks', []))}, "
+                logger.info(f"Universal Stream: {self._stream_update_count} updates processed for {consumer_name}")
+                logger.debug(f"Current data: ticks={len(data_packet.get('ticks', []))}, "
                              f"tf_symbols={len(data_packet.get('multi_timeframe', {}))}")
 
         except Exception as e:
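For reference, every logging change in this commit follows the same standard-library pattern: raise the level of noisy third-party loggers so only warnings and errors reach the console, while the application's own logger keeps emitting INFO messages. Below is a minimal standalone sketch of that pattern, assuming only the stock logging module; the logger names are taken from the diff and the sample messages are illustrative, not part of the project code.

import logging

# The application's own logging stays at INFO.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Silence noisy third-party loggers (names as used in the diff).
for name in ('werkzeug', 'dash', 'dash.dash'):
    logging.getLogger(name).setLevel(logging.WARNING)

logger.info("Dashboard started")                         # still printed
logging.getLogger('werkzeug').info("GET / 200")          # suppressed
logging.getLogger('werkzeug').warning("Slow response")   # still printed

Because the level is set on the named logger itself, records below WARNING are dropped before they propagate to the root handler, which is why the dashboard code can keep its own INFO output while the Werkzeug/Dash request logs go quiet.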