UI and stability

Author: Dobromir Popov
Date: 2025-07-28 14:05:37 +03:00
Commit: 44821b2a89 (parent 25b2d3840a)
7 changed files with 934 additions and 135 deletions

View File

@@ -36,6 +36,12 @@ import math
# Suppress ta library deprecation warnings
warnings.filterwarnings("ignore", category=FutureWarning, module="ta")
# Import timezone utilities
from utils.timezone_utils import (
normalize_timestamp, normalize_dataframe_timestamps, normalize_dataframe_index,
now_system, now_utc, to_sofia, UTC, SOFIA_TZ, log_timezone_info
)
from .config import get_config
from .tick_aggregator import RealTimeTickAggregator, RawTick, OHLCVBar
from .cnn_monitor import log_cnn_prediction
@@ -472,7 +478,7 @@ class DataProvider:
# Create raw tick entry
raw_tick = {
'symbol': symbol,
- 'timestamp': datetime.utcnow(),
'timestamp': now_system(), # Use system timezone consistently
'bids': actual_data.get('bids', [])[:50], # Top 50 levels
'asks': actual_data.get('asks', [])[:50], # Top 50 levels
'stats': actual_data.get('stats', {}),
@@ -1097,8 +1103,7 @@ class DataProvider:
# Process columns with proper timezone handling (MEXC returns UTC timestamps)
df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms', utc=True)
- # Convert from UTC to Europe/Sofia timezone
- df['timestamp'] = df['timestamp'].dt.tz_convert('Europe/Sofia')
# Keep in UTC to match COB WebSocket data (no timezone conversion)
for col in ['open', 'high', 'low', 'close', 'volume']:
df[col] = df[col].astype(float)
@@ -1144,18 +1149,16 @@ class DataProvider:
if isinstance(timestamp, (int, float)):
import pytz
utc = pytz.UTC
- sofia_tz = pytz.timezone('Europe/Sofia')
tick_time = datetime.fromtimestamp(timestamp, tz=utc)
- tick_time = tick_time.astimezone(sofia_tz)
# Keep in UTC to match COB WebSocket data
elif isinstance(timestamp, datetime):
import pytz
- sofia_tz = pytz.timezone('Europe/Sofia')
utc = pytz.UTC
tick_time = timestamp
- # If no timezone info, assume UTC and convert to Europe/Sofia
# If no timezone info, assume UTC and keep in UTC
if tick_time.tzinfo is None:
- utc = pytz.UTC
tick_time = utc.localize(tick_time)
- tick_time = tick_time.astimezone(sofia_tz)
# Keep in UTC (no timezone conversion)
else:
continue
@@ -1195,15 +1198,15 @@ class DataProvider:
# Convert to DataFrame
df = pd.DataFrame(candles)
- # Ensure timestamps are timezone-aware (Europe/Sofia)
# Ensure timestamps are timezone-aware (UTC to match COB WebSocket data)
if not df.empty and 'timestamp' in df.columns:
import pytz
- sofia_tz = pytz.timezone('Europe/Sofia')
utc = pytz.UTC
- # If timestamps are not timezone-aware, make them Europe/Sofia
# If timestamps are not timezone-aware, make them UTC
if df['timestamp'].dt.tz is None:
- df['timestamp'] = df['timestamp'].dt.tz_localize(sofia_tz)
df['timestamp'] = df['timestamp'].dt.tz_localize(utc)
else:
- df['timestamp'] = df['timestamp'].dt.tz_convert(sofia_tz)
df['timestamp'] = df['timestamp'].dt.tz_convert(utc)
df = df.sort_values('timestamp').reset_index(drop=True)
@@ -1283,8 +1286,8 @@ class DataProvider:
# Process columns with proper timezone handling (Binance returns UTC timestamps)
df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms', utc=True)
- # Convert from UTC to Europe/Sofia timezone
- df['timestamp'] = df['timestamp'].dt.tz_convert('Europe/Sofia')
# Keep in UTC to match COB WebSocket data (no timezone conversion)
# This prevents the 3-hour gap when appending live COB data
for col in ['open', 'high', 'low', 'close', 'volume']:
df[col] = df[col].astype(float)
@@ -1491,9 +1494,8 @@ class DataProvider:
import pytz
utc = pytz.UTC
- sofia_tz = pytz.timezone('Europe/Sofia')
- end_time = datetime.utcnow().replace(tzinfo=utc).astimezone(sofia_tz)
end_time = datetime.utcnow().replace(tzinfo=utc)
start_time = end_time - timedelta(days=30)
if cached_data is not None and not cached_data.empty:
@@ -1596,8 +1598,7 @@ class DataProvider:
# Process columns with proper timezone handling
df['timestamp'] = pd.to_datetime(df['timestamp'], unit='ms', utc=True)
- # Convert from UTC to Europe/Sofia timezone to match cached data
- df['timestamp'] = df['timestamp'].dt.tz_convert('Europe/Sofia')
# Keep in UTC to match COB WebSocket data (no timezone conversion)
for col in ['open', 'high', 'low', 'close', 'volume']:
df[col] = df[col].astype(float)
@@ -1669,8 +1670,7 @@ class DataProvider:
# Process columns with proper timezone handling
batch_df['timestamp'] = pd.to_datetime(batch_df['timestamp'], unit='ms', utc=True)
- # Convert from UTC to Europe/Sofia timezone to match cached data
- batch_df['timestamp'] = batch_df['timestamp'].dt.tz_convert('Europe/Sofia')
# Keep in UTC to match COB WebSocket data (no timezone conversion)
for col in ['open', 'high', 'low', 'close', 'volume']:
batch_df[col] = batch_df[col].astype(float)
@@ -2033,15 +2033,14 @@ class DataProvider:
if cache_file.exists():
try:
df = pd.read_parquet(cache_file)
- # Ensure cached monthly data has proper timezone (Europe/Sofia)
# Ensure cached monthly data has proper timezone (UTC to match COB WebSocket data)
if not df.empty and 'timestamp' in df.columns:
if df['timestamp'].dt.tz is None:
- # If no timezone info, assume UTC and convert to Europe/Sofia
# If no timezone info, assume UTC and keep in UTC
df['timestamp'] = pd.to_datetime(df['timestamp'], utc=True)
- df['timestamp'] = df['timestamp'].dt.tz_convert('Europe/Sofia')
- elif str(df['timestamp'].dt.tz) != 'Europe/Sofia':
- # Convert to Europe/Sofia if different timezone
- df['timestamp'] = df['timestamp'].dt.tz_convert('Europe/Sofia')
elif str(df['timestamp'].dt.tz) != 'UTC':
# Convert to UTC if different timezone
df['timestamp'] = df['timestamp'].dt.tz_convert('UTC')
logger.info(f"Loaded {len(df)} 1m candles from cache for {symbol}")
return df
except Exception as parquet_e:
@@ -2317,15 +2316,14 @@ class DataProvider:
if cache_age < max_age:
try:
df = pd.read_parquet(cache_file)
- # Ensure cached data has proper timezone (Europe/Sofia)
# Ensure cached data has proper timezone (UTC to match COB WebSocket data)
if not df.empty and 'timestamp' in df.columns:
if df['timestamp'].dt.tz is None:
- # If no timezone info, assume UTC and convert to Europe/Sofia
# If no timezone info, assume UTC and keep in UTC
df['timestamp'] = pd.to_datetime(df['timestamp'], utc=True)
- df['timestamp'] = df['timestamp'].dt.tz_convert('Europe/Sofia')
- elif str(df['timestamp'].dt.tz) != 'Europe/Sofia':
- # Convert to Europe/Sofia if different timezone
- df['timestamp'] = df['timestamp'].dt.tz_convert('Europe/Sofia')
elif str(df['timestamp'].dt.tz) != 'UTC':
# Convert to UTC if different timezone
df['timestamp'] = df['timestamp'].dt.tz_convert('UTC')
logger.debug(f"Loaded {len(df)} rows from cache for {symbol} {timeframe} (age: {cache_age/60:.1f}min)")
return df
except Exception as parquet_e:
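The comments above reference a 3-hour chart gap that appeared when Europe/Sofia-localized history was mixed with UTC-stamped live COB bars. A minimal illustration of that offset (not part of this commit; hypothetical timestamps only):

import pandas as pd

# The same instant read in UTC vs Europe/Sofia differs by the UTC+3 summer offset,
# which is where the 3-hour gap came from when Sofia-localized candles were appended
# to UTC-stamped live COB data. Keeping everything in UTC avoids the jump.
ts_utc = pd.Timestamp("2025-07-28 12:00", tz="UTC")
ts_sofia = ts_utc.tz_convert("Europe/Sofia")
print(ts_utc)    # 2025-07-28 12:00:00+00:00
print(ts_sofia)  # 2025-07-28 15:00:00+03:00 (same instant, different wall-clock label)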

View File

@@ -2589,24 +2589,11 @@ class TradingOrchestrator:
# Method 3: Dictionary with feature data
if isinstance(model_input, dict):
- # Check if dictionary is empty
# Check if dictionary is empty - this is the main issue!
if not model_input:
- logger.warning(f"Empty dictionary passed as model_input for {model_name}, using fallback")
- # Try to use data provider to build state as fallback
- if hasattr(self, 'data_provider'):
- try:
- base_data = self.data_provider.build_base_data_input('ETH/USDT')
- if base_data and hasattr(base_data, 'get_feature_vector'):
- state = base_data.get_feature_vector()
- if isinstance(state, np.ndarray):
- logger.debug(f"Used data provider fallback for empty dict in {model_name}")
- return state
- except Exception as e:
- logger.debug(f"Data provider fallback failed for empty dict in {model_name}: {e}")
- # Final fallback: return default state
- logger.warning(f"Using default state for empty dict in {model_name}")
- return np.zeros(403, dtype=np.float32) # Default state size
logger.warning(f"Empty dictionary passed as model_input for {model_name}, using data provider fallback")
# Use data provider to build proper state as fallback
return self._generate_fresh_state_fallback(model_name)
# Try to extract features from dictionary
if 'features' in model_input:
@@ -2629,7 +2616,8 @@ class TradingOrchestrator:
if feature_list:
return np.array(feature_list, dtype=np.float32)
else:
- logger.warning(f"No numerical features found in dictionary for {model_name}, using fallback")
logger.warning(f"No numerical features found in dictionary for {model_name}, using data provider fallback")
return self._generate_fresh_state_fallback(model_name)
# Method 4: List or tuple
if isinstance(model_input, (list, tuple)):
@@ -2642,24 +2630,57 @@ class TradingOrchestrator:
if isinstance(model_input, (int, float)):
return np.array([model_input], dtype=np.float32)
- # Method 6: Try to use data provider to build state
- if hasattr(self, 'data_provider'):
- try:
- base_data = self.data_provider.build_base_data_input('ETH/USDT')
- if base_data and hasattr(base_data, 'get_feature_vector'):
- state = base_data.get_feature_vector()
- if isinstance(state, np.ndarray):
- logger.debug(f"Used data provider fallback for {model_name}")
- return state
- except Exception as e:
- logger.debug(f"Data provider fallback failed for {model_name}: {e}")
- logger.warning(f"Cannot convert model_input to RL state for {model_name}: {type(model_input)}")
- return None
# Method 6: Final fallback - generate fresh state
logger.warning(f"Cannot convert model_input to RL state for {model_name}: {type(model_input)}, using fresh state fallback")
return self._generate_fresh_state_fallback(model_name)
except Exception as e:
logger.error(f"Error converting model_input to RL state for {model_name}: {e}")
- return None
return self._generate_fresh_state_fallback(model_name)
def _generate_fresh_state_fallback(self, model_name: str) -> np.ndarray:
"""Generate a fresh state from current market data when model_input is empty/invalid"""
try:
# Try to use data provider to build fresh state
if hasattr(self, 'data_provider') and self.data_provider:
try:
# Build fresh BaseDataInput with current market data
base_data = self.data_provider.build_base_data_input('ETH/USDT')
if base_data and hasattr(base_data, 'get_feature_vector'):
state = base_data.get_feature_vector()
if isinstance(state, np.ndarray) and state.size > 0:
logger.info(f"Generated fresh state for {model_name} from data provider: shape={state.shape}")
return state
except Exception as e:
logger.debug(f"Data provider fresh state generation failed for {model_name}: {e}")
# Try to get state from model registry
if hasattr(self, 'model_registry') and self.model_registry:
try:
model_interface = self.model_registry.models.get(model_name)
if model_interface and hasattr(model_interface, 'get_current_state'):
state = model_interface.get_current_state()
if isinstance(state, np.ndarray) and state.size > 0:
logger.info(f"Generated fresh state for {model_name} from model interface: shape={state.shape}")
return state
except Exception as e:
logger.debug(f"Model interface fresh state generation failed for {model_name}: {e}")
# Final fallback: create a reasonable default state with proper dimensions
# Use the expected state size for DQN models (403 features)
default_state_size = 403
if 'cnn' in model_name.lower():
default_state_size = 500 # Larger for CNN models
elif 'cob' in model_name.lower():
default_state_size = 2000 # Much larger for COB models
logger.warning(f"Using default zero state for {model_name} with size {default_state_size}")
return np.zeros(default_state_size, dtype=np.float32)
except Exception as e:
logger.error(f"Error generating fresh state fallback for {model_name}: {e}")
# Ultimate fallback
return np.zeros(403, dtype=np.float32)
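For reference, a standalone sketch (not part of this commit, and not the orchestrator's actual method) of the conversion-and-fallback order the diff above establishes: array, dict, list and scalar inputs are coerced to a float32 vector, and anything unusable now falls through to a fixed-size fresh-state vector instead of returning None. The 403/500/2000 sizes mirror the defaults in _generate_fresh_state_fallback.

import numpy as np

def to_rl_state(model_input, model_name: str) -> np.ndarray:
    # Default state sizes mirroring the fallback above (assumed simplification)
    name = model_name.lower()
    default_size = 2000 if 'cob' in name else 500 if 'cnn' in name else 403
    if isinstance(model_input, np.ndarray) and model_input.size > 0:
        return model_input.astype(np.float32)
    if isinstance(model_input, dict) and 'features' in model_input:
        return np.asarray(model_input['features'], dtype=np.float32)
    if isinstance(model_input, (list, tuple)) and len(model_input) > 0:
        return np.asarray(model_input, dtype=np.float32)
    if isinstance(model_input, (int, float)):
        return np.array([model_input], dtype=np.float32)
    return np.zeros(default_size, dtype=np.float32)  # fresh-state stand-in, never None

print(to_rl_state({}, 'dqn_agent').shape)          # (403,) - empty dict no longer returns None
print(to_rl_state([0.1, 0.2], 'cnn_model').shape)  # (2,)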
async def _train_cnn_model(self, model, model_name: str, record: Dict, prediction: Dict, reward: float) -> bool:
"""Train CNN model directly (no adapter)"""
@@ -3785,6 +3806,35 @@ class TradingOrchestrator:
except Exception as e:
logger.error(f"Error setting training dashboard: {e}")
def set_cold_start_training_enabled(self, enabled: bool) -> bool:
"""Enable or disable cold start training (excessive training during cold start)
Args:
enabled: Whether to enable cold start training
Returns:
bool: True if setting was applied successfully
"""
try:
# Store the setting
self.cold_start_enabled = enabled
# Adjust training frequency based on cold start mode
if enabled:
# High frequency training during cold start
self.training_frequency = 'high'
logger.info("ORCHESTRATOR: Cold start training ENABLED - Excessive training on every signal")
else:
# Normal training frequency
self.training_frequency = 'normal'
logger.info("ORCHESTRATOR: Cold start training DISABLED - Normal training frequency")
return True
except Exception as e:
logger.error(f"Error setting cold start training: {e}")
return False
def get_universal_data_stream(self, current_time: Optional[datetime] = None):
"""Get universal data stream for external consumers like dashboard - DELEGATED to data provider"""

View File

@@ -1247,27 +1247,23 @@ class TradingExecutor:
taker_fee_rate = trading_fees.get('taker_fee', trading_fees.get('default_fee', 0.0006))
simulated_fees = position.quantity * current_price * taker_fee_rate
- # Calculate P&L for short position and hold time
- pnl = position.calculate_pnl(current_price)
- exit_time = datetime.now()
- hold_time_seconds = (exit_time - position.entry_time).total_seconds()
- # Get current leverage setting from dashboard or config
# Get current leverage setting
leverage = self.get_leverage()
# Calculate position size in USD
position_size_usd = position.quantity * position.entry_price
# Calculate gross PnL (before fees) with leverage
- if position.side == 'SHORT':
- gross_pnl = (position.entry_price - current_price) * position.quantity * leverage
- else: # LONG
gross_pnl = (current_price - position.entry_price) * position.quantity * leverage
# Calculate net PnL (after fees)
net_pnl = gross_pnl - simulated_fees
- # Create trade record with enhanced PnL calculations
# Calculate hold time
exit_time = datetime.now()
hold_time_seconds = (exit_time - position.entry_time).total_seconds()
# Create trade record with corrected PnL calculations
trade_record = TradeRecord(
symbol=symbol,
side='SHORT',
@@ -1287,16 +1283,16 @@
)
self.trade_history.append(trade_record)
- self.trade_records.append(trade_record) # Add to trade records for success rate tracking
self.trade_records.append(trade_record)
- self.daily_loss += max(0, -pnl) # Add to daily loss if negative
self.daily_loss += max(0, -net_pnl) # Use net_pnl instead of pnl
# Adjust profitability reward multiplier based on recent performance
self._adjust_profitability_reward_multiplier()
- # Update consecutive losses
# Update consecutive losses using net_pnl
- if pnl < -0.001: # A losing trade
if net_pnl < -0.001: # A losing trade
self.consecutive_losses += 1
- elif pnl > 0.001: # A winning trade
elif net_pnl > 0.001: # A winning trade
self.consecutive_losses = 0
else: # Breakeven trade
self.consecutive_losses = 0
@@ -1306,7 +1302,7 @@
self.last_trade_time[symbol] = datetime.now()
self.daily_trades += 1
- logger.info(f"Position closed - P&L: ${pnl:.2f}")
logger.info(f"SHORT position closed - Gross P&L: ${gross_pnl:.2f}, Net P&L: ${net_pnl:.2f}, Fees: ${simulated_fees:.3f}")
return True
try:
@@ -1342,27 +1338,23 @@
# Calculate fees using real API data when available
fees = self._calculate_real_trading_fees(order, symbol, position.quantity, current_price)
- # Calculate P&L, fees, and hold time
- pnl = position.calculate_pnl(current_price)
- exit_time = datetime.now()
- hold_time_seconds = (exit_time - position.entry_time).total_seconds()
- # Get current leverage setting from dashboard or config
# Get current leverage setting
leverage = self.get_leverage()
# Calculate position size in USD
position_size_usd = position.quantity * position.entry_price
# Calculate gross PnL (before fees) with leverage
- if position.side == 'SHORT':
- gross_pnl = (position.entry_price - current_price) * position.quantity * leverage
- else: # LONG
gross_pnl = (current_price - position.entry_price) * position.quantity * leverage
# Calculate net PnL (after fees)
net_pnl = gross_pnl - fees
- # Create trade record with enhanced PnL calculations
# Calculate hold time
exit_time = datetime.now()
hold_time_seconds = (exit_time - position.entry_time).total_seconds()
# Create trade record with corrected PnL calculations
trade_record = TradeRecord(
symbol=symbol,
side='SHORT',
@@ -1382,16 +1374,16 @@
)
self.trade_history.append(trade_record)
- self.trade_records.append(trade_record) # Add to trade records for success rate tracking
self.trade_records.append(trade_record)
- self.daily_loss += max(0, -(pnl - fees)) # Add to daily loss if negative
self.daily_loss += max(0, -net_pnl) # Use net_pnl instead of pnl
# Adjust profitability reward multiplier based on recent performance
self._adjust_profitability_reward_multiplier()
- # Update consecutive losses
# Update consecutive losses using net_pnl
- if pnl < -0.001: # A losing trade
if net_pnl < -0.001: # A losing trade
self.consecutive_losses += 1
- elif pnl > 0.001: # A winning trade
elif net_pnl > 0.001: # A winning trade
self.consecutive_losses = 0
else: # Breakeven trade
self.consecutive_losses = 0
@@ -1402,7 +1394,7 @@
self.daily_trades += 1
logger.info(f"SHORT close order executed: {order}")
- logger.info(f"SHORT position closed - P&L: ${pnl - fees:.2f}")
logger.info(f"SHORT position closed - Gross P&L: ${gross_pnl:.2f}, Net P&L: ${net_pnl:.2f}, Fees: ${fees:.3f}")
return True
else:
logger.error("Failed to place SHORT close order")
@@ -1417,7 +1409,7 @@
if symbol not in self.positions:
logger.warning(f"No position to close in {symbol}")
return False
position = self.positions[symbol]
if position.side != 'LONG':
logger.warning(f"Position in {symbol} is not LONG, cannot close with SELL")
@@ -1429,15 +1421,27 @@
if self.simulation_mode:
logger.info(f"SIMULATION MODE ({self.trading_mode.upper()}) - Long close logged but not executed")
# Calculate simulated fees in simulation mode
- taker_fee_rate = self.mexc_config.get('trading_fees', {}).get('taker_fee', 0.0006)
trading_fees = self.exchange_config.get('trading_fees', {})
taker_fee_rate = trading_fees.get('taker_fee', trading_fees.get('default_fee', 0.0006))
simulated_fees = position.quantity * current_price * taker_fee_rate
- # Calculate P&L for long position and hold time
- pnl = position.calculate_pnl(current_price)
# Get current leverage setting
leverage = self.get_leverage()
# Calculate position size in USD
position_size_usd = position.quantity * position.entry_price
# Calculate gross PnL (before fees) with leverage
gross_pnl = (current_price - position.entry_price) * position.quantity * leverage
# Calculate net PnL (after fees)
net_pnl = gross_pnl - simulated_fees
# Calculate hold time
exit_time = datetime.now()
hold_time_seconds = (exit_time - position.entry_time).total_seconds()
- # Create trade record
# Create trade record with corrected PnL calculations
trade_record = TradeRecord(
symbol=symbol,
side='LONG',
@@ -1446,23 +1450,27 @@
exit_price=current_price,
entry_time=position.entry_time,
exit_time=exit_time,
- pnl=pnl,
pnl=net_pnl, # Store net PnL as the main PnL value
fees=simulated_fees,
confidence=confidence,
- hold_time_seconds=hold_time_seconds
hold_time_seconds=hold_time_seconds,
leverage=leverage,
position_size_usd=position_size_usd,
gross_pnl=gross_pnl,
net_pnl=net_pnl
)
self.trade_history.append(trade_record)
- self.trade_records.append(trade_record) # Add to trade records for success rate tracking
self.trade_records.append(trade_record)
- self.daily_loss += max(0, -pnl) # Add to daily loss if negative
self.daily_loss += max(0, -net_pnl) # Use net_pnl instead of pnl
# Adjust profitability reward multiplier based on recent performance
self._adjust_profitability_reward_multiplier()
- # Update consecutive losses
# Update consecutive losses using net_pnl
- if pnl < -0.001: # A losing trade
if net_pnl < -0.001: # A losing trade
self.consecutive_losses += 1
- elif pnl > 0.001: # A winning trade
elif net_pnl > 0.001: # A winning trade
self.consecutive_losses = 0
else: # Breakeven trade
self.consecutive_losses = 0
@@ -1472,7 +1480,7 @@
self.last_trade_time[symbol] = datetime.now()
self.daily_trades += 1
- logger.info(f"Position closed - P&L: ${pnl:.2f}")
logger.info(f"LONG position closed - Gross P&L: ${gross_pnl:.2f}, Net P&L: ${net_pnl:.2f}, Fees: ${simulated_fees:.3f}")
return True
try:
@@ -1508,12 +1516,23 @@
# Calculate fees using real API data when available
fees = self._calculate_real_trading_fees(order, symbol, position.quantity, current_price)
- # Calculate P&L, fees, and hold time
- pnl = position.calculate_pnl(current_price)
# Get current leverage setting
leverage = self.get_leverage()
# Calculate position size in USD
position_size_usd = position.quantity * position.entry_price
# Calculate gross PnL (before fees) with leverage
gross_pnl = (current_price - position.entry_price) * position.quantity * leverage
# Calculate net PnL (after fees)
net_pnl = gross_pnl - fees
# Calculate hold time
exit_time = datetime.now()
hold_time_seconds = (exit_time - position.entry_time).total_seconds()
- # Create trade record
# Create trade record with corrected PnL calculations
trade_record = TradeRecord(
symbol=symbol,
side='LONG',
@@ -1522,23 +1541,27 @@
exit_price=current_price,
entry_time=position.entry_time,
exit_time=exit_time,
- pnl=pnl - fees,
pnl=net_pnl, # Store net PnL as the main PnL value
fees=fees,
confidence=confidence,
- hold_time_seconds=hold_time_seconds
hold_time_seconds=hold_time_seconds,
leverage=leverage,
position_size_usd=position_size_usd,
gross_pnl=gross_pnl,
net_pnl=net_pnl
)
self.trade_history.append(trade_record)
- self.trade_records.append(trade_record) # Add to trade records for success rate tracking
self.trade_records.append(trade_record)
- self.daily_loss += max(0, -(pnl - fees)) # Add to daily loss if negative
self.daily_loss += max(0, -net_pnl) # Use net_pnl instead of pnl
# Adjust profitability reward multiplier based on recent performance
self._adjust_profitability_reward_multiplier()
- # Update consecutive losses
# Update consecutive losses using net_pnl
- if pnl < -0.001: # A losing trade
if net_pnl < -0.001: # A losing trade
self.consecutive_losses += 1
- elif pnl > 0.001: # A winning trade
elif net_pnl > 0.001: # A winning trade
self.consecutive_losses = 0
else: # Breakeven trade
self.consecutive_losses = 0
@@ -1549,7 +1572,7 @@
self.daily_trades += 1
logger.info(f"LONG close order executed: {order}")
- logger.info(f"LONG position closed - P&L: ${pnl - fees:.2f}")
logger.info(f"LONG position closed - Gross P&L: ${gross_pnl:.2f}, Net P&L: ${net_pnl:.2f}, Fees: ${fees:.3f}")
return True
else:
logger.error("Failed to place LONG close order")
@@ -2406,6 +2429,44 @@
else:
logger.info("TRADING EXECUTOR: Test mode disabled - normal safety checks active")
def set_trading_mode(self, mode: str) -> bool:
"""Set trading mode (simulation/live) and update all related settings
Args:
mode: Trading mode ('simulation' or 'live')
Returns:
bool: True if mode was set successfully
"""
try:
if mode not in ['simulation', 'live']:
logger.error(f"Invalid trading mode: {mode}. Must be 'simulation' or 'live'")
return False
# Store original mode if not already stored
if not hasattr(self, 'original_trading_mode'):
self.original_trading_mode = self.trading_mode
# Update trading mode
self.trading_mode = mode
self.simulation_mode = (mode == 'simulation')
# Update primary config if available
if hasattr(self, 'primary_config') and self.primary_config:
self.primary_config['trading_mode'] = mode
# Log the change
if mode == 'live':
logger.warning("TRADING EXECUTOR: MODE CHANGED TO LIVE - Real orders will be executed!")
else:
logger.info("TRADING EXECUTOR: MODE CHANGED TO SIMULATION - Orders are simulated")
return True
except Exception as e:
logger.error(f"Error setting trading mode to {mode}: {e}")
return False
def get_status(self) -> Dict[str, Any]:
"""Get trading executor status with safety feature information"""
try:
@@ -2731,3 +2792,85 @@
import traceback
logger.error(f"CORRECTIVE: Full traceback: {traceback.format_exc()}")
return False
def recalculate_all_trade_records(self):
"""Recalculate all existing trade records with correct leverage and PnL"""
logger.info("Recalculating all trade records with correct leverage and PnL...")
updated_count = 0
for i, trade in enumerate(self.trade_history):
try:
# Get current leverage setting
leverage = self.get_leverage()
# Calculate position size in USD
position_size_usd = trade.entry_price * trade.quantity
# Calculate gross PnL (before fees) with leverage
if trade.side == 'LONG':
gross_pnl = (trade.exit_price - trade.entry_price) * trade.quantity * leverage
else: # SHORT
gross_pnl = (trade.entry_price - trade.exit_price) * trade.quantity * leverage
# Calculate fees (0.1% open + 0.1% close = 0.2% total)
entry_value = trade.entry_price * trade.quantity
exit_value = trade.exit_price * trade.quantity
fees = (entry_value + exit_value) * 0.001
# Calculate net PnL (after fees)
net_pnl = gross_pnl - fees
# Update trade record with corrected values
trade.leverage = leverage
trade.position_size_usd = position_size_usd
trade.gross_pnl = gross_pnl
trade.net_pnl = net_pnl
trade.pnl = net_pnl # Main PnL field
trade.fees = fees
updated_count += 1
except Exception as e:
logger.error(f"Error recalculating trade record {i}: {e}")
continue
logger.info(f"Updated {updated_count} trade records with correct leverage and PnL calculations")
# Also update trade_records list if it exists
if hasattr(self, 'trade_records') and self.trade_records:
logger.info("Updating trade_records list...")
for i, trade in enumerate(self.trade_records):
try:
# Get current leverage setting
leverage = self.get_leverage()
# Calculate position size in USD
position_size_usd = trade.entry_price * trade.quantity
# Calculate gross PnL (before fees) with leverage
if trade.side == 'LONG':
gross_pnl = (trade.exit_price - trade.entry_price) * trade.quantity * leverage
else: # SHORT
gross_pnl = (trade.entry_price - trade.exit_price) * trade.quantity * leverage
# Calculate fees (0.1% open + 0.1% close = 0.2% total)
entry_value = trade.entry_price * trade.quantity
exit_value = trade.exit_price * trade.quantity
fees = (entry_value + exit_value) * 0.001
# Calculate net PnL (after fees)
net_pnl = gross_pnl - fees
# Update trade record with corrected values
trade.leverage = leverage
trade.position_size_usd = position_size_usd
trade.gross_pnl = gross_pnl
trade.net_pnl = net_pnl
trade.pnl = net_pnl # Main PnL field
trade.fees = fees
except Exception as e:
logger.error(f"Error recalculating trade_records entry {i}: {e}")
continue
logger.info("Trade record recalculation completed")
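A compact sketch of the corrected arithmetic applied above (not part of this commit): it uses the side-aware gross PnL formula and the flat 0.1% open + 0.1% close fee model from recalculate_all_trade_records; the live and simulated close paths above apply the exchange taker fee instead. The numbers are the test trade used by the fix script below.

def leveraged_pnl(side: str, entry: float, exit: float, qty: float,
                  leverage: float, fee_rate: float = 0.001):
    """Gross PnL scaled by leverage, flat per-side fee, net = gross - fees."""
    direction = 1.0 if side == 'LONG' else -1.0
    gross = (exit - entry) * qty * leverage * direction
    fees = (entry * qty + exit * qty) * fee_rate  # 0.1% on open + 0.1% on close
    return gross, fees, gross - fees

# $11.20 LONG, entry 3889.77, exit 3883.99, 50x leverage
gross, fees, net = leveraged_pnl('LONG', 3889.77, 3883.99, 11.20 / 3889.77, 50.0)
print(f"gross={gross:.2f} fees={fees:.3f} net={net:.2f}")  # about -0.83 / 0.022 / -0.85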

View File

@@ -0,0 +1,140 @@
#!/usr/bin/env python3
"""
Fix Order History Calculations
This script fixes the PnL calculations in the order history to ensure
correct leverage application and consistent fee calculations.
"""
import sys
import os
from datetime import datetime
import logging
# Add project root to path
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from core.trading_executor import TradingExecutor, Position, TradeRecord
# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
def test_pnl_calculations():
"""Test the corrected PnL calculations"""
logger.info("Testing corrected PnL calculations...")
# Create trading executor
executor = TradingExecutor()
# Set leverage to 50x
executor.set_leverage(50.0)
current_leverage = executor.get_leverage()
logger.info(f"Current leverage: {current_leverage}x")
# Test case from your example
# LONG $11.20 $3889.77 $3883.99 277 $-0.05 $0.007
entry_price = 3889.77
exit_price = 3883.99
position_size_usd = 11.20
quantity = position_size_usd / entry_price # Calculate actual quantity
logger.info(f"Test case:")
logger.info(f" Position Size: ${position_size_usd}")
logger.info(f" Entry Price: ${entry_price}")
logger.info(f" Exit Price: ${exit_price}")
logger.info(f" Quantity: {quantity:.6f}")
logger.info(f" Leverage: {current_leverage}x")
# Calculate with corrected method
gross_pnl = (exit_price - entry_price) * quantity * current_leverage
fees = (entry_price * quantity + exit_price * quantity) * 0.001
net_pnl = gross_pnl - fees
logger.info(f"Corrected calculations:")
logger.info(f" Gross PnL: ${gross_pnl:.2f}")
logger.info(f" Fees: ${fees:.3f}")
logger.info(f" Net PnL: ${net_pnl:.2f}")
# Expected calculation for $11.20 position with 50x leverage
expected_gross_pnl = (3883.99 - 3889.77) * (11.20 / 3889.77) * 50
expected_fees = (11.20 + (11.20 * 3883.99 / 3889.77)) * 0.001
expected_net_pnl = expected_gross_pnl - expected_fees
logger.info(f"Expected calculations:")
logger.info(f" Expected Gross PnL: ${expected_gross_pnl:.2f}")
logger.info(f" Expected Fees: ${expected_fees:.3f}")
logger.info(f" Expected Net PnL: ${expected_net_pnl:.2f}")
# Compare with your reported values
reported_pnl = -0.05
reported_fees = 0.007
logger.info(f"Your reported values:")
logger.info(f" Reported PnL: ${reported_pnl:.2f}")
logger.info(f" Reported Fees: ${reported_fees:.3f}")
# Calculate difference
pnl_diff = abs(net_pnl - reported_pnl)
logger.info(f"Difference in PnL: ${pnl_diff:.2f}")
if pnl_diff > 1.0:
logger.warning("Significant difference detected! The calculations were incorrect.")
else:
logger.info("Calculations are now correct!")
def fix_existing_trade_records():
"""Fix existing trade records in the trading executor"""
logger.info("Fixing existing trade records...")
try:
# Create trading executor
executor = TradingExecutor()
# Call the recalculation method
executor.recalculate_all_trade_records()
# Display some sample results
trade_history = executor.get_trade_history()
if trade_history:
logger.info(f"Found {len(trade_history)} trade records")
# Show first few trades
for i, trade in enumerate(trade_history[:3]):
logger.info(f"Trade {i+1}:")
logger.info(f" Side: {trade.side}")
logger.info(f" Entry: ${trade.entry_price:.2f}")
logger.info(f" Exit: ${trade.exit_price:.2f}")
logger.info(f" Quantity: {trade.quantity:.6f}")
logger.info(f" Leverage: {trade.leverage}x")
logger.info(f" Gross PnL: ${trade.gross_pnl:.2f}")
logger.info(f" Net PnL: ${trade.net_pnl:.2f}")
logger.info(f" Fees: ${trade.fees:.3f}")
logger.info("")
else:
logger.info("No trade records found")
except Exception as e:
logger.error(f"Error fixing trade records: {e}")
def main():
"""Main function to test and fix order history calculations"""
logger.info("=" * 60)
logger.info("FIXING ORDER HISTORY CALCULATIONS")
logger.info("=" * 60)
# Test the calculations
test_pnl_calculations()
logger.info("\n" + "=" * 60)
# Fix existing trade records
fix_existing_trade_records()
logger.info("\n" + "=" * 60)
logger.info("Order history calculations have been fixed!")
logger.info("All future trades will use the corrected PnL calculations.")
logger.info("Existing trade records have been updated with correct leverage and fees.")
if __name__ == "__main__":
main()

utils/timezone_utils.py (new file, 252 lines added)
View File

@@ -0,0 +1,252 @@
#!/usr/bin/env python3
"""
Centralized timezone utilities for the trading system
This module provides consistent timezone handling across all components:
- All external data (Binance, MEXC) comes in UTC
- All internal processing uses Europe/Sofia timezone
- All timestamps stored in database are timezone-aware
- All NN model inputs use consistent timezone
"""
import pytz
import pandas as pd
from datetime import datetime, timezone
from typing import Union, Optional
import logging
logger = logging.getLogger(__name__)
# Define timezone constants
UTC = pytz.UTC
SOFIA_TZ = pytz.timezone('Europe/Sofia')
SYSTEM_TIMEZONE = SOFIA_TZ # Our system's primary timezone
def get_system_timezone():
"""Get the system's primary timezone (Europe/Sofia)"""
return SYSTEM_TIMEZONE
def get_utc_timezone():
"""Get UTC timezone"""
return UTC
def now_utc() -> datetime:
"""Get current time in UTC"""
return datetime.now(UTC)
def now_sofia() -> datetime:
"""Get current time in Sofia timezone"""
return datetime.now(SOFIA_TZ)
def now_system() -> datetime:
"""Get current time in system timezone (Sofia)"""
return now_sofia()
def to_utc(dt: Union[datetime, pd.Timestamp]) -> datetime:
"""Convert datetime to UTC timezone"""
if dt is None:
return None
if isinstance(dt, pd.Timestamp):
dt = dt.to_pydatetime()
if dt.tzinfo is None:
# Assume it's in system timezone if no timezone info
dt = SYSTEM_TIMEZONE.localize(dt)
return dt.astimezone(UTC)
def to_sofia(dt: Union[datetime, pd.Timestamp]) -> datetime:
"""Convert datetime to Sofia timezone"""
if dt is None:
return None
if isinstance(dt, pd.Timestamp):
dt = dt.to_pydatetime()
if dt.tzinfo is None:
# Assume it's UTC if no timezone info (common for external data)
dt = UTC.localize(dt)
return dt.astimezone(SOFIA_TZ)
def to_system_timezone(dt: Union[datetime, pd.Timestamp]) -> datetime:
"""Convert datetime to system timezone (Sofia)"""
return to_sofia(dt)
def normalize_timestamp(timestamp: Union[int, float, str, datetime, pd.Timestamp],
source_tz: Optional[pytz.BaseTzInfo] = None) -> datetime:
"""
Normalize various timestamp formats to system timezone (Sofia)
Args:
timestamp: Timestamp in various formats
source_tz: Source timezone (defaults to UTC for external data)
Returns:
datetime: Timezone-aware datetime in system timezone
"""
if timestamp is None:
return now_system()
# Default source timezone is UTC (most external APIs use UTC)
if source_tz is None:
source_tz = UTC
try:
# Handle different timestamp formats
if isinstance(timestamp, (int, float)):
# Unix timestamp (assume seconds, convert to milliseconds if needed)
if timestamp > 1e10: # Milliseconds
timestamp = timestamp / 1000
dt = datetime.fromtimestamp(timestamp, tz=source_tz)
elif isinstance(timestamp, str):
# String timestamp
dt = pd.to_datetime(timestamp)
if dt.tzinfo is None:
dt = source_tz.localize(dt)
elif isinstance(timestamp, pd.Timestamp):
dt = timestamp.to_pydatetime()
if dt.tzinfo is None:
dt = source_tz.localize(dt)
elif isinstance(timestamp, datetime):
dt = timestamp
if dt.tzinfo is None:
dt = source_tz.localize(dt)
else:
logger.warning(f"Unknown timestamp format: {type(timestamp)}")
return now_system()
# Convert to system timezone
return dt.astimezone(SYSTEM_TIMEZONE)
except Exception as e:
logger.error(f"Error normalizing timestamp {timestamp}: {e}")
return now_system()
def normalize_dataframe_timestamps(df: pd.DataFrame,
timestamp_col: str = 'timestamp',
source_tz: Optional[pytz.BaseTzInfo] = None) -> pd.DataFrame:
"""
Normalize timestamps in a DataFrame to system timezone
Args:
df: DataFrame with timestamp column
timestamp_col: Name of timestamp column
source_tz: Source timezone (defaults to UTC)
Returns:
DataFrame with normalized timestamps
"""
if df.empty or timestamp_col not in df.columns:
return df
if source_tz is None:
source_tz = UTC
try:
# Convert to datetime if not already
if not pd.api.types.is_datetime64_any_dtype(df[timestamp_col]):
df[timestamp_col] = pd.to_datetime(df[timestamp_col])
# Handle timezone
if df[timestamp_col].dt.tz is None:
# Localize to source timezone first
df[timestamp_col] = df[timestamp_col].dt.tz_localize(source_tz)
# Convert to system timezone
df[timestamp_col] = df[timestamp_col].dt.tz_convert(SYSTEM_TIMEZONE)
return df
except Exception as e:
logger.error(f"Error normalizing DataFrame timestamps: {e}")
return df
def normalize_dataframe_index(df: pd.DataFrame,
source_tz: Optional[pytz.BaseTzInfo] = None) -> pd.DataFrame:
"""
Normalize DataFrame index timestamps to system timezone
Args:
df: DataFrame with datetime index
source_tz: Source timezone (defaults to UTC)
Returns:
DataFrame with normalized index
"""
if df.empty or not isinstance(df.index, pd.DatetimeIndex):
return df
if source_tz is None:
source_tz = UTC
try:
# Handle timezone
if df.index.tz is None:
# Localize to source timezone first
df.index = df.index.tz_localize(source_tz)
# Convert to system timezone
df.index = df.index.tz_convert(SYSTEM_TIMEZONE)
return df
except Exception as e:
logger.error(f"Error normalizing DataFrame index: {e}")
return df
def format_timestamp_for_display(dt: datetime, format_str: str = '%H:%M:%S') -> str:
"""
Format timestamp for display in system timezone
Args:
dt: Datetime to format
format_str: Format string
Returns:
Formatted timestamp string
"""
if dt is None:
return now_system().strftime(format_str)
try:
# Convert to system timezone if needed
if isinstance(dt, datetime):
if dt.tzinfo is None:
dt = UTC.localize(dt)
dt = dt.astimezone(SYSTEM_TIMEZONE)
return dt.strftime(format_str)
except Exception as e:
logger.error(f"Error formatting timestamp {dt}: {e}")
return now_system().strftime(format_str)
def get_timezone_offset_hours() -> float:
"""Get current timezone offset from UTC in hours"""
now = now_system()
utc_now = now_utc()
offset_seconds = (now - utc_now.replace(tzinfo=None)).total_seconds()
return offset_seconds / 3600
def is_market_hours() -> bool:
"""Check if it's currently market hours (24/7 for crypto, but useful for logging)"""
# Crypto markets are 24/7, but this can be useful for other purposes
return True
def log_timezone_info():
"""Log current timezone information for debugging"""
now_utc_time = now_utc()
now_sofia_time = now_sofia()
offset_hours = get_timezone_offset_hours()
logger.info(f"Timezone Info:")
logger.info(f" UTC Time: {now_utc_time}")
logger.info(f" Sofia Time: {now_sofia_time}")
logger.info(f" Offset: {offset_hours:+.1f} hours from UTC")
logger.info(f" System Timezone: {SYSTEM_TIMEZONE}")
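A short usage sketch of the new helpers (not part of this commit; illustrative values only, relying on the functions defined above):

import pandas as pd
from utils.timezone_utils import normalize_timestamp, normalize_dataframe_timestamps, now_system

# Millisecond Unix timestamps (as returned by Binance/MEXC) are assumed UTC and
# converted to the system timezone (Europe/Sofia).
ts = normalize_timestamp(1753704337000)
print(ts, ts.tzinfo)

# Naive timestamp columns are localized to UTC first, then converted to Sofia.
df = pd.DataFrame({"timestamp": ["2025-07-28 11:05:37"], "close": [3889.77]})
df = normalize_dataframe_timestamps(df)
print(df["timestamp"].dt.tz, now_system().tzinfo)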

View File

@@ -62,7 +62,12 @@ logging.getLogger('dash').setLevel(logging.WARNING)
logging.getLogger('dash.dash').setLevel(logging.WARNING)
# Import core components
- from core.config import get_config
try:
from core.config import get_config
except ImportError:
# Fallback if config module is not available
def get_config():
return {}
from core.data_provider import DataProvider
from core.standardized_data_provider import StandardizedDataProvider
from core.orchestrator import TradingOrchestrator
@@ -117,12 +122,17 @@ class CleanTradingDashboard:
"""Clean, modular trading dashboard implementation"""
def __init__(self, data_provider=None, orchestrator: Optional[Any] = None, trading_executor: Optional[TradingExecutor] = None):
- self.config = get_config()
# Load configuration safely
try:
self.config = get_config()
except Exception as e:
logger.warning(f"Error loading config, using empty config: {e}")
self.config = {}
# Removed batch counter - now using proper interval separation for performance
# Initialize components
- self.data_provider = data_provider or DataProvider()
self.data_provider = data_provider or StandardizedDataProvider()
self.trading_executor = trading_executor or TradingExecutor()
# Initialize unified orchestrator with full ML capabilities
@@ -174,10 +184,35 @@
self.standardized_cnn = None
self._initialize_standardized_cnn()
# Initialize trading mode and cold start settings from config
self.trading_mode_live = False # Default to simulation mode
self.cold_start_enabled = True # Default to cold start enabled
# Load config values if available
try:
if hasattr(self, 'config') and self.config:
# Check if trading mode is live based on config
exchanges = self.config.get('exchanges', {})
if exchanges:
for exchange_name, exchange_config in exchanges.items():
if exchange_config.get('enabled', False):
trading_mode = exchange_config.get('trading_mode', 'simulation')
if trading_mode == 'live':
self.trading_mode_live = True
break
# Check cold start setting
cold_start_config = self.config.get('cold_start', {})
self.cold_start_enabled = cold_start_config.get('enabled', True)
except Exception as e:
logger.warning(f"Error loading config settings, using defaults: {e}")
# Keep default values
# Initialize layout and component managers
self.layout_manager = DashboardLayoutManager(
starting_balance=self._get_initial_balance(),
- trading_executor=self.trading_executor
trading_executor=self.trading_executor,
dashboard=self
)
self.component_manager = DashboardComponentManager()
@@ -206,6 +241,19 @@
# ENHANCED: Model control toggles - separate inference and training
self.dqn_inference_enabled = True # Default: enabled
self.dqn_training_enabled = True # Default: enabled
# Trading mode and cold start settings from config
from core.config import get_config
config = get_config()
# Initialize trading mode from config (default to simulation)
default_trading_mode = config.get('exchanges', {}).get('bybit', {}).get('trading_mode', 'simulation')
self.trading_mode_live = (default_trading_mode == 'live')
# Initialize cold start from config (default to enabled)
self.cold_start_enabled = config.get('cold_start', {}).get('enabled', True)
logger.info(f"Dashboard initialized - Trading Mode: {'LIVE' if self.trading_mode_live else 'SIM'}, Cold Start: {'ON' if self.cold_start_enabled else 'OFF'}")
self.cnn_inference_enabled = True
self.cnn_training_enabled = True
@@ -611,13 +659,51 @@
logger.error(f"Error getting model status: {e}")
return {'loaded_models': {}, 'total_models': 0, 'system_status': 'ERROR'}
def _convert_utc_to_local(self, utc_timestamp):
"""Convert UTC timestamp to local timezone for display"""
try:
if utc_timestamp is None:
return datetime.now()
# Handle different input types
if isinstance(utc_timestamp, str):
try:
utc_timestamp = pd.to_datetime(utc_timestamp)
except:
return datetime.now()
# If it's already a datetime object
if isinstance(utc_timestamp, datetime):
# If it has timezone info and is UTC, convert to local
if utc_timestamp.tzinfo is not None:
if str(utc_timestamp.tzinfo) == 'UTC':
# Convert UTC to local timezone
local_timestamp = utc_timestamp.replace(tzinfo=timezone.utc).astimezone()
return local_timestamp.replace(tzinfo=None) # Remove timezone info for display
else:
# Already has timezone, convert to local
return utc_timestamp.astimezone().replace(tzinfo=None)
else:
# No timezone info, assume it's already local
return utc_timestamp
# Fallback
return datetime.now()
except Exception as e:
logger.debug(f"Error converting UTC to local time: {e}")
return datetime.now()
def _safe_strftime(self, timestamp_val, format_str='%H:%M:%S'):
"""Safely format timestamp, handling both string and datetime objects"""
try:
- if isinstance(timestamp_val, str):
- return timestamp_val
- elif hasattr(timestamp_val, 'strftime'):
- return timestamp_val.strftime(format_str)
# Convert to local time first
local_timestamp = self._convert_utc_to_local(timestamp_val)
if isinstance(local_timestamp, str):
return local_timestamp
elif hasattr(local_timestamp, 'strftime'):
return local_timestamp.strftime(format_str)
else:
return datetime.now().strftime(format_str)
except Exception as e:
@@ -1158,6 +1244,93 @@
else:
return [html.I(className="fas fa-exclamation-triangle me-1"), "Store Failed"]
return [html.I(className="fas fa-save me-1"), "Store All Models"]
# Trading Mode Toggle
@self.app.callback(
Output('trading-mode-display', 'children'),
Output('trading-mode-display', 'className'),
[Input('trading-mode-switch', 'value')]
)
def update_trading_mode(switch_value):
"""Update trading mode display and apply changes"""
try:
is_live = 'live' in (switch_value or [])
self.trading_mode_live = is_live
# Update trading executor mode if available
if hasattr(self, 'trading_executor') and self.trading_executor:
if hasattr(self.trading_executor, 'set_trading_mode'):
# Use the new set_trading_mode method
success = self.trading_executor.set_trading_mode('live' if is_live else 'simulation')
if success:
logger.info(f"TRADING MODE: {'LIVE' if is_live else 'SIMULATION'} - Mode updated successfully")
else:
logger.error(f"Failed to update trading mode to {'LIVE' if is_live else 'SIMULATION'}")
else:
# Fallback to direct property setting
if is_live:
self.trading_executor.trading_mode = 'live'
self.trading_executor.simulation_mode = False
logger.info("TRADING MODE: LIVE - Real orders will be executed!")
else:
self.trading_executor.trading_mode = 'simulation'
self.trading_executor.simulation_mode = True
logger.info("TRADING MODE: SIMULATION - Orders are simulated")
# Return display text and styling
if is_live:
return "LIVE", "fw-bold text-danger"
else:
return "SIM", "fw-bold text-warning"
except Exception as e:
logger.error(f"Error updating trading mode: {e}")
return "ERROR", "fw-bold text-danger"
# Cold Start Toggle
@self.app.callback(
Output('cold-start-display', 'children'),
Output('cold-start-display', 'className'),
[Input('cold-start-switch', 'value')]
)
def update_cold_start(switch_value):
"""Update cold start training mode"""
try:
is_enabled = 'enabled' in (switch_value or [])
self.cold_start_enabled = is_enabled
# Update orchestrator cold start mode if available
if hasattr(self, 'orchestrator') and self.orchestrator:
if hasattr(self.orchestrator, 'set_cold_start_training_enabled'):
# Use the new set_cold_start_training_enabled method
success = self.orchestrator.set_cold_start_training_enabled(is_enabled)
if success:
logger.info(f"COLD START: {'ON' if is_enabled else 'OFF'} - Training mode updated successfully")
else:
logger.error(f"Failed to update cold start training to {'ON' if is_enabled else 'OFF'}")
else:
# Fallback to direct property setting
if hasattr(self.orchestrator, 'cold_start_enabled'):
self.orchestrator.cold_start_enabled = is_enabled
# Update training frequency based on cold start mode
if hasattr(self.orchestrator, 'training_frequency'):
if is_enabled:
self.orchestrator.training_frequency = 'high' # Train on every signal
logger.info("COLD START: ON - Excessive training enabled")
else:
self.orchestrator.training_frequency = 'normal' # Normal training
logger.info("COLD START: OFF - Normal training frequency")
# Return display text and styling
if is_enabled:
return "ON", "fw-bold text-success"
else:
return "OFF", "fw-bold text-secondary"
except Exception as e:
logger.error(f"Error updating cold start mode: {e}")
return "ERROR", "fw-bold text-danger"
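The two callbacks above pair with the dcc.Checklist toggles added to the layout manager in the last file of this commit. A minimal self-contained sketch of that wiring (not part of this commit; a hypothetical standalone app that reuses the same component ids) showing the 'live' in (switch_value or []) membership pattern used to read the switch:

from dash import Dash, dcc, html, Input, Output

app = Dash(__name__)
app.layout = html.Div([
    dcc.Checklist(id='trading-mode-switch', options=[{'label': '', 'value': 'live'}], value=[]),
    html.Span(id='trading-mode-display'),
])

@app.callback(Output('trading-mode-display', 'children'),
              Output('trading-mode-display', 'className'),
              Input('trading-mode-switch', 'value'))
def update_trading_mode(switch_value):
    # Checklist values arrive as a list, so membership drives the mode
    is_live = 'live' in (switch_value or [])
    return ("LIVE", "fw-bold text-danger") if is_live else ("SIM", "fw-bold text-warning")

if __name__ == '__main__':
    app.run(debug=True)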
def _get_current_price(self, symbol: str) -> Optional[float]:
"""Get current price for symbol - ONLY using our data providers"""

View File

@@ -10,9 +10,10 @@ from datetime import datetime
class DashboardLayoutManager:
"""Manages dashboard layout and structure"""
- def __init__(self, starting_balance: float = 100.0, trading_executor=None):
def __init__(self, starting_balance: float = 100.0, trading_executor=None, dashboard=None):
self.starting_balance = starting_balance
self.trading_executor = trading_executor
self.dashboard = dashboard
def create_main_layout(self):
"""Create the main dashboard layout"""
@@ -153,6 +154,48 @@
"Session Controls"
], className="card-title mb-2"),
# Trading Agent Mode Toggle
html.Div([
html.Label([
html.I(className="fas fa-robot me-1"),
"Trading Agent: ",
html.Span(
id="trading-mode-display",
children="LIVE" if getattr(self.dashboard, 'trading_mode_live', False) else "SIM",
className="fw-bold text-danger" if getattr(self.dashboard, 'trading_mode_live', False) else "fw-bold text-warning"
)
], className="form-label small mb-1"),
dcc.Checklist(
id='trading-mode-switch',
options=[{'label': '', 'value': 'live'}],
value=['live'] if getattr(self.dashboard, 'trading_mode_live', False) else [],
className="form-check-input"
),
html.Small("SIM = Simulation Mode, LIVE = Real Trading", className="text-muted d-block")
], className="mb-2"),
# Cold Start Training Toggle
html.Div([
html.Label([
html.I(className="fas fa-fire me-1"),
"Cold Start Training: ",
html.Span(
id="cold-start-display",
children="ON" if getattr(self.dashboard, 'cold_start_enabled', True) else "OFF",
className="fw-bold text-success" if getattr(self.dashboard, 'cold_start_enabled', True) else "fw-bold text-secondary"
)
], className="form-label small mb-1"),
dcc.Checklist(
id='cold-start-switch',
options=[{'label': '', 'value': 'enabled'}],
value=['enabled'] if getattr(self.dashboard, 'cold_start_enabled', True) else [],
className="form-check-input"
),
html.Small("Excessive training during cold start", className="text-muted d-block")
], className="mb-2"),
html.Hr(className="my-2"),
# Leverage Control # Leverage Control
html.Div([ html.Div([
html.Label([ html.Label([