even better dash

Dobromir Popov
2025-06-25 02:36:17 +03:00
parent 8770038e20
commit afefcea308
2 changed files with 95 additions and 64 deletions

View File

@@ -373,14 +373,17 @@ class CleanTradingDashboard:
if ws_data_1s is not None and len(ws_data_1s) > 5:
fig = make_subplots(
rows=3, cols=1,
shared_xaxes=True,
vertical_spacing=0.05,
shared_xaxes=False, # Make 1s chart independent from 1m chart
vertical_spacing=0.08,
subplot_titles=(
f'{symbol} - {main_source} ({len(df_main)} bars)',
f'1s Mini Chart ({len(ws_data_1s)} bars)',
f'1s Mini Chart - Independent Axis ({len(ws_data_1s)} bars)',
'Volume'
),
row_heights=[0.5, 0.25, 0.25]
row_heights=[0.5, 0.25, 0.25],
specs=[[{"secondary_y": False}],
[{"secondary_y": False}],
[{"secondary_y": False}]]
)
has_mini_chart = True
else:
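For reference, a minimal standalone sketch of the new layout (placeholder symbol and data, not the dashboard's real traces): with shared_xaxes=False each row keeps its own x-axis, which is what lets the 1s mini chart zoom and pan independently of the 1m chart.

from plotly.subplots import make_subplots
import plotly.graph_objects as go

fig = make_subplots(
    rows=3, cols=1,
    shared_xaxes=False,  # each row keeps its own, unlinked x-axis
    vertical_spacing=0.08,
    subplot_titles=("ETH/USDT - 1m (placeholder)",
                    "1s Mini Chart - Independent Axis",
                    "Volume"),
    row_heights=[0.5, 0.25, 0.25],
    specs=[[{"secondary_y": False}],
           [{"secondary_y": False}],
           [{"secondary_y": False}]],
)
fig.add_trace(go.Scatter(x=[1, 2, 3], y=[2400, 2405, 2410], name="1m close"), row=1, col=1)
fig.add_trace(go.Scatter(x=[1, 2, 3], y=[2401, 2404, 2409], name="1s close"), row=2, col=1)
fig.add_trace(go.Bar(x=[1, 2, 3], y=[120, 90, 150], name="volume"), row=3, col=1)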
@@ -448,9 +451,23 @@ class CleanTradingDashboard:
xaxis_rangeslider_visible=False
)
# Update axes
fig.update_xaxes(showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)')
fig.update_yaxes(showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)')
# Update axes with specific configurations for independent charts
if has_mini_chart:
# Main 1m chart (row 1)
fig.update_xaxes(title_text="Time (1m intervals)", showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)', row=1, col=1)
fig.update_yaxes(title_text="Price (USD)", showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)', row=1, col=1)
# Independent 1s chart (row 2) - can zoom/pan separately
fig.update_xaxes(title_text="Time (1s ticks)", showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)', row=2, col=1)
fig.update_yaxes(title_text="Price (USD)", showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)', row=2, col=1)
# Volume chart (row 3)
fig.update_xaxes(title_text="Time", showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)', row=3, col=1)
fig.update_yaxes(title_text="Volume", showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)', row=3, col=1)
else:
# Main chart only
fig.update_xaxes(showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)')
fig.update_yaxes(showgrid=True, gridwidth=1, gridcolor='rgba(128,128,128,0.2)')
chart_info = f"1m bars: {len(df_main)}"
if has_mini_chart:
@@ -675,15 +692,29 @@ class CleanTradingDashboard:
"""Handle trading decision from orchestrator"""
try:
# Convert orchestrator decision to dashboard format
dashboard_decision = {
'timestamp': datetime.now().strftime('%H:%M:%S'),
'action': decision.action if hasattr(decision, 'action') else decision.get('action', 'UNKNOWN'),
'confidence': decision.confidence if hasattr(decision, 'confidence') else decision.get('confidence', 0),
'price': decision.price if hasattr(decision, 'price') else decision.get('price', 0),
'executed': True, # Orchestrator decisions are executed
'blocked': False,
'manual': False
}
# Handle both TradingDecision objects and dictionary formats
if hasattr(decision, 'action'):
# This is a TradingDecision object (dataclass)
dashboard_decision = {
'timestamp': datetime.now().strftime('%H:%M:%S'),
'action': decision.action,
'confidence': decision.confidence,
'price': decision.price,
'executed': True, # Orchestrator decisions are executed
'blocked': False,
'manual': False
}
else:
# This is a dictionary format
dashboard_decision = {
'timestamp': datetime.now().strftime('%H:%M:%S'),
'action': decision.get('action', 'UNKNOWN'),
'confidence': decision.get('confidence', 0),
'price': decision.get('price', 0),
'executed': True, # Orchestrator decisions are executed
'blocked': False,
'manual': False
}
# Add to recent decisions
self.recent_decisions.append(dashboard_decision)
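The two branches above build the same dictionary from either a TradingDecision dataclass or a plain dict. A minimal sketch of an equivalent helper (hypothetical, not part of the commit) that collapses the duplication:

from datetime import datetime

def to_dashboard_decision(decision) -> dict:
    def field(name, default):
        # prefer a dataclass attribute, fall back to a dict key, then the default
        if hasattr(decision, name):
            return getattr(decision, name)
        if isinstance(decision, dict):
            return decision.get(name, default)
        return default
    return {
        'timestamp': datetime.now().strftime('%H:%M:%S'),
        'action': field('action', 'UNKNOWN'),
        'confidence': field('confidence', 0),
        'price': field('price', 0),
        'executed': True,   # orchestrator decisions are executed
        'blocked': False,
        'manual': False,
    }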

View File

@@ -2584,8 +2584,8 @@ class TradingDashboard:
# If datetime is naive, assume it's UTC
if dt.tzinfo is None:
dt = pytz.UTC.localize(dt)
# Convert to local timezone
# Convert to local timezone
return dt.astimezone(self.timezone)
except Exception as e:
logger.warning(f"Error converting timezone: {e}")
@@ -2606,7 +2606,7 @@ class TradingDashboard:
return df
else:
# Data has timezone info, convert to local timezone
df.index = df.index.tz_convert(self.timezone)
df.index = df.index.tz_convert(self.timezone)
# Make timezone-naive to prevent browser double-conversion
df.index = df.index.tz_localize(None)
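A minimal sketch of the index handling above with placeholder data: convert the aware DatetimeIndex to the local zone, then drop the tz info so the browser does not shift the timestamps a second time.

import pandas as pd

idx = pd.date_range('2025-06-25 00:00', periods=3, freq='1min', tz='UTC')
df = pd.DataFrame({'close': [2400.0, 2401.5, 2402.0]}, index=idx)
df.index = df.index.tz_convert('Europe/Sofia')  # aware UTC -> aware local
df.index = df.index.tz_localize(None)           # strip tz, keep local wall time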
@@ -3375,7 +3375,7 @@ class TradingDashboard:
# Minimal cleanup to prevent interference
if is_cleanup_update:
try:
self._cleanup_old_data()
self._cleanup_old_data()
except:
pass # Don't let cleanup interfere with updates
@@ -3503,7 +3503,7 @@ class TradingDashboard:
# Only recreate chart if data is very old (5 minutes)
if time.time() - self._cached_chart_data_time > 300:
needs_new_chart = True
else:
else:
needs_new_chart = True
if needs_new_chart:
@@ -3514,7 +3514,7 @@ class TradingDashboard:
if price_chart is not None:
self._cached_price_chart = price_chart
self._cached_chart_data_time = time.time()
else:
else:
# If chart creation failed, try cached version or create empty
if hasattr(self, '_cached_price_chart') and self._cached_price_chart is not None:
price_chart = self._cached_price_chart
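The two hunks above share one caching pattern: rebuild the chart only when the cached copy is stale (older than five minutes), and if the rebuild fails, serve the last good chart. A minimal sketch with an illustrative method name; _cached_price_chart, _cached_chart_data_time and _create_price_chart are the attributes the diff itself uses.

import time

CHART_CACHE_TTL = 300  # seconds

def get_price_chart(self, symbol):
    cached = getattr(self, '_cached_price_chart', None)
    cached_at = getattr(self, '_cached_chart_data_time', 0)
    if cached is not None and time.time() - cached_at <= CHART_CACHE_TTL:
        return cached                        # fresh enough, skip the rebuild
    chart = self._create_price_chart(symbol)
    if chart is not None:
        self._cached_price_chart = chart
        self._cached_chart_data_time = time.time()
        return chart
    return cached                            # rebuild failed, fall back to the stale copy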
@@ -4508,7 +4508,7 @@ class TradingDashboard:
except Exception as e:
logger.debug(f"[COMPREHENSIVE] Error adding trade markers: {e}")
def _create_price_chart(self, symbol: str) -> go.Figure:
"""Create price chart with volume and Williams pivot points from cached data"""
try:
@@ -4523,30 +4523,30 @@ class TradingDashboard:
logger.debug(f"[CHART] Using WebSocket real-time data: {len(df)} ticks")
else:
# Fallback to traditional data provider approach
# For Williams Market Structure, we need 1s data for proper recursive analysis
# Get 4 hours (240 minutes) of 1m data for better trade visibility
df_1s = None
df_1m = None
# For Williams Market Structure, we need 1s data for proper recursive analysis
# Get 4 hours (240 minutes) of 1m data for better trade visibility
df_1s = None
df_1m = None
if ws_df is not None:
logger.debug(f"[CHART] WebSocket data insufficient ({len(ws_df) if not ws_df.empty else 0} rows), falling back to data provider")
# Try to get 1s data first for Williams analysis (reduced to 10 minutes for performance)
try:
df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=600, refresh=False)
if df_1s is None or df_1s.empty:
logger.warning("[CHART] No 1s cached data available, trying fresh 1s data")
df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=300, refresh=True)
if df_1s is not None and not df_1s.empty:
# Aggregate 1s data to 1m for chart display (cleaner visualization)
df = self._aggregate_1s_to_1m(df_1s)
actual_timeframe = '1s→1m'
else:
df_1s = None
except Exception as e:
logger.warning(f"[CHART] Error getting 1s data: {e}")
# Try to get 1s data first for Williams analysis (reduced to 10 minutes for performance)
try:
df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=600, refresh=False)
if df_1s is None or df_1s.empty:
logger.warning("[CHART] No 1s cached data available, trying fresh 1s data")
df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=300, refresh=True)
if df_1s is not None and not df_1s.empty:
# Aggregate 1s data to 1m for chart display (cleaner visualization)
df = self._aggregate_1s_to_1m(df_1s)
actual_timeframe = '1s→1m'
else:
df_1s = None
except Exception as e:
logger.warning(f"[CHART] Error getting 1s data: {e}")
df_1s = None
# Fallback to 1m data if 1s not available (4 hours for historical trades)
if df_1s is None:
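The diff only calls self._aggregate_1s_to_1m; its body is not shown. A resample-based sketch of what such a helper typically looks like (an assumption, not the project's implementation):

import pandas as pd

def aggregate_1s_to_1m(df_1s: pd.DataFrame) -> pd.DataFrame:
    # expects a DatetimeIndex and open/high/low/close/volume columns
    return df_1s.resample('1min').agg({
        'open': 'first',
        'high': 'max',
        'low': 'min',
        'close': 'last',
        'volume': 'sum',
    }).dropna()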
@@ -4728,7 +4728,7 @@ class TradingDashboard:
hovertemplate='<b>Volume: %{y:.0f}</b><br>%{x}<extra></extra>'
),
row=2, col=1
)
)
# Mark recent trading decisions with proper markers
if self.recent_decisions and df is not None and not df.empty:
@@ -4769,10 +4769,10 @@ class TradingDashboard:
decision_time_pd = pd.to_datetime(decision_time_utc)
if chart_start_utc <= decision_time_pd <= chart_end_utc:
signal_type = decision.get('signal_type', 'UNKNOWN')
if decision['action'] == 'BUY':
buy_decisions.append((decision, signal_type))
elif decision['action'] == 'SELL':
sell_decisions.append((decision, signal_type))
if decision['action'] == 'BUY':
buy_decisions.append((decision, signal_type))
elif decision['action'] == 'SELL':
sell_decisions.append((decision, signal_type))
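Once decisions are split into BUY and SELL lists, the dashboard marks them on the price chart ("Mark recent trading decisions with proper markers" above). A minimal standalone sketch with placeholder data and an assumed marker style; the real code adds such traces to the subplot's price row.

import plotly.graph_objects as go

fig = go.Figure()  # stand-in for the dashboard's subplot figure
fig.add_trace(go.Scatter(
    x=['2025-06-25 02:30'], y=[2410.5], mode='markers', name='BUY',
    marker=dict(symbol='triangle-up', size=10, color='green')))
fig.add_trace(go.Scatter(
    x=['2025-06-25 02:34'], y=[2415.2], mode='markers', name='SELL',
    marker=dict(symbol='triangle-down', size=10, color='red')))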
@@ -4892,30 +4892,30 @@ class TradingDashboard:
# Convert times to UTC for comparison - FIXED timezone handling
try:
if isinstance(entry_time, datetime):
if isinstance(entry_time, datetime):
# If naive datetime, assume it's in local timezone
if entry_time.tzinfo is None:
entry_time_utc = self.timezone.localize(entry_time).astimezone(timezone.utc).replace(tzinfo=None)
else:
entry_time_utc = entry_time.astimezone(timezone.utc).replace(tzinfo=None)
else:
continue
if isinstance(exit_time, datetime):
else:
continue
if isinstance(exit_time, datetime):
# If naive datetime, assume it's in local timezone
if exit_time.tzinfo is None:
exit_time_utc = self.timezone.localize(exit_time).astimezone(timezone.utc).replace(tzinfo=None)
else:
exit_time_utc = exit_time.astimezone(timezone.utc).replace(tzinfo=None)
else:
continue
# Check if trade overlaps with chart timeframe
entry_time_pd = pd.to_datetime(entry_time_utc)
exit_time_pd = pd.to_datetime(exit_time_utc)
if (chart_start_utc <= entry_time_pd <= chart_end_utc) or (chart_start_utc <= exit_time_pd <= chart_end_utc):
chart_trades.append(trade)
else:
continue
# Check if trade overlaps with chart timeframe
entry_time_pd = pd.to_datetime(entry_time_utc)
exit_time_pd = pd.to_datetime(exit_time_utc)
if (chart_start_utc <= entry_time_pd <= chart_end_utc) or (chart_start_utc <= exit_time_pd <= chart_end_utc):
chart_trades.append(trade)
except Exception as e:
logger.debug(f"Error processing trade timestamps: {e}")
continue
@@ -9094,7 +9094,7 @@ class TradingDashboard:
except Exception as e:
logger.debug(f"Error adding Williams pivot points safely: {e}")
def _add_williams_pivot_points_to_chart(self, fig, pivot_points: Dict, row: int = 1):
"""Add Williams pivot points as small triangles to the chart with proper timezone conversion"""
try:
@@ -10176,7 +10176,7 @@ class TradingDashboard:
except Exception as e:
logger.error(f"Error creating COB table rows: {e}")
return [html.Tr([html.Td("Error loading order book", colSpan=4, className="text-danger small")])]
def _create_cob_status_content(self) -> List:
"""Create COB status and training pipeline content"""
try: