TZ fix again - wip

Dobromir Popov
2025-08-08 01:41:30 +03:00
parent ba532327b6
commit ded7e7f008
5 changed files with 216 additions and 54 deletions

@@ -1966,6 +1966,11 @@ class CleanTradingDashboard:
# 1. Get historical 1-minute data as base (180 candles = 3 hours) - FORCE REFRESH on first load
is_startup = not hasattr(self, '_chart_initialized') or not self._chart_initialized
df_historical = self.data_provider.get_historical_data(symbol, '1m', limit=180, refresh=is_startup)
# Determine local timezone
try:
_local_tz = datetime.now().astimezone().tzinfo
except Exception:
_local_tz = None
# Mark chart as initialized to use cache on subsequent loads
if is_startup:
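For reference, the guarded local-timezone lookup above uses only the standard library; a minimal standalone sketch of the same pattern, applied to a sample UTC timestamp, looks like this (values are illustrative):

from datetime import datetime, timezone

# Resolve the machine's local timezone once; fall back to None, in which case
# downstream conversions are simply skipped (as in the hunk above).
try:
    local_tz = datetime.now().astimezone().tzinfo
except Exception:
    local_tz = None

# Example: an aware UTC timestamp rendered in the local zone for chart display
utc_ts = datetime(2025, 8, 7, 22, 41, tzinfo=timezone.utc)
local_ts = utc_ts.astimezone(local_tz) if local_tz else utc_ts
print(local_ts)  # e.g. 2025-08-08 01:41:00+03:00 on a UTC+3 machine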
@@ -1987,31 +1992,42 @@ class CleanTradingDashboard:
# 3. Merge historical + live data intelligently
if df_historical is not None and not df_historical.empty:
if df_live is not None and not df_live.empty:
# Find overlap point - where live data starts
live_start = df_live.index[0]
# FIXED: Normalize timezone for comparison
# Convert both to UTC timezone-naive for safe comparison
if hasattr(live_start, 'tz') and live_start.tz is not None:
live_start = live_start.tz_localize(None)
# Normalize historical index timezone
# Convert historical to local timezone
try:
if hasattr(df_historical.index, 'tz') and df_historical.index.tz is not None:
df_historical_normalized = df_historical.copy()
df_historical_normalized.index = df_historical_normalized.index.tz_localize(None)
df_historical_local = df_historical.tz_convert(_local_tz) if _local_tz else df_historical
else:
df_historical_normalized = df_historical
# Treat as UTC then convert to local
df_historical_local = df_historical.copy()
df_historical_local.index = df_historical_local.index.tz_localize('UTC')
if _local_tz:
df_historical_local = df_historical_local.tz_convert(_local_tz)
except Exception:
df_historical_local = df_historical
if df_live is not None and not df_live.empty:
# Convert live to local timezone
try:
if hasattr(df_live.index, 'tz') and df_live.index.tz is not None:
df_live_local = df_live.tz_convert(_local_tz) if _local_tz else df_live
else:
df_live_local = df_live.copy()
df_live_local.index = df_live_local.index.tz_localize('UTC')
if _local_tz:
df_live_local = df_live_local.tz_convert(_local_tz)
except Exception:
df_live_local = df_live
# Find overlap point - where live data starts (in local tz)
live_start = df_live_local.index[0]
# Keep historical data up to live data start
df_historical_clean = df_historical_normalized[df_historical_normalized.index < live_start]
df_historical_clean = df_historical_local[df_historical_local.index < live_start]
# Combine: historical (older) + live (newer)
df_main = pd.concat([df_historical_clean, df_live]).tail(180)
main_source = f"Historical + Live ({len(df_historical_clean)} + {len(df_live)} bars)"
df_main = pd.concat([df_historical_clean, df_live_local]).tail(180)
main_source = f"Historical + Live ({len(df_historical_clean)} + {len(df_live_local)} bars)"
else:
# No live data, use historical only
df_main = df_historical
# No live data, use historical only (local tz)
df_main = df_historical_local
main_source = "Historical 1m"
elif df_live is not None and not df_live.empty:
# No historical data, use live only
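The localize-as-UTC-then-convert-to-local pattern above is repeated for the historical, live, and 1-second frames; a minimal standalone sketch of that pattern follows (the helper name to_local_index is hypothetical and not part of this commit):

import pandas as pd

def to_local_index(df, local_tz):
    """Return df with its DatetimeIndex expressed in local_tz.
    Naive indexes are assumed to be UTC, matching the diff above."""
    if df is None or df.empty:
        return df
    out = df.copy()
    if out.index.tz is None:
        out.index = out.index.tz_localize('UTC')
    return out.tz_convert(local_tz) if local_tz else out

# Usage against the frames above (assumed shapes):
# df_historical_local = to_local_index(df_historical, _local_tz)
# df_live_local = to_local_index(df_live, _local_tz)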
@@ -2024,6 +2040,16 @@ class CleanTradingDashboard:
# Get 1-second data (mini chart)
ws_data_1s = self._get_websocket_chart_data(symbol, '1s')
if ws_data_1s is not None and not ws_data_1s.empty:
try:
if hasattr(ws_data_1s.index, 'tz') and ws_data_1s.index.tz is not None:
ws_data_1s = ws_data_1s.tz_convert(_local_tz) if _local_tz else ws_data_1s
else:
ws_data_1s.index = ws_data_1s.index.tz_localize('UTC')
if _local_tz:
ws_data_1s = ws_data_1s.tz_convert(_local_tz)
except Exception:
pass
if df_main is None or df_main.empty:
return go.Figure().add_annotation(text="No data available",
@@ -2081,6 +2107,60 @@ class CleanTradingDashboard:
# ADD TRADES TO MAIN CHART
self._add_trades_to_chart(fig, symbol, df_main, row=1)
# ADD PIVOT POINTS TO MAIN CHART (overlay on 1m)
try:
pivots_input = None
if hasattr(self.data_provider, 'get_base_data_input'):
bdi = self.data_provider.get_base_data_input(symbol)
if bdi and getattr(bdi, 'pivot_points', None):
pivots_input = bdi.pivot_points
if pivots_input:
# Filter pivots within the visible time range of df_main
start_ts = df_main.index.min()
end_ts = df_main.index.max()
xs_high = []
ys_high = []
xs_low = []
ys_low = []
for p in pivots_input:
ts = getattr(p, 'timestamp', None)
price = getattr(p, 'price', None)
ptype = getattr(p, 'type', 'low')
if ts is None or price is None:
continue
# Convert pivot timestamp to local tz to match chart axes
try:
if hasattr(ts, 'tzinfo') and ts.tzinfo is not None:
pt = ts.astimezone(_local_tz) if _local_tz else ts
else:
# Assume UTC then convert
pt = ts.replace(tzinfo=timezone.utc)
pt = pt.astimezone(_local_tz) if _local_tz else pt
except Exception:
pt = ts
if start_ts <= pt <= end_ts:
if str(ptype).lower() == 'high':
xs_high.append(pt)
ys_high.append(price)
else:
xs_low.append(pt)
ys_low.append(price)
if xs_high or xs_low:
fig.add_trace(
go.Scatter(x=xs_high, y=ys_high, mode='markers', name='Pivot High',
marker=dict(color='#ff7043', size=7, symbol='triangle-up'),
hoverinfo='skip'),
row=1, col=1
)
fig.add_trace(
go.Scatter(x=xs_low, y=ys_low, mode='markers', name='Pivot Low',
marker=dict(color='#42a5f5', size=7, symbol='triangle-down'),
hoverinfo='skip'),
row=1, col=1
)
except Exception as e:
logger.debug(f"Error overlaying pivot points: {e}")
# Mini 1-second chart (if available)
if has_mini_chart and ws_data_1s is not None:
fig.add_trace(
@@ -2097,7 +2177,11 @@ class CleanTradingDashboard:
)
# ADD ALL SIGNALS TO 1S MINI CHART
self._add_signals_to_mini_chart(fig, symbol, ws_data_1s, row=2)
# Ensure signals use same local tz index
try:
self._add_signals_to_mini_chart(fig, symbol, ws_data_1s, row=2)
except Exception as e:
logger.debug(f"Error adding signals to mini chart: {e}")
# Volume bars (bottom subplot)
volume_row = 3 if has_mini_chart else 2
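The pivot overlay above converts each pivot's plain datetime to the chart's local timezone before the visible-range check; a condensed sketch of that per-timestamp step (the helper name to_local_ts is hypothetical):

from datetime import timezone

def to_local_ts(ts, local_tz):
    """Convert one pivot timestamp to local_tz; naive values are assumed to be UTC."""
    if ts.tzinfo is None:
        ts = ts.replace(tzinfo=timezone.utc)
    return ts.astimezone(local_tz) if local_tz else ts

# A pivot is plotted only if its converted timestamp falls inside the visible window:
# start_ts <= to_local_ts(p.timestamp, _local_tz) <= end_ts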
@@ -3324,8 +3408,39 @@ class CleanTradingDashboard:
cob_data = self.data_provider.get_latest_cob_data(symbol)
if cob_data and isinstance(cob_data, dict):
# Validate COB data structure
stats = cob_data.get('stats', {})
# Validate/derive COB stats
stats = cob_data.get('stats', {}) if isinstance(cob_data.get('stats', {}), dict) else {}
bids_raw = cob_data.get('bids', []) or []
asks_raw = cob_data.get('asks', []) or []
mid_price = float(stats.get('mid_price', 0) or 0)
# Derive when missing and we have both sides
if (mid_price <= 0) and bids_raw and asks_raw:
try:
# bids/asks expected as [[price, size], ...]
best_bid = max([float(b[0]) for b in bids_raw if isinstance(b, (list, tuple)) and len(b) >= 2] or [0])
best_ask = min([float(a[0]) for a in asks_raw if isinstance(a, (list, tuple)) and len(a) >= 2] or [0])
if best_bid > 0 and best_ask > 0:
mid_price = (best_bid + best_ask) / 2.0
# compute spread in bps and liquidity imbalance (liquidity measured in USD)
spread_bps = ((best_ask - best_bid) / mid_price) * 10000 if mid_price > 0 else 0
bid_liq = sum([float(b[0]) * float(b[1]) for b in bids_raw if isinstance(b, (list, tuple)) and len(b) >= 2])
ask_liq = sum([float(a[0]) * float(a[1]) for a in asks_raw if isinstance(a, (list, tuple)) and len(a) >= 2])
imbalance = ((bid_liq - ask_liq) / (bid_liq + ask_liq)) if (bid_liq + ask_liq) > 0 else 0.0
# Update/compose stats
stats = {
'mid_price': mid_price,
'spread_bps': spread_bps,
'imbalance': imbalance,
'best_bid': best_bid,
'best_ask': best_ask,
'total_bid_liquidity': bid_liq,
'total_ask_liquidity': ask_liq,
'bid_levels': len(bids_raw),
'ask_levels': len(asks_raw)
}
except Exception:
pass
if stats and stats.get('mid_price', 0) > 0:
logger.debug(f"COB snapshot available for {symbol} from data provider")
@@ -3357,8 +3472,15 @@ class CleanTradingDashboard:
'total_volume_usd': ask[0] * ask[1]
})
# Use stats from data and calculate liquidity properly
# Use stats (possibly derived) and calculate liquidity properly
self.stats = stats.copy()
# Propagate staleness if present from provider
try:
s_stats = data.get('stats', {})
if isinstance(s_stats, dict) and 'age_ms' in s_stats:
self.stats['age_ms'] = s_stats['age_ms']
except Exception:
pass
# Calculate total liquidity from order book if not provided
bid_liquidity = stats.get('bid_liquidity', 0) or stats.get('total_bid_liquidity', 0)
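How age_ms is produced is provider-side and not shown in this diff; a hypothetical sketch of deriving it from a snapshot timestamp (names are assumptions, not the provider's API) would be:

import time

# Hypothetical: snapshot_time is the epoch-seconds timestamp of the last COB update.
snapshot_time = time.time() - 0.25                     # pretend the snapshot is 250 ms old
age_ms = int((time.time() - snapshot_time) * 1000)     # roughly 250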
@@ -6773,8 +6895,6 @@ class CleanTradingDashboard:
logger.info("💾 Saving DQN model checkpoint...")
dqn_stats = self.orchestrator.get_model_statistics('dqn')
performance_score = dqn_stats.accuracy if dqn_stats and dqn_stats.accuracy else current_performance
from datetime import datetime
checkpoint_data = {
'model_state_dict': self.orchestrator.rl_agent.get_model_state() if hasattr(self.orchestrator.rl_agent, 'get_model_state') else None,
'performance_score': performance_score,
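For context, a checkpoint dict like the one above is typically persisted with torch.save, assuming the DQN agent is PyTorch-based (an assumption; the storage backend is not shown in this diff), with illustrative values and path:

import torch  # assumption: the RL agent uses PyTorch

checkpoint_data = {
    'model_state_dict': None,      # placeholder for the agent's exported state
    'performance_score': 0.73,     # e.g. the DQN accuracy used as performance_score
}
torch.save(checkpoint_data, 'checkpoints/dqn_latest.pt')  # illustrative path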

@@ -366,6 +366,15 @@ class DashboardComponentManager:
'bid_levels': len(bids),
'ask_levels': len(asks)
}
# Show staleness if provided via provider (age_ms)
try:
age_ms = None
if hasattr(cob_snapshot, 'stats') and isinstance(cob_snapshot.stats, dict):
age_ms = cob_snapshot.stats.get('age_ms')
if age_ms is not None:
stats['age_ms'] = age_ms
except Exception:
pass
# --- Left Panel: Overview and Stats ---
# Prepend update info to overview
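The diff above only threads age_ms into the panel stats; a hypothetical display helper for rendering it (not part of this commit) might look like:

def format_age(age_ms):
    """Hypothetical helper: render COB snapshot age as a short label."""
    if age_ms is None:
        return "age: n/a"
    if age_ms < 1000:
        return f"age: {age_ms:.0f} ms"
    return f"age: {age_ms / 1000:.1f} s"

# format_age(250)   -> 'age: 250 ms'
# format_age(3200)  -> 'age: 3.2 s'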