TZ fix again - wip
@@ -729,11 +729,19 @@ class DataProvider:
             logger.error(f"Error getting current COB imbalance for {symbol}: {e}")
             return {'imbalance': 0.0, 'price_range': price_range, 'error': str(e)}
 
-    def _calculate_cob_imbalance(self, cob_data: Dict, price_range: float) -> float:
-        """Calculate order book imbalance within specified price range around mid price"""
+    def _calculate_cob_imbalance(self, cob_data: Any, price_range: float) -> float:
+        """Calculate order book imbalance within specified price range around mid price.
+        Accepts dict snapshot or COBData-like objects (with bids/asks as list of [price, size]).
+        """
         try:
-            bids = cob_data.get('bids', [])
-            asks = cob_data.get('asks', [])
+            # Normalize input
+            if isinstance(cob_data, dict):
+                bids = cob_data.get('bids', [])
+                asks = cob_data.get('asks', [])
+            else:
+                # Try attribute access (COBData-like or snapshot objects)
+                bids = getattr(cob_data, 'bids', []) or []
+                asks = getattr(cob_data, 'asks', []) or []
 
             if not bids or not asks:
                 return 0.0
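Note: the hunk above only shows the input normalization; the imbalance math itself is below the fold. A minimal standalone sketch of the usual banded-imbalance computation, assuming bids/asks are [price, size] pairs and a symmetric band of `price_range` around the mid price (the formula is an assumption, not shown in this diff):

```python
# Assumed formula: (bid_vol - ask_vol) / (bid_vol + ask_vol) within the band.
def imbalance_within_range(bids, asks, price_range):
    # bids/asks: list of [price, size]; best bid is max price, best ask is min price
    best_bid, best_ask = max(b[0] for b in bids), min(a[0] for a in asks)
    mid = (best_bid + best_ask) / 2.0
    bid_vol = sum(size for price, size in bids if price >= mid - price_range)
    ask_vol = sum(size for price, size in asks if price <= mid + price_range)
    total = bid_vol + ask_vol
    return (bid_vol - ask_vol) / total if total > 0 else 0.0

# Example: heavier bid side -> positive imbalance
print(imbalance_within_range([[99.9, 5], [99.8, 4]], [[100.1, 3], [100.2, 1]], 0.5))  # ~0.385
```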
@@ -1207,17 +1215,15 @@ class DataProvider:
             logger.warning(f"No valid candles generated for {symbol}")
             return None
 
-        # Convert to DataFrame
+        # Convert to DataFrame (timestamps remain UTC tz-aware)
         df = pd.DataFrame(candles)
         # Ensure timestamps are timezone-aware (UTC to match COB WebSocket data)
         if not df.empty and 'timestamp' in df.columns:
-            import pytz
-            utc = pytz.UTC
-            # If timestamps are not timezone-aware, make them UTC
+            # Normalize to UTC tz-aware using pandas idioms
            if df['timestamp'].dt.tz is None:
-                df['timestamp'] = df['timestamp'].dt.tz_localize(utc)
+                df['timestamp'] = pd.to_datetime(df['timestamp'], utc=True)
            else:
-                df['timestamp'] = df['timestamp'].dt.tz_convert(utc)
+                df['timestamp'] = df['timestamp'].dt.tz_convert('UTC')
 
         df = df.sort_values('timestamp').reset_index(drop=True)
 
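Note: dropping pytz works because pandas handles both cases natively; a quick standalone check of the replacement idiom:

```python
import pandas as pd

# pd.to_datetime(..., utc=True) localizes naive timestamps and converts
# aware ones, so no pytz import is needed.
naive = pd.Series(pd.to_datetime(['2024-01-01 12:00:00']))
aware = naive.dt.tz_localize('US/Eastern')

print(pd.to_datetime(naive, utc=True).dt.tz)  # UTC (naive -> localized)
print(pd.to_datetime(aware, utc=True).dt.tz)  # UTC (aware -> converted)
```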
@@ -2153,7 +2159,7 @@ class DataProvider:
         # Get cached data (fast lookups)
         technical_indicators = self._get_latest_technical_indicators(symbol)
         cob_data = self._get_latest_cob_data_object(symbol)
-        last_predictions = {}  # TODO: Implement model prediction caching
+        last_predictions = {}
 
         # Build BaseDataInput (no validation for speed - assume data is good)
         base_data = BaseDataInput(
@@ -4655,26 +4661,53 @@ class DataProvider:
         return subscriber_id
 
     def get_latest_cob_data(self, symbol: str) -> dict:
-        """Get latest COB data for a symbol"""
+        """Get the most recent valid COB snapshot.
+        Falls back to the last valid snapshot in cache if the most recent is invalid.
+        A snapshot is considered valid if bids and asks are non-empty and stats.mid_price > 0.
+        """
         with self.subscriber_lock:
             # Use the original symbol format for cache lookup (matches how data is stored)
             logger.debug(f"Getting COB data for {symbol}")
 
-            if not hasattr(self, 'cob_data_cache'):
+            cache = getattr(self, 'cob_data_cache', None)
+            if not cache:
                 logger.debug("COB data cache not initialized")
                 return {}
 
-            if symbol not in self.cob_data_cache:
-                logger.debug(f"Symbol {symbol} not in COB cache. Available: {list(self.cob_data_cache.keys())}")
+            if symbol not in cache:
+                logger.debug(f"Symbol {symbol} not in COB cache. Available: {list(cache.keys())}")
                 return {}
 
-            if not self.cob_data_cache[symbol]:
+            snapshots = cache.get(symbol) or []
+            if not snapshots:
                 logger.debug(f"COB cache for {symbol} is empty")
                 return {}
 
-            latest_data = self.cob_data_cache[symbol][-1]
-            logger.debug(f"Latest COB data type for {symbol}: {type(latest_data)}")
-            return latest_data
+            def is_valid(snap: dict) -> bool:
+                try:
+                    bids = snap.get('bids') or []
+                    asks = snap.get('asks') or []
+                    stats = snap.get('stats') or {}
+                    mid_price = float(stats.get('mid_price', 0) or 0)
+                    return bool(bids) and bool(asks) and mid_price > 0
+                except Exception:
+                    return False
+
+            # Walk cache backwards to find the most recent valid snapshot
+            for snap in reversed(snapshots):
+                if is_valid(snap):
+                    # Annotate staleness info in stats if timestamp present
+                    try:
+                        ts_ms = snap.get('timestamp')
+                        if isinstance(ts_ms, (int, float)):
+                            import time
+                            age_ms = int(time.time() * 1000) - int(ts_ms)
+                            if isinstance(snap.get('stats'), dict):
+                                snap['stats']['age_ms'] = max(age_ms, 0)
+                    except Exception:
+                        pass
+                    return snap
+
+            # No valid snapshot found
+            logger.debug(f"No valid COB snapshot found for {symbol}")
+            return {}
 
     def get_cob_raw_ticks(self, symbol: str, count: int = 100) -> List[dict]:
         """Get raw COB ticks for a symbol (100+ updates per second)"""
 
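Note: a standalone sketch of the new fallback walk over two hypothetical snapshots — the invalid head (empty asks, zero mid) is skipped and the older valid snapshot is returned with `age_ms` annotated:

```python
import time

def is_valid(snap):
    stats = snap.get('stats') or {}
    return bool(snap.get('bids')) and bool(snap.get('asks')) and float(stats.get('mid_price', 0) or 0) > 0

now_ms = int(time.time() * 1000)
snapshots = [
    {'bids': [[99.9, 1]], 'asks': [[100.1, 1]], 'stats': {'mid_price': 100.0}, 'timestamp': now_ms - 2500},
    {'bids': [[99.9, 1]], 'asks': [], 'stats': {'mid_price': 0}, 'timestamp': now_ms},  # invalid head
]
latest_valid = next(s for s in reversed(snapshots) if is_valid(s))
latest_valid['stats']['age_ms'] = max(now_ms - latest_valid['timestamp'], 0)
print(latest_valid['stats'])  # {'mid_price': 100.0, 'age_ms': ~2500}
```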
@@ -33,9 +33,9 @@ class StandardizedDataProvider(DataProvider):
         """Initialize the standardized data provider"""
         super().__init__(symbols, timeframes)
 
-        # Standardized data storage
+        # Standardized data storage (separate COB cache to avoid colliding with parent caches)
         self.base_data_cache: Dict[str, BaseDataInput] = {}  # {symbol: BaseDataInput}
-        self.cob_data_cache: Dict[str, COBData] = {}  # {symbol: COBData}
+        self.standardized_cob_data_cache: Dict[str, COBData] = {}  # {symbol: COBData}
 
         # Model output management with extensible storage
        self.model_output_manager = ModelOutputManager(
@@ -50,7 +50,7 @@ class StandardizedDataProvider(DataProvider):
         # Initialize caches for each symbol
         for symbol in self.symbols:
             self.base_data_cache[symbol] = None
-            self.cob_data_cache[symbol] = None
+            self.standardized_cob_data_cache[symbol] = None
             self.cob_imbalance_history[symbol] = deque(maxlen=300)  # 5 minutes of 1s data
 
         # Ensure live price cache exists (in case parent didn't initialize it)
@@ -253,7 +253,7 @@ class StandardizedDataProvider(DataProvider):
             cob_obj.ma_60s_imbalance = ma_data.get('60s', {})
 
             # Cache and return
-            self.cob_data_cache[symbol] = cob_obj
+            self.standardized_cob_data_cache[symbol] = cob_obj
             return cob_obj
 
         except Exception as e:
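Note: the rename matters because the subclass attribute was shadowing the parent's `cob_data_cache`, which holds lists of raw snapshot dicts. A minimal repro of the collision (class names simplified, illustrative only):

```python
# A subclass reusing the attribute name replaces the parent's snapshot-list
# cache with a {symbol: COBData-or-None} mapping, so parent lookups like
# cache[symbol][-1] break.
class Parent:
    def __init__(self):
        self.cob_data_cache = {'ETH/USDT': [{'stats': {'mid_price': 100.0}}]}

    def latest(self, symbol):
        return self.cob_data_cache[symbol][-1]

class Child(Parent):
    def __init__(self):
        super().__init__()
        self.cob_data_cache = {'ETH/USDT': None}  # clobbers parent cache

print(Parent().latest('ETH/USDT'))  # works
try:
    Child().latest('ETH/USDT')
except TypeError as e:
    print('broken by shadowing:', e)  # 'NoneType' object is not subscriptable
```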
BIN  data/trading_system.db-journal (new file; binary not shown)
@@ -1966,6 +1966,11 @@ class CleanTradingDashboard:
         # 1. Get historical 1-minute data as base (180 candles = 3 hours) - FORCE REFRESH on first load
         is_startup = not hasattr(self, '_chart_initialized') or not self._chart_initialized
         df_historical = self.data_provider.get_historical_data(symbol, '1m', limit=180, refresh=is_startup)
+        # Determine local timezone
+        try:
+            _local_tz = datetime.now().astimezone().tzinfo
+        except Exception:
+            _local_tz = None
 
         # Mark chart as initialized to use cache on subsequent loads
         if is_startup:
@@ -1987,31 +1992,42 @@ class CleanTradingDashboard:
 
         # 3. Merge historical + live data intelligently
         if df_historical is not None and not df_historical.empty:
-            if df_live is not None and not df_live.empty:
-                # Find overlap point - where live data starts
-                live_start = df_live.index[0]
-
-                # FIXED: Normalize timezone for comparison
-                # Convert both to UTC timezone-naive for safe comparison
-                if hasattr(live_start, 'tz') and live_start.tz is not None:
-                    live_start = live_start.tz_localize(None)
-
-                # Normalize historical index timezone
+            # Convert historical to local timezone
+            try:
                 if hasattr(df_historical.index, 'tz') and df_historical.index.tz is not None:
-                    df_historical_normalized = df_historical.copy()
-                    df_historical_normalized.index = df_historical_normalized.index.tz_localize(None)
+                    df_historical_local = df_historical.tz_convert(_local_tz) if _local_tz else df_historical
                 else:
-                    df_historical_normalized = df_historical
-
+                    # Treat as UTC then convert to local
+                    df_historical_local = df_historical.copy()
+                    df_historical_local.index = df_historical_local.index.tz_localize('UTC')
+                    if _local_tz:
+                        df_historical_local = df_historical_local.tz_convert(_local_tz)
+            except Exception:
+                df_historical_local = df_historical
+
+            if df_live is not None and not df_live.empty:
+                # Convert live to local timezone
+                try:
+                    if hasattr(df_live.index, 'tz') and df_live.index.tz is not None:
+                        df_live_local = df_live.tz_convert(_local_tz) if _local_tz else df_live
+                    else:
+                        df_live_local = df_live.copy()
+                        df_live_local.index = df_live_local.index.tz_localize('UTC')
+                        if _local_tz:
+                            df_live_local = df_live_local.tz_convert(_local_tz)
+                except Exception:
+                    df_live_local = df_live
+
+                # Find overlap point - where live data starts (in local tz)
+                live_start = df_live_local.index[0]
                 # Keep historical data up to live data start
-                df_historical_clean = df_historical_normalized[df_historical_normalized.index < live_start]
-
+                df_historical_clean = df_historical_local[df_historical_local.index < live_start]
                 # Combine: historical (older) + live (newer)
-                df_main = pd.concat([df_historical_clean, df_live]).tail(180)
-                main_source = f"Historical + Live ({len(df_historical_clean)} + {len(df_live)} bars)"
+                df_main = pd.concat([df_historical_clean, df_live_local]).tail(180)
+                main_source = f"Historical + Live ({len(df_historical_clean)} + {len(df_live_local)} bars)"
             else:
-                # No live data, use historical only
-                df_main = df_historical
+                # No live data, use historical only (local tz)
+                df_main = df_historical_local
                 main_source = "Historical 1m"
         elif df_live is not None and not df_live.empty:
             # No historical data, use live only
@@ -2024,6 +2040,16 @@ class CleanTradingDashboard:
 
         # Get 1-second data (mini chart)
         ws_data_1s = self._get_websocket_chart_data(symbol, '1s')
+        if ws_data_1s is not None and not ws_data_1s.empty:
+            try:
+                if hasattr(ws_data_1s.index, 'tz') and ws_data_1s.index.tz is not None:
+                    ws_data_1s = ws_data_1s.tz_convert(_local_tz) if _local_tz else ws_data_1s
+                else:
+                    ws_data_1s.index = ws_data_1s.index.tz_localize('UTC')
+                    if _local_tz:
+                        ws_data_1s = ws_data_1s.tz_convert(_local_tz)
+            except Exception:
+                pass
 
         if df_main is None or df_main.empty:
             return go.Figure().add_annotation(text="No data available",
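Note: `df_historical`, `df_live`, and `ws_data_1s` all go through the same convert-if-aware / assume-UTC-then-convert dance; a compact equivalent for reference (the helper name is illustrative, not in the diff):

```python
from datetime import datetime
import pandas as pd

def to_local(df: pd.DataFrame) -> pd.DataFrame:
    local_tz = datetime.now().astimezone().tzinfo  # same idiom as the diff
    out = df.copy()
    if out.index.tz is None:
        out.index = out.index.tz_localize('UTC')  # assume UTC when naive
    return out.tz_convert(local_tz)

idx = pd.date_range('2024-01-01 12:00', periods=3, freq='1min')  # naive index
print(to_local(pd.DataFrame({'close': [1.0, 2.0, 3.0]}, index=idx)).index.tz)
```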
@@ -2081,6 +2107,60 @@ class CleanTradingDashboard:
         # ADD TRADES TO MAIN CHART
         self._add_trades_to_chart(fig, symbol, df_main, row=1)
 
+        # ADD PIVOT POINTS TO MAIN CHART (overlay on 1m)
+        try:
+            pivots_input = None
+            if hasattr(self.data_provider, 'get_base_data_input'):
+                bdi = self.data_provider.get_base_data_input(symbol)
+                if bdi and getattr(bdi, 'pivot_points', None):
+                    pivots_input = bdi.pivot_points
+            if pivots_input:
+                # Filter pivots within the visible time range of df_main
+                start_ts = df_main.index.min()
+                end_ts = df_main.index.max()
+                xs_high = []
+                ys_high = []
+                xs_low = []
+                ys_low = []
+                for p in pivots_input:
+                    ts = getattr(p, 'timestamp', None)
+                    price = getattr(p, 'price', None)
+                    ptype = getattr(p, 'type', 'low')
+                    if ts is None or price is None:
+                        continue
+                    # Convert pivot timestamp to local tz to match chart axes
+                    try:
+                        if hasattr(ts, 'tzinfo') and ts.tzinfo is not None:
+                            pt = ts.astimezone(_local_tz) if _local_tz else ts
+                        else:
+                            # Assume UTC then convert
+                            pt = ts.replace(tzinfo=timezone.utc)
+                            pt = pt.astimezone(_local_tz) if _local_tz else pt
+                    except Exception:
+                        pt = ts
+                    if start_ts <= pt <= end_ts:
+                        if str(ptype).lower() == 'high':
+                            xs_high.append(pt)
+                            ys_high.append(price)
+                        else:
+                            xs_low.append(pt)
+                            ys_low.append(price)
+                if xs_high or xs_low:
+                    fig.add_trace(
+                        go.Scatter(x=xs_high, y=ys_high, mode='markers', name='Pivot High',
+                                   marker=dict(color='#ff7043', size=7, symbol='triangle-up'),
+                                   hoverinfo='skip'),
+                        row=1, col=1
+                    )
+                    fig.add_trace(
+                        go.Scatter(x=xs_low, y=ys_low, mode='markers', name='Pivot Low',
+                                   marker=dict(color='#42a5f5', size=7, symbol='triangle-down'),
+                                   hoverinfo='skip'),
+                        row=1, col=1
+                    )
+        except Exception as e:
+            logger.debug(f"Error overlaying pivot points: {e}")
 
         # Mini 1-second chart (if available)
         if has_mini_chart and ws_data_1s is not None:
             fig.add_trace(
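Note: the pivot branch calls `timezone.utc`, so the dashboard module presumably needs `timezone` imported from `datetime` (not visible in this diff). A quick standalone check of the naive-timestamp path:

```python
from datetime import datetime, timezone

local_tz = datetime.now().astimezone().tzinfo
ts = datetime(2024, 1, 1, 12, 0)                       # naive pivot timestamp
pt = ts.replace(tzinfo=timezone.utc).astimezone(local_tz)
print(pt.isoformat())  # 12:00 UTC expressed in the machine's local offset
```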
@@ -2097,7 +2177,11 @@ class CleanTradingDashboard:
             )
 
             # ADD ALL SIGNALS TO 1S MINI CHART
-            self._add_signals_to_mini_chart(fig, symbol, ws_data_1s, row=2)
+            # Ensure signals use same local tz index
+            try:
+                self._add_signals_to_mini_chart(fig, symbol, ws_data_1s, row=2)
+            except Exception as e:
+                logger.debug(f"Error adding signals to mini chart: {e}")
 
         # Volume bars (bottom subplot)
         volume_row = 3 if has_mini_chart else 2
@@ -3324,8 +3408,39 @@ class CleanTradingDashboard:
             cob_data = self.data_provider.get_latest_cob_data(symbol)
 
             if cob_data and isinstance(cob_data, dict):
-                # Validate COB data structure
-                stats = cob_data.get('stats', {})
+                # Validate/derive COB stats
+                stats = cob_data.get('stats', {}) if isinstance(cob_data.get('stats', {}), dict) else {}
+                bids_raw = cob_data.get('bids', []) or []
+                asks_raw = cob_data.get('asks', []) or []
+                mid_price = float(stats.get('mid_price', 0) or 0)
+                # Derive when missing and we have both sides
+                if (mid_price <= 0) and bids_raw and asks_raw:
+                    try:
+                        # bids/asks expected as [[price, size], ...]
+                        best_bid = max([float(b[0]) for b in bids_raw if isinstance(b, (list, tuple)) and len(b) >= 2] or [0])
+                        best_ask = min([float(a[0]) for a in asks_raw if isinstance(a, (list, tuple)) and len(a) >= 2] or [0])
+                        if best_bid > 0 and best_ask > 0:
+                            mid_price = (best_bid + best_ask) / 2.0
+                            # Compute spread and imbalance in USD terms
+                            spread_bps = ((best_ask - best_bid) / mid_price) * 10000 if mid_price > 0 else 0
+                            bid_liq = sum([float(b[0]) * float(b[1]) for b in bids_raw if isinstance(b, (list, tuple)) and len(b) >= 2])
+                            ask_liq = sum([float(a[0]) * float(a[1]) for a in asks_raw if isinstance(a, (list, tuple)) and len(a) >= 2])
+                            imbalance = ((bid_liq - ask_liq) / (bid_liq + ask_liq)) if (bid_liq + ask_liq) > 0 else 0.0
+                            # Update/compose stats
+                            stats = {
+                                'mid_price': mid_price,
+                                'spread_bps': spread_bps,
+                                'imbalance': imbalance,
+                                'best_bid': best_bid,
+                                'best_ask': best_ask,
+                                'total_bid_liquidity': bid_liq,
+                                'total_ask_liquidity': ask_liq,
+                                'bid_levels': len(bids_raw),
+                                'ask_levels': len(asks_raw)
+                            }
+                    except Exception:
+                        pass
 
                 if stats and stats.get('mid_price', 0) > 0:
                     logger.debug(f"COB snapshot available for {symbol} from data provider")
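Note: worked numbers for the derive-when-missing path above (values illustrative):

```python
bids = [[99.95, 10.0], [99.90, 5.0]]
asks = [[100.05, 8.0], [100.10, 4.0]]

best_bid = max(b[0] for b in bids)                        # 99.95
best_ask = min(a[0] for a in asks)                        # 100.05
mid_price = (best_bid + best_ask) / 2.0                   # 100.0
spread_bps = (best_ask - best_bid) / mid_price * 10000    # 10.0 bps
bid_liq = sum(p * s for p, s in bids)                     # 1499.0 USD
ask_liq = sum(p * s for p, s in asks)                     # 1200.8 USD
imbalance = (bid_liq - ask_liq) / (bid_liq + ask_liq)     # ~0.110 (bid-heavy)
print(mid_price, round(spread_bps, 2), round(imbalance, 3))
```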
@@ -3357,8 +3472,15 @@ class CleanTradingDashboard:
                         'total_volume_usd': ask[0] * ask[1]
                     })
 
-                # Use stats from data and calculate liquidity properly
+                # Use stats (possibly derived) and calculate liquidity properly
                 self.stats = stats.copy()
+                # Propagate staleness if present from provider
+                try:
+                    s_stats = data.get('stats', {})
+                    if isinstance(s_stats, dict) and 'age_ms' in s_stats:
+                        self.stats['age_ms'] = s_stats['age_ms']
+                except Exception:
+                    pass
 
                 # Calculate total liquidity from order book if not provided
                 bid_liquidity = stats.get('bid_liquidity', 0) or stats.get('total_bid_liquidity', 0)
@@ -6773,8 +6895,6 @@ class CleanTradingDashboard:
                 logger.info("💾 Saving DQN model checkpoint...")
                 dqn_stats = self.orchestrator.get_model_statistics('dqn')
                 performance_score = dqn_stats.accuracy if dqn_stats and dqn_stats.accuracy else current_performance
-
-                from datetime import datetime
                 checkpoint_data = {
                     'model_state_dict': self.orchestrator.rl_agent.get_model_state() if hasattr(self.orchestrator.rl_agent, 'get_model_state') else None,
                     'performance_score': performance_score,
@@ -366,6 +366,15 @@ class DashboardComponentManager:
             'bid_levels': len(bids),
             'ask_levels': len(asks)
         }
+        # Show staleness if provided via provider (age_ms)
+        try:
+            age_ms = None
+            if hasattr(cob_snapshot, 'stats') and isinstance(cob_snapshot.stats, dict):
+                age_ms = cob_snapshot.stats.get('age_ms')
+            if age_ms is not None:
+                stats['age_ms'] = age_ms
+        except Exception:
+            pass
 
         # --- Left Panel: Overview and Stats ---
         # Prepend update info to overview