From ded7e7f0089af10e45fd31ef973c42a51573606c Mon Sep 17 00:00:00 2001
From: Dobromir Popov
Date: Fri, 8 Aug 2025 01:41:30 +0300
Subject: [PATCH] Normalize timestamps to UTC/local across data provider and
 dashboard; harden COB snapshot retrieval (WIP)

---
 core/data_provider.py              |  81 ++++++++++----
 core/standardized_data_provider.py |   8 +-
 data/trading_system.db-journal     | Bin 0 -> 21032 bytes
 web/clean_dashboard.py             | 172 ++++++++++++++++++++++++-----
 web/component_manager.py           |   9 ++
 5 files changed, 216 insertions(+), 54 deletions(-)
 create mode 100644 data/trading_system.db-journal

diff --git a/core/data_provider.py b/core/data_provider.py
index 73edacb..8724cbc 100644
--- a/core/data_provider.py
+++ b/core/data_provider.py
@@ -729,11 +729,19 @@ class DataProvider:
             logger.error(f"Error getting current COB imbalance for {symbol}: {e}")
             return {'imbalance': 0.0, 'price_range': price_range, 'error': str(e)}

-    def _calculate_cob_imbalance(self, cob_data: Dict, price_range: float) -> float:
-        """Calculate order book imbalance within specified price range around mid price"""
+    def _calculate_cob_imbalance(self, cob_data: Any, price_range: float) -> float:
+        """Calculate order book imbalance within the specified price range around the mid price.
+        Accepts a dict snapshot or a COBData-like object (bids/asks as lists of [price, size]).
+        """
         try:
-            bids = cob_data.get('bids', [])
-            asks = cob_data.get('asks', [])
+            # Normalize input
+            if isinstance(cob_data, dict):
+                bids = cob_data.get('bids', [])
+                asks = cob_data.get('asks', [])
+            else:
+                # Try attribute access (COBData-like or snapshot objects)
+                bids = getattr(cob_data, 'bids', []) or []
+                asks = getattr(cob_data, 'asks', []) or []

             if not bids or not asks:
                 return 0.0
@@ -1207,17 +1215,15 @@ class DataProvider:
                 logger.warning(f"No valid candles generated for {symbol}")
                 return None

-            # Convert to DataFrame
+            # Convert to DataFrame (timestamps remain UTC tz-aware)
             df = pd.DataFrame(candles)
             # Ensure timestamps are timezone-aware (UTC to match COB WebSocket data)
             if not df.empty and 'timestamp' in df.columns:
-                import pytz
-                utc = pytz.UTC
-                # If timestamps are not timezone-aware, make them UTC
+                # Normalize to UTC tz-aware using pandas idioms
                 if df['timestamp'].dt.tz is None:
-                    df['timestamp'] = df['timestamp'].dt.tz_localize(utc)
+                    df['timestamp'] = pd.to_datetime(df['timestamp'], utc=True)
                 else:
-                    df['timestamp'] = df['timestamp'].dt.tz_convert(utc)
+                    df['timestamp'] = df['timestamp'].dt.tz_convert('UTC')

             df = df.sort_values('timestamp').reset_index(drop=True)

@@ -2153,7 +2159,7 @@ class DataProvider:
         # Get cached data (fast lookups)
         technical_indicators = self._get_latest_technical_indicators(symbol)
         cob_data = self._get_latest_cob_data_object(symbol)
-        last_predictions = {}  # TODO: Implement model prediction caching
+        last_predictions = {}

         # Build BaseDataInput (no validation for speed - assume data is good)
         base_data = BaseDataInput(
@@ -4655,26 +4661,53 @@ class DataProvider:

         return subscriber_id

     def get_latest_cob_data(self, symbol: str) -> dict:
-        """Get latest COB data for a symbol"""
+        """Get the most recent valid COB snapshot.
+        Falls back to the last valid snapshot in the cache if the most recent one is invalid.
+        A snapshot is considered valid if bids and asks are non-empty and stats.mid_price > 0.
+        """
         with self.subscriber_lock:
-            # Use the original symbol format for cache lookup (matches how data is stored)
             logger.debug(f"Getting COB data for {symbol}")
-
-            if not hasattr(self, 'cob_data_cache'):
+
+            cache = getattr(self, 'cob_data_cache', None)
+            if not cache:
                 logger.debug("COB data cache not initialized")
                 return {}
-
-            if symbol not in self.cob_data_cache:
-                logger.debug(f"Symbol {symbol} not in COB cache. Available: {list(self.cob_data_cache.keys())}")
+            if symbol not in cache:
+                logger.debug(f"Symbol {symbol} not in COB cache. Available: {list(cache.keys())}")
                 return {}
-
-            if not self.cob_data_cache[symbol]:
+            snapshots = cache.get(symbol) or []
+            if not snapshots:
                 logger.debug(f"COB cache for {symbol} is empty")
                 return {}
-
-            latest_data = self.cob_data_cache[symbol][-1]
-            logger.debug(f"Latest COB data type for {symbol}: {type(latest_data)}")
-            return latest_data
+
+            def is_valid(snap: dict) -> bool:
+                try:
+                    bids = snap.get('bids') or []
+                    asks = snap.get('asks') or []
+                    stats = snap.get('stats') or {}
+                    mid_price = float(stats.get('mid_price', 0) or 0)
+                    return bool(bids) and bool(asks) and mid_price > 0
+                except Exception:
+                    return False
+
+            # Walk the cache backwards to find the most recent valid snapshot
+            for snap in reversed(snapshots):
+                if is_valid(snap):
+                    # Annotate staleness in stats if a timestamp is present
+                    try:
+                        ts_ms = snap.get('timestamp')
+                        if isinstance(ts_ms, (int, float)):
+                            import time
+                            age_ms = int(time.time() * 1000) - int(ts_ms)
+                            if isinstance(snap.get('stats'), dict):
+                                snap['stats']['age_ms'] = max(age_ms, 0)
+                    except Exception:
+                        pass
+                    return snap
+
+            # No valid snapshot found
+            logger.debug(f"No valid COB snapshot found for {symbol}")
+            return {}

     def get_cob_raw_ticks(self, symbol: str, count: int = 100) -> List[dict]:
         """Get raw COB ticks for a symbol (100+ updates per second)"""
diff --git a/core/standardized_data_provider.py b/core/standardized_data_provider.py
index cadbec1..d53ae93 100644
--- a/core/standardized_data_provider.py
+++ b/core/standardized_data_provider.py
@@ -33,9 +33,9 @@ class StandardizedDataProvider(DataProvider):
         """Initialize the standardized data provider"""
         super().__init__(symbols, timeframes)

-        # Standardized data storage
+        # Standardized data storage (separate COB cache to avoid colliding with parent caches)
         self.base_data_cache: Dict[str, BaseDataInput] = {}  # {symbol: BaseDataInput}
-        self.cob_data_cache: Dict[str, COBData] = {}  # {symbol: COBData}
+        self.standardized_cob_data_cache: Dict[str, COBData] = {}  # {symbol: COBData}

         # Model output management with extensible storage
         self.model_output_manager = ModelOutputManager(
@@ -50,7 +50,7 @@ class StandardizedDataProvider(DataProvider):
         # Initialize caches for each symbol
         for symbol in self.symbols:
             self.base_data_cache[symbol] = None
-            self.cob_data_cache[symbol] = None
+            self.standardized_cob_data_cache[symbol] = None
             self.cob_imbalance_history[symbol] = deque(maxlen=300)  # 5 minutes of 1s data

         # Ensure live price cache exists (in case parent didn't initialize it)
@@ -253,7 +253,7 @@ class StandardizedDataProvider(DataProvider):
                 cob_obj.ma_60s_imbalance = ma_data.get('60s', {})

             # Cache and return
-            self.cob_data_cache[symbol] = cob_obj
+            self.standardized_cob_data_cache[symbol] = cob_obj
             return cob_obj

         except Exception as e:
diff --git a/data/trading_system.db-journal b/data/trading_system.db-journal
new file mode 100644
index 0000000000000000000000000000000000000000..7b9b4166442e3652178640462d530152dbb06bdb
GIT binary patch
literal 21032
[21032 bytes of base85-encoded binary payload elided: transient SQLite rollback journal]
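Note: data/trading_system.db-journal is a transient SQLite rollback journal; committing it alongside a code change is almost certainly accidental. A minimal .gitignore addition would prevent a recurrence (sketch only; the data/ path comes from this patch, and the -wal/-shm patterns are assumptions covering SQLite's WAL mode):

    # SQLite side files created while trading_system.db is open
    data/*.db-journal
    data/*.db-wal
    data/*.db-shm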
diff --git a/web/clean_dashboard.py b/web/clean_dashboard.py
index de19afb..abecf84 100644
--- a/web/clean_dashboard.py
+++ b/web/clean_dashboard.py
@@ -1966,6 +1966,11 @@ class CleanTradingDashboard:
             # 1. Get historical 1-minute data as base (180 candles = 3 hours) - FORCE REFRESH on first load
             is_startup = not hasattr(self, '_chart_initialized') or not self._chart_initialized
             df_historical = self.data_provider.get_historical_data(symbol, '1m', limit=180, refresh=is_startup)
+            # Determine local timezone
+            try:
+                _local_tz = datetime.now().astimezone().tzinfo
+            except Exception:
+                _local_tz = None

             # Mark chart as initialized to use cache on subsequent loads
             if is_startup:
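Note: the _local_tz probe above works because calling astimezone() on the naive result of datetime.now() attaches the host's configured timezone. A standalone illustration (offsets shown for a UTC+03:00 host, matching this commit's Date header):

    from datetime import datetime

    local = datetime.now().astimezone()  # aware datetime in the OS-configured timezone
    print(local.tzinfo)                  # e.g. a fixed-offset tzinfo such as UTC+03:00
    print(local.utcoffset())             # e.g. 3:00:00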
@@ -1987,31 +1992,42 @@ class CleanTradingDashboard:
             # 3. Merge historical + live data intelligently
             if df_historical is not None and not df_historical.empty:
-                if df_live is not None and not df_live.empty:
-                    # Find overlap point - where live data starts
-                    live_start = df_live.index[0]
-
-                    # FIXED: Normalize timezone for comparison
-                    # Convert both to UTC timezone-naive for safe comparison
-                    if hasattr(live_start, 'tz') and live_start.tz is not None:
-                        live_start = live_start.tz_localize(None)
-
-                    # Normalize historical index timezone
+                # Convert historical to local timezone
+                try:
                     if hasattr(df_historical.index, 'tz') and df_historical.index.tz is not None:
-                        df_historical_normalized = df_historical.copy()
-                        df_historical_normalized.index = df_historical_normalized.index.tz_localize(None)
+                        df_historical_local = df_historical.tz_convert(_local_tz) if _local_tz else df_historical
                     else:
-                        df_historical_normalized = df_historical
-
+                        # Treat as UTC then convert to local
+                        df_historical_local = df_historical.copy()
+                        df_historical_local.index = df_historical_local.index.tz_localize('UTC')
+                        if _local_tz:
+                            df_historical_local = df_historical_local.tz_convert(_local_tz)
+                except Exception:
+                    df_historical_local = df_historical
+
+                if df_live is not None and not df_live.empty:
+                    # Convert live to local timezone
+                    try:
+                        if hasattr(df_live.index, 'tz') and df_live.index.tz is not None:
+                            df_live_local = df_live.tz_convert(_local_tz) if _local_tz else df_live
+                        else:
+                            df_live_local = df_live.copy()
+                            df_live_local.index = df_live_local.index.tz_localize('UTC')
+                            if _local_tz:
+                                df_live_local = df_live_local.tz_convert(_local_tz)
+                    except Exception:
+                        df_live_local = df_live
+
+                    # Find overlap point - where live data starts (in local tz)
+                    live_start = df_live_local.index[0]
                     # Keep historical data up to live data start
-                    df_historical_clean = df_historical_normalized[df_historical_normalized.index < live_start]
-
+                    df_historical_clean = df_historical_local[df_historical_local.index < live_start]
                     # Combine: historical (older) + live (newer)
-                    df_main = pd.concat([df_historical_clean, df_live]).tail(180)
-                    main_source = f"Historical + Live ({len(df_historical_clean)} + {len(df_live)} bars)"
+                    df_main = pd.concat([df_historical_clean, df_live_local]).tail(180)
+                    main_source = f"Historical + Live ({len(df_historical_clean)} + {len(df_live_local)} bars)"
                 else:
-                    # No live data, use historical only
-                    df_main = df_historical
+                    # No live data, use historical only (local tz)
+                    df_main = df_historical_local
                     main_source = "Historical 1m"
             elif df_live is not None and not df_live.empty:
                 # No historical data, use live only
@@ -2024,6 +2040,16 @@ class CleanTradingDashboard:

             # Get 1-second data (mini chart)
             ws_data_1s = self._get_websocket_chart_data(symbol, '1s')
+            if ws_data_1s is not None and not ws_data_1s.empty:
+                try:
+                    if hasattr(ws_data_1s.index, 'tz') and ws_data_1s.index.tz is not None:
+                        ws_data_1s = ws_data_1s.tz_convert(_local_tz) if _local_tz else ws_data_1s
+                    else:
+                        ws_data_1s.index = ws_data_1s.index.tz_localize('UTC')
+                        if _local_tz:
+                            ws_data_1s = ws_data_1s.tz_convert(_local_tz)
+                except Exception:
+                    pass

             if df_main is None or df_main.empty:
                 return go.Figure().add_annotation(text="No data available",
@@ -2081,6 +2107,60 @@ class CleanTradingDashboard:
             # ADD TRADES TO MAIN CHART
             self._add_trades_to_chart(fig, symbol, df_main, row=1)

+            # ADD PIVOT POINTS TO MAIN CHART (overlay on 1m)
+            try:
+                pivots_input = None
+                if hasattr(self.data_provider, 'get_base_data_input'):
+                    bdi = self.data_provider.get_base_data_input(symbol)
+                    if bdi and getattr(bdi, 'pivot_points', None):
+                        pivots_input = bdi.pivot_points
+                if pivots_input:
+                    # Filter pivots within the visible time range of df_main
+                    start_ts = df_main.index.min()
+                    end_ts = df_main.index.max()
+                    xs_high = []
+                    ys_high = []
+                    xs_low = []
+                    ys_low = []
+                    for p in pivots_input:
+                        ts = getattr(p, 'timestamp', None)
+                        price = getattr(p, 'price', None)
+                        ptype = getattr(p, 'type', 'low')
+                        if ts is None or price is None:
+                            continue
+                        # Convert pivot timestamp to local tz to match chart axes
+                        try:
+                            if hasattr(ts, 'tzinfo') and ts.tzinfo is not None:
+                                pt = ts.astimezone(_local_tz) if _local_tz else ts
+                            else:
+                                # Assume UTC then convert
+                                pt = ts.replace(tzinfo=timezone.utc)
+                                pt = pt.astimezone(_local_tz) if _local_tz else pt
+                        except Exception:
+                            pt = ts
+                        if start_ts <= pt <= end_ts:
+                            if str(ptype).lower() == 'high':
+                                xs_high.append(pt)
+                                ys_high.append(price)
+                            else:
+                                xs_low.append(pt)
+                                ys_low.append(price)
+                    if xs_high or xs_low:
+                        fig.add_trace(
+                            go.Scatter(x=xs_high, y=ys_high, mode='markers', name='Pivot High',
+                                       marker=dict(color='#ff7043', size=7, symbol='triangle-up'),
+                                       hoverinfo='skip'),
+                            row=1, col=1
+                        )
+                        fig.add_trace(
+                            go.Scatter(x=xs_low, y=ys_low, mode='markers', name='Pivot Low',
+                                       marker=dict(color='#42a5f5', size=7, symbol='triangle-down'),
+                                       hoverinfo='skip'),
+                            row=1, col=1
+                        )
+            except Exception as e:
+                logger.debug(f"Error overlaying pivot points: {e}")
+
             # Mini 1-second chart (if available)
             if has_mini_chart and ws_data_1s is not None:
                 fig.add_trace(
@@ -2097,7 +2177,11 @@ class CleanTradingDashboard:
             )

             # ADD ALL SIGNALS TO 1S MINI CHART
-            self._add_signals_to_mini_chart(fig, symbol, ws_data_1s, row=2)
+            # Ensure signals use same local tz index
+            try:
+                self._add_signals_to_mini_chart(fig, symbol, ws_data_1s, row=2)
+            except Exception as e:
+                logger.debug(f"Error adding signals to mini chart: {e}")

             # Volume bars (bottom subplot)
             volume_row = 3 if has_mini_chart else 2
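Note: the pivot overlay converts each pivot timestamp to the chart's local timezone before the start_ts <= pt <= end_ts check; comparing a naive datetime against the tz-aware chart index would raise a TypeError. A standalone illustration of the path taken for naive pivot timestamps (assumed to be UTC):

    from datetime import datetime, timezone, timedelta

    local_tz = timezone(timedelta(hours=3))          # stand-in for _local_tz
    naive_pivot = datetime(2025, 8, 7, 22, 41, 30)   # naive, assumed UTC
    aware_pivot = naive_pivot.replace(tzinfo=timezone.utc).astimezone(local_tz)
    print(aware_pivot.isoformat())                   # 2025-08-08T01:41:30+03:00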
@@ -3324,8 +3408,39 @@ class CleanTradingDashboard:

             cob_data = self.data_provider.get_latest_cob_data(symbol)

             if cob_data and isinstance(cob_data, dict):
-                # Validate COB data structure
-                stats = cob_data.get('stats', {})
+                # Validate/derive COB stats
+                stats = cob_data.get('stats', {}) if isinstance(cob_data.get('stats', {}), dict) else {}
+                bids_raw = cob_data.get('bids', []) or []
+                asks_raw = cob_data.get('asks', []) or []
+                mid_price = float(stats.get('mid_price', 0) or 0)
+                # Derive stats when mid_price is missing and both book sides are present
+                if (mid_price <= 0) and bids_raw and asks_raw:
+                    try:
+                        # bids/asks expected as [[price, size], ...]
+                        best_bid = max([float(b[0]) for b in bids_raw if isinstance(b, (list, tuple)) and len(b) >= 2] or [0])
+                        best_ask = min([float(a[0]) for a in asks_raw if isinstance(a, (list, tuple)) and len(a) >= 2] or [0])
+                        if best_bid > 0 and best_ask > 0:
+                            mid_price = (best_bid + best_ask) / 2.0
+                            # Compute spread (in bps) and imbalance over USD-notional liquidity
+                            spread_bps = ((best_ask - best_bid) / mid_price) * 10000 if mid_price > 0 else 0
+                            bid_liq = sum([float(b[0]) * float(b[1]) for b in bids_raw if isinstance(b, (list, tuple)) and len(b) >= 2])
+                            ask_liq = sum([float(a[0]) * float(a[1]) for a in asks_raw if isinstance(a, (list, tuple)) and len(a) >= 2])
+                            imbalance = ((bid_liq - ask_liq) / (bid_liq + ask_liq)) if (bid_liq + ask_liq) > 0 else 0.0
+                            # Update/compose stats
+                            stats = {
+                                'mid_price': mid_price,
+                                'spread_bps': spread_bps,
+                                'imbalance': imbalance,
+                                'best_bid': best_bid,
+                                'best_ask': best_ask,
+                                'total_bid_liquidity': bid_liq,
+                                'total_ask_liquidity': ask_liq,
+                                'bid_levels': len(bids_raw),
+                                'ask_levels': len(asks_raw)
+                            }
+                    except Exception:
+                        pass
+
                 if stats and stats.get('mid_price', 0) > 0:
                     logger.debug(f"COB snapshot available for {symbol} from data provider")
@@ -3357,8 +3472,15 @@ class CleanTradingDashboard:
                             'total_volume_usd': ask[0] * ask[1]
                         })

-                # Use stats from data and calculate liquidity properly
+                # Use stats (possibly derived) and calculate liquidity properly
                 self.stats = stats.copy()
+                # Propagate staleness from the provider if present
+                try:
+                    s_stats = data.get('stats', {})
+                    if isinstance(s_stats, dict) and 'age_ms' in s_stats:
+                        self.stats['age_ms'] = s_stats['age_ms']
+                except Exception:
+                    pass

                 # Calculate total liquidity from order book if not provided
                 bid_liquidity = stats.get('bid_liquidity', 0) or stats.get('total_bid_liquidity', 0)
@@ -6773,8 +6895,6 @@ class CleanTradingDashboard:
             logger.info("💾 Saving DQN model checkpoint...")
             dqn_stats = self.orchestrator.get_model_statistics('dqn')
             performance_score = dqn_stats.accuracy if dqn_stats and dqn_stats.accuracy else current_performance
-
-            from datetime import datetime
             checkpoint_data = {
                 'model_state_dict': self.orchestrator.rl_agent.get_model_state() if hasattr(self.orchestrator.rl_agent, 'get_model_state') else None,
                 'performance_score': performance_score,
diff --git a/web/component_manager.py b/web/component_manager.py
index 8504726..506fb59 100644
--- a/web/component_manager.py
+++ b/web/component_manager.py
@@ -366,6 +366,15 @@ class DashboardComponentManager:
                 'bid_levels': len(bids),
                 'ask_levels': len(asks)
             }
+            # Show staleness if provided via provider (age_ms)
+            try:
+                age_ms = None
+                if hasattr(cob_snapshot, 'stats') and isinstance(cob_snapshot.stats, dict):
+                    age_ms = cob_snapshot.stats.get('age_ms')
+                if age_ms is not None:
+                    stats['age_ms'] = age_ms
+            except Exception:
+                pass

             # --- Left Panel: Overview and Stats ---
             # Prepend update info to overview
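Note: a worked example of the stats derivation the dashboard now performs when stats.mid_price is missing (a standalone sketch using the same formulas as the hunk at @@ -3324 above; the book levels are made-up values):

    bids = [[99.9, 2.0], [99.8, 5.0]]   # [price, size]
    asks = [[100.1, 1.5], [100.2, 4.0]]

    best_bid = max(p for p, _ in bids)                        # 99.9
    best_ask = min(p for p, _ in asks)                        # 100.1
    mid_price = (best_bid + best_ask) / 2.0                   # 100.0
    spread_bps = (best_ask - best_bid) / mid_price * 10000    # 20.0 bps

    bid_liq = sum(p * s for p, s in bids)                     # 698.80 USD notional
    ask_liq = sum(p * s for p, s in asks)                     # 550.95 USD notional
    imbalance = (bid_liq - ask_liq) / (bid_liq + ask_liq)     # ~ +0.118 (bid-heavy)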
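Note: the fallback behaviour of DataProvider.get_latest_cob_data is easy to pin down in a unit test. A minimal pytest-style sketch, assuming the method only touches subscriber_lock and cob_data_cache (true for the hunk above; bypassing __init__ is a shortcut, and a fuller fixture would construct DataProvider normally):

    import threading

    from core.data_provider import DataProvider

    def make_provider(snapshots):
        dp = DataProvider.__new__(DataProvider)   # skip __init__: no exchange connectivity needed
        dp.subscriber_lock = threading.Lock()
        dp.cob_data_cache = {'ETH/USDT': snapshots}
        return dp

    def test_falls_back_to_last_valid_snapshot():
        valid = {'bids': [[100.0, 1.0]], 'asks': [[100.1, 2.0]], 'stats': {'mid_price': 100.05}}
        invalid = {'bids': [], 'asks': [], 'stats': {'mid_price': 0}}
        dp = make_provider([valid, invalid])
        assert dp.get_latest_cob_data('ETH/USDT') is valid    # newest is invalid, falls back

    def test_returns_empty_dict_when_nothing_valid():
        dp = make_provider([{'bids': [], 'asks': [], 'stats': {}}])
        assert dp.get_latest_cob_data('ETH/USDT') == {}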