try to fix chart updates - wip
@@ -17,7 +17,7 @@ import asyncio
 import logging
 import numpy as np
 import pandas as pd
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from typing import Dict, List, Optional, Any, Callable
 from threading import Thread
 import json
@@ -94,10 +94,24 @@ class COBIntegration:
 
         # Initialize Enhanced WebSocket first
         try:
-            # Enhanced WebSocket initialization would go here
-            logger.info("Enhanced WebSocket initialized successfully")
+            from .enhanced_cob_websocket import EnhancedCOBWebSocket
+
+            # Initialize Enhanced WebSocket with dashboard callback
+            self.enhanced_websocket = EnhancedCOBWebSocket(
+                symbols=self.symbols,
+                dashboard_callback=self._on_websocket_status_update
+            )
+
+            # Add callback for COB data updates
+            self.enhanced_websocket.add_cob_callback(self._on_enhanced_cob_update)
+
+            # Start the WebSocket connection
+            await self.enhanced_websocket.start()
+
+            logger.info("Enhanced WebSocket initialized and started successfully")
         except Exception as e:
             logger.error(f"Error starting Enhanced WebSocket: {e}")
             # Continue without WebSocket - will use API fallback
 
         # Skip COB provider backup since Enhanced WebSocket is working perfectly
         logger.info("Skipping COB provider backup - Enhanced WebSocket provides all needed data")
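
For reference, the init-with-fallback pattern the new block follows can be sketched on its own. The EnhancedCOBWebSocket interface (symbols, dashboard_callback, add_cob_callback, start) is read off the call site above; FeedStub is a hypothetical stand-in so the sketch is self-contained and runnable:

import asyncio
import logging

logger = logging.getLogger(__name__)

class FeedStub:
    """Hypothetical stand-in for EnhancedCOBWebSocket."""
    def __init__(self, symbols, dashboard_callback=None):
        self.symbols = symbols
        self.callbacks = []

    def add_cob_callback(self, cb):
        self.callbacks.append(cb)

    async def start(self):
        pass  # a real implementation would open the WebSocket connection here

async def init_feed(symbols):
    feed = None
    try:
        feed = FeedStub(symbols)
        feed.add_cob_callback(lambda symbol, data: None)
        await feed.start()
    except Exception as e:
        logger.error(f"Error starting WebSocket: {e}")
        feed = None  # callers detect None and fall back to REST polling
    return feed

asyncio.run(init_feed(['BTC/USDT']))

The point of the try/except is that a failed WebSocket start degrades to the API fallback instead of aborting initialization.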
@@ -118,7 +132,23 @@ class COBIntegration:
     async def _on_enhanced_cob_update(self, symbol: str, cob_data: Dict):
         """Handle COB updates from Enhanced WebSocket"""
         try:
-            logger.debug(f"Enhanced WebSocket COB update for {symbol}")
+            logger.debug(f"Enhanced WebSocket COB update for {symbol}: {cob_data.get('type', 'unknown')}")
+
+            # Handle candlestick data - convert to OHLCV and update data provider
+            if cob_data.get('type') == 'candlestick' and self.data_provider:
+                candlestick = cob_data.get('data', {})
+                if candlestick:
+                    # Convert WebSocket candlestick to tick format for data provider
+                    tick = {
+                        'timestamp': datetime.fromtimestamp(candlestick.get('close_time', 0) / 1000, tz=timezone.utc),
+                        'price': float(candlestick.get('close_price', 0)),
+                        'volume': float(candlestick.get('volume', 0))
+                    }
+
+                    # Update data provider with live tick (this will update real_time_data)
+                    if hasattr(self.data_provider, '_process_tick'):
+                        self.data_provider._process_tick(symbol, tick)
+                        logger.debug(f"Updated data provider with live candle: {symbol} @ {tick['price']}")
 
             # Convert enhanced WebSocket data to COB format for existing callbacks
             # Notify CNN callbacks
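
The candlestick branch reduces to a millisecond-epoch to tz-aware UTC conversion, which is also why timezone was added to the datetime import in the first hunk. A minimal sketch, assuming close_time is a millisecond epoch as in Binance kline payloads (the sample values are made up):

from datetime import datetime, timezone

candlestick = {'close_time': 1700000000000, 'close_price': '37821.5', 'volume': '12.4'}

tick = {
    'timestamp': datetime.fromtimestamp(candlestick['close_time'] / 1000, tz=timezone.utc),
    'price': float(candlestick['close_price']),
    'volume': float(candlestick['volume']),
}
print(tick['timestamp'])  # 2023-11-14 22:13:20+00:00 -- timezone-aware, not naive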
@@ -3775,10 +3775,22 @@ class DataProvider:
             logger.error(f"Error emitting pivot event: {e}", exc_info=True)
 
     def get_latest_candles(self, symbol: str, timeframe: str, limit: int = 100) -> pd.DataFrame:
-        """Get the latest candles from cached data only"""
+        """Get the latest candles combining cached data with real-time data"""
         try:
-            # Get cached data
-            cached_df = self.get_historical_data(symbol, timeframe, limit=limit)
+            # Check for real-time data first
+            has_real_time_data = False
+            with self.data_lock:
+                if symbol in self.real_time_data and timeframe in self.real_time_data[symbol]:
+                    real_time_candles = list(self.real_time_data[symbol][timeframe])
+                    has_real_time_data = bool(real_time_candles)
+
+            # If no real-time data available, force refresh from API for live updates
+            if not has_real_time_data and limit <= 10:  # Small limit suggests live update request
+                logger.debug(f"No real-time data for {symbol} {timeframe}, forcing API refresh for live update")
+                cached_df = self.get_historical_data(symbol, timeframe, limit=limit, refresh=True)
+            else:
+                # Get cached data normally
+                cached_df = self.get_historical_data(symbol, timeframe, limit=limit)
 
             # Get real-time data if available
             with self.data_lock:
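
The lock-guarded freshness check can be sketched in isolation; data_lock and real_time_data here are hypothetical stand-ins for the provider's attributes, and the limit <= 10 heuristic above then decides whether to pass refresh=True:

from collections import deque
from threading import Lock

data_lock = Lock()
real_time_data = {'BTC/USDT': {'1m': deque(maxlen=1000)}}

def has_live_candles(symbol: str, timeframe: str) -> bool:
    # Snapshot under the lock so a writer thread can't mutate mid-read
    with data_lock:
        candles = real_time_data.get(symbol, {}).get(timeframe)
        return bool(candles)

print(has_live_candles('BTC/USDT', '1m'))  # False until a feed appends candles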
@@ -3786,24 +3798,29 @@ class DataProvider:
                 real_time_candles = list(self.real_time_data[symbol][timeframe])
 
             if real_time_candles:
-                # Convert to DataFrame
+                # Convert to DataFrame and ensure proper format
                 rt_df = pd.DataFrame(real_time_candles)
+                rt_df = self._ensure_datetime_index(rt_df)
 
                 if cached_df is not None and not cached_df.empty:
                     # Combine cached and real-time
                     # Remove overlapping candles from cached data
                     if not rt_df.empty:
-                        cutoff_time = rt_df['timestamp'].min()
+                        cutoff_time = rt_df.index.min()
                         cached_df = cached_df[cached_df.index < cutoff_time]
 
-                    # Concatenate
-                    combined_df = pd.concat([cached_df, rt_df], ignore_index=True)
+                    # Concatenate and sort by index
+                    combined_df = pd.concat([cached_df, rt_df])
+                    combined_df = combined_df.sort_index()
+                    combined_df = combined_df[~combined_df.index.duplicated(keep='last')]
                 else:
                     combined_df = rt_df
 
                 logger.debug(f"Combined data for {symbol} {timeframe}: {len(cached_df) if cached_df is not None else 0} cached + {len(rt_df)} real-time")
                 return combined_df.tail(limit)
 
             # Return just cached data if no real-time data
             logger.debug(f"Returning cached data only for {symbol} {timeframe}: {len(cached_df) if cached_df is not None else 0} candles")
             return cached_df.tail(limit) if cached_df is not None else pd.DataFrame()
 
         except Exception as e:
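
The combine-and-dedup logic in this hunk is plain pandas and can be checked in isolation; a self-contained sketch of the same three steps (cutoff, index-aligned concat, keep-last dedup) with made-up candles:

import pandas as pd

cached = pd.DataFrame(
    {'close': [100.0, 101.0, 102.0]},
    index=pd.to_datetime(['2024-01-01 00:00', '2024-01-01 00:01', '2024-01-01 00:02'], utc=True),
)
live = pd.DataFrame(
    {'close': [102.5, 103.0]},
    index=pd.to_datetime(['2024-01-01 00:02', '2024-01-01 00:03'], utc=True),
)

cached = cached[cached.index < live.index.min()]      # drop cached rows that overlap live data
combined = pd.concat([cached, live]).sort_index()     # align on the DatetimeIndex, chronological order
combined = combined[~combined.index.duplicated(keep='last')]  # safety net: live wins on equal timestamps
print(combined)  # 00:00, 00:01 from cache; 00:02, 00:03 from live

Dropping ignore_index=True is what keeps the DatetimeIndex intact, so the cutoff comparison against cached_df.index is meaningful and .tail(limit) returns the chronologically newest rows.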