order flow WIP, chart broken
web/dashboard.py (204 changed lines)
--- a/web/dashboard.py
+++ b/web/dashboard.py
@@ -748,10 +748,10 @@ class TradingDashboard:
                            className="text-light mb-0 opacity-75 small")
                 ], className="bg-dark p-2 mb-2"),
 
-                # Auto-refresh component
+                # Auto-refresh component - optimized for sub-1s responsiveness
                 dcc.Interval(
                     id='interval-component',
-                    interval=1000,  # Update every 1 second for real-time tick updates
+                    interval=300,  # Update every 300ms for real-time trading
                     n_intervals=0
                 ),
 
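Note: at interval=300 the Interval component fires roughly three times per second, which is why the callbacks below distinguish lightweight ticks from full chart refreshes (see is_lightweight_update in a later hunk). A minimal, self-contained sketch of that pattern; the 'interval-component' id comes from the diff, but the ticker/chart outputs and the every-33rd-tick rule are illustrative assumptions, not the dashboard's actual logic:

# Sketch: fast dcc.Interval driving a split lightweight/full update policy.
from dash import Dash, dcc, html, Input, Output
from dash.exceptions import PreventUpdate

app = Dash(__name__)
app.layout = html.Div([
    html.Div(id='price-ticker'),
    dcc.Graph(id='price-chart'),
    dcc.Interval(id='interval-component', interval=300, n_intervals=0),  # 300 ms ticks
])

@app.callback(Output('price-ticker', 'children'),
              Input('interval-component', 'n_intervals'))
def update_ticker(n):
    # Cheap work on every 300 ms tick (e.g. read a cached price).
    return f"tick {n}"

@app.callback(Output('price-chart', 'figure'),
              Input('interval-component', 'n_intervals'))
def update_chart(n):
    # Heavy work only on every ~10th second (every 33rd tick at 300 ms) - an assumed rule.
    if n % 33 != 0:
        raise PreventUpdate
    return {}  # placeholder figure

if __name__ == '__main__':
    app.run_server(debug=True)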
@@ -1016,13 +1016,15 @@ class TradingDashboard:
                     data_source = "CACHED"
                     logger.debug(f"[CACHED] Using cached price for {symbol}: ${current_price:.2f}")
                 else:
-                    # Only try fresh API call if we have no data at all
+                    # If no cached data, fetch fresh data
                     try:
-                        fresh_data = self.data_provider.get_historical_data(symbol, '1m', limit=1, refresh=False)
+                        fresh_data = self.data_provider.get_historical_data(symbol, '1m', limit=1, refresh=True)
                         if fresh_data is not None and not fresh_data.empty:
                             current_price = float(fresh_data['close'].iloc[-1])
                             data_source = "API"
-                            logger.debug(f"[API] Fresh price for {symbol}: ${current_price:.2f}")
+                            logger.info(f"[API] Fresh price for {symbol}: ${current_price:.2f}")
+                        else:
+                            logger.warning(f"[API_ERROR] No data returned from API")
                     except Exception as api_error:
                         logger.warning(f"[API_ERROR] Failed to fetch fresh data: {api_error}")
 
@@ -1040,14 +1042,19 @@ class TradingDashboard:
             chart_data = None
             try:
                 if not is_lightweight_update:  # Only refresh charts every 10 seconds
-                    # Use cached data only (limited to 30 bars for performance)
+                    # Try cached data first (limited to 30 bars for performance)
                     chart_data = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=False)
                     if chart_data is not None and not chart_data.empty:
                         logger.debug(f"[CHART] Using cached 1m data: {len(chart_data)} bars")
                     else:
-                        # Wait for real data - no synthetic data
-                        logger.debug("[CHART] No chart data available - waiting for data provider")
-                        chart_data = None
+                        # If no cached data, fetch fresh data (especially important on first load)
+                        logger.debug("[CHART] No cached data available - fetching fresh data")
+                        chart_data = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=True)
+                        if chart_data is not None and not chart_data.empty:
+                            logger.info(f"[CHART] Fetched fresh 1m data: {len(chart_data)} bars")
+                        else:
+                            logger.warning("[CHART] No data available - waiting for data provider")
+                            chart_data = None
                 else:
                     # Use cached chart data for lightweight updates
                     chart_data = getattr(self, '_cached_chart_data', None)
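Both the price lookup in the previous hunk and this chart-data path now follow the same cache-first, fetch-on-miss pattern. A minimal sketch of that pattern in isolation; the helper name is hypothetical, and only the get_historical_data(symbol, timeframe, limit, refresh) signature is taken from the diff:

# Hypothetical helper illustrating the cache-first fallback used above.
from typing import Optional
import pandas as pd

def load_bars(data_provider, symbol: str, timeframe: str = '1m',
              limit: int = 30) -> Optional[pd.DataFrame]:
    """Return cached bars if available, otherwise force a fresh fetch."""
    df = data_provider.get_historical_data(symbol, timeframe, limit=limit, refresh=False)
    if df is not None and not df.empty:
        return df  # cache hit
    # Cache miss (e.g. first load): go to the API once.
    df = data_provider.get_historical_data(symbol, timeframe, limit=limit, refresh=True)
    return df if df is not None and not df.empty else None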
@@ -1419,37 +1426,81 @@ class TradingDashboard:
     def _create_price_chart(self, symbol: str) -> go.Figure:
         """Create price chart with volume and Williams pivot points from cached data"""
         try:
-            # Use cached data from data provider (optimized for performance)
-            df = self.data_provider.get_historical_data(symbol, '1m', limit=50, refresh=False)
-
-            if df is None or df.empty:
-                logger.warning("[CHART] No cached data available, trying fresh data")
-                try:
-                    df = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=True)
-                    if df is not None and not df.empty:
-                        # Ensure timezone consistency for fresh data
-                        df = self._ensure_timezone_consistency(df)
-                        # Add volume column if missing
-                        if 'volume' not in df.columns:
-                            df['volume'] = 100  # Default volume for demo
-                        actual_timeframe = '1m'
-                    else:
-                        return self._create_empty_chart(
-                            f"{symbol} Chart",
-                            f"No data available for {symbol}\nWaiting for data provider..."
-                        )
-                except Exception as e:
-                    logger.warning(f"[ERROR] Error getting fresh data: {e}")
-                    return self._create_empty_chart(
-                        f"{symbol} Chart",
-                        f"Chart Error: {str(e)}"
-                    )
-            else:
-                # Ensure timezone consistency for cached data
-                df = self._ensure_timezone_consistency(df)
-                actual_timeframe = '1m'
-                logger.debug(f"[CHART] Using {len(df)} 1m bars from cached data in {self.timezone}")
+            # For Williams Market Structure, we need 1s data for proper recursive analysis
+            # Get 5 minutes (300 seconds) of 1s data for accurate pivot calculation
+            df_1s = None
+            df_1m = None
+
+            # Try to get 1s data first for Williams analysis
+            try:
+                df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=300, refresh=False)
+                if df_1s is None or df_1s.empty:
+                    logger.warning("[CHART] No 1s cached data available, trying fresh 1s data")
+                    df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=300, refresh=True)
+
+                if df_1s is not None and not df_1s.empty:
+                    logger.debug(f"[CHART] Using {len(df_1s)} 1s bars for Williams analysis")
+                    # Aggregate 1s data to 1m for chart display (cleaner visualization)
+                    df = self._aggregate_1s_to_1m(df_1s)
+                    actual_timeframe = '1s→1m'
+                else:
+                    df_1s = None
+            except Exception as e:
+                logger.warning(f"[CHART] Error getting 1s data: {e}")
+                df_1s = None
+
+            # Fallback to 1m data if 1s not available
+            if df_1s is None:
+                df = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=False)
+
+                if df is None or df.empty:
+                    logger.warning("[CHART] No cached 1m data available, trying fresh 1m data")
+                    try:
+                        df = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=True)
+                        if df is not None and not df.empty:
+                            # Ensure timezone consistency for fresh data
+                            df = self._ensure_timezone_consistency(df)
+                            # Add volume column if missing
+                            if 'volume' not in df.columns:
+                                df['volume'] = 100  # Default volume for demo
+                            actual_timeframe = '1m'
+                        else:
+                            return self._create_empty_chart(
+                                f"{symbol} Chart",
+                                f"No data available for {symbol}\nWaiting for data provider..."
+                            )
+                    except Exception as e:
+                        logger.warning(f"[ERROR] Error getting fresh 1m data: {e}")
+                        return self._create_empty_chart(
+                            f"{symbol} Chart",
+                            f"Chart Error: {str(e)}"
+                        )
+                else:
+                    # Ensure timezone consistency for cached data
+                    df = self._ensure_timezone_consistency(df)
+                    actual_timeframe = '1m'
+                    logger.debug(f"[CHART] Using {len(df)} 1m bars from cached data in {self.timezone}")
+
+            # Final check: ensure we have valid data with proper index
+            if df is None or df.empty:
+                return self._create_empty_chart(
+                    f"{symbol} Chart",
+                    "No valid chart data available"
+                )
+
+            # Ensure we have a proper DatetimeIndex for chart operations
+            if not isinstance(df.index, pd.DatetimeIndex):
+                logger.warning(f"[CHART] Data has {type(df.index)} instead of DatetimeIndex, converting...")
+                try:
+                    # Try to convert to datetime index if possible
+                    df.index = pd.to_datetime(df.index)
+                    df = self._ensure_timezone_consistency(df)
+                except Exception as e:
+                    logger.warning(f"[CHART] Could not convert index to DatetimeIndex: {e}")
+                    # Create a fallback datetime index
+                    df.index = pd.date_range(start=pd.Timestamp.now() - pd.Timedelta(minutes=len(df)),
+                                             periods=len(df), freq='1min')
 
             # Create subplot with secondary y-axis for volume
             fig = make_subplots(
                 rows=2, cols=1,
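The DatetimeIndex guard added above matters because the later resampling and time-range filtering both assume a datetime index. A minimal standalone sketch of the same idea; the function name is hypothetical and the fallback anchors the synthetic index at "now" rather than reproducing the method's exact expression:

# Standalone sketch of the DatetimeIndex guard used in _create_price_chart.
import pandas as pd

def ensure_datetime_index(df: pd.DataFrame, freq: str = '1min') -> pd.DataFrame:
    """Coerce df.index to a DatetimeIndex, synthesizing one as a last resort."""
    if isinstance(df.index, pd.DatetimeIndex):
        return df
    try:
        df.index = pd.to_datetime(df.index)  # e.g. epoch integers or timestamp strings
    except (ValueError, TypeError):
        # Fall back to an evenly spaced index ending "now".
        df.index = pd.date_range(end=pd.Timestamp.now(), periods=len(df), freq=freq)
    return df

# Usage with a plain RangeIndex frame:
bars = pd.DataFrame({'close': [1.0, 2.0, 3.0]})
print(ensure_datetime_index(bars).index)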
@@ -1472,11 +1523,16 @@ class TradingDashboard:
                 row=1, col=1
             )
 
-            # Add Williams Market Structure pivot points
+            # Add Williams Market Structure pivot points using 1s data if available
             try:
-                pivot_points = self._get_williams_pivot_points_for_chart(df)
+                # Use 1s data for Williams analysis, 1m data for chart display
+                williams_data = df_1s if df_1s is not None and not df_1s.empty else df
+                pivot_points = self._get_williams_pivot_points_for_chart(williams_data, chart_df=df)
                 if pivot_points:
                     self._add_williams_pivot_points_to_chart(fig, pivot_points, row=1)
+                    logger.info(f"[CHART] Added Williams pivot points using {actual_timeframe} data")
                 else:
                     logger.debug("[CHART] No Williams pivot points calculated")
             except Exception as e:
                 logger.debug(f"Error adding Williams pivot points to chart: {e}")
 
@@ -1522,10 +1578,10 @@ class TradingDashboard:
                     hovertemplate='<b>Volume: %{y:.0f}</b><br>%{x}<extra></extra>'
                 ),
                 row=2, col=1
             )
 
             # Mark recent trading decisions with proper markers
-            if self.recent_decisions and not df.empty:
+            if self.recent_decisions and df is not None and not df.empty:
                 # Get the timeframe of displayed candles
                 chart_start_time = df.index.min()
                 chart_end_time = df.index.max()
@@ -1559,10 +1615,10 @@ class TradingDashboard:
                         decision_time_pd = pd.to_datetime(decision_time_utc)
                         if chart_start_utc <= decision_time_pd <= chart_end_utc:
                             signal_type = decision.get('signal_type', 'UNKNOWN')
-                        if decision['action'] == 'BUY':
-                            buy_decisions.append((decision, signal_type))
-                        elif decision['action'] == 'SELL':
-                            sell_decisions.append((decision, signal_type))
+                            if decision['action'] == 'BUY':
+                                buy_decisions.append((decision, signal_type))
+                            elif decision['action'] == 'SELL':
+                                sell_decisions.append((decision, signal_type))
 
                 logger.debug(f"[CHART] Showing {len(buy_decisions)} BUY and {len(sell_decisions)} SELL signals in chart timeframe")
 
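The re-indentation above moves the BUY/SELL classification inside the time-range check, so only decisions whose timestamps fall within the displayed candles are bucketed. A minimal standalone sketch of that filtering with synthetic decisions; the field names mirror the diff, the data is made up:

# Sketch: bucket only the decisions whose timestamp lies inside the chart window.
import pandas as pd

decisions = [
    {'action': 'BUY',  'timestamp': pd.Timestamp('2024-01-01 10:01', tz='UTC')},
    {'action': 'SELL', 'timestamp': pd.Timestamp('2024-01-01 09:30', tz='UTC')},  # outside window
]
chart_start_utc = pd.Timestamp('2024-01-01 10:00', tz='UTC')
chart_end_utc = pd.Timestamp('2024-01-01 10:30', tz='UTC')

buy_decisions, sell_decisions = [], []
for decision in decisions:
    decision_time_pd = pd.to_datetime(decision['timestamp'])
    if chart_start_utc <= decision_time_pd <= chart_end_utc:
        # Classification happens only inside the window check (the bug fixed above).
        if decision['action'] == 'BUY':
            buy_decisions.append(decision)
        elif decision['action'] == 'SELL':
            sell_decisions.append(decision)

print(len(buy_decisions), len(sell_decisions))  # 1 0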
@@ -1655,7 +1711,7 @@ class TradingDashboard:
             )
 
             # Add closed trades markers with profit/loss styling and connecting lines
-            if self.closed_trades and not df.empty:
+            if self.closed_trades and df is not None and not df.empty:
                 # Get the timeframe of displayed chart
                 chart_start_time = df.index.min()
                 chart_end_time = df.index.max()
@@ -5415,7 +5471,7 @@ class TradingDashboard:
             logger.warning(f"Error extracting features for {timeframe}: {e}")
             return [0.0] * 50
 
-    def _get_williams_pivot_points_for_chart(self, df: pd.DataFrame) -> Optional[Dict]:
+    def _get_williams_pivot_points_for_chart(self, df: pd.DataFrame, chart_df: pd.DataFrame = None) -> Optional[Dict]:
         """Calculate Williams pivot points specifically for chart visualization with consistent timezone"""
         try:
             # Use existing Williams Market Structure instance instead of creating new one
@@ -5423,9 +5479,12 @@ class TradingDashboard:
                 logger.warning("Williams Market Structure not available for chart")
                 return None
 
-            # Reduced requirement to match Williams minimum
-            if len(df) < 20:
-                logger.debug(f"[WILLIAMS_CHART] Insufficient data for pivot calculation: {len(df)} bars (need 20+)")
+            # Use chart_df for timestamp mapping if provided, otherwise use df
+            display_df = chart_df if chart_df is not None else df
+
+            # Williams requires minimum data for recursive analysis
+            if len(df) < 50:
+                logger.debug(f"[WILLIAMS_CHART] Insufficient data for Williams pivot calculation: {len(df)} bars (need 50+ for proper recursive analysis)")
                 return None
 
             # Ensure timezone consistency for the chart data
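display_df is introduced above for mapping pivot timestamps onto the displayed 1m bars; the diff does not show that mapping itself. One common way to snap analysis timestamps onto display candles is a nearest-index lookup, sketched here purely as an assumption, not as this method's actual implementation:

# Hedged sketch: snap pivot timestamps (from 1s analysis) onto 1m display candles.
import pandas as pd

display_index = pd.date_range('2024-01-01 10:00', periods=5, freq='1min', tz='UTC')
pivot_times = pd.DatetimeIndex(['2024-01-01 10:01:37', '2024-01-01 10:03:02'], tz='UTC')

# get_indexer(..., method='nearest') returns the position of the closest display bar.
positions = display_index.get_indexer(pivot_times, method='nearest')
snapped = display_index[positions]
print(list(zip(pivot_times, snapped)))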
@@ -5539,12 +5598,12 @@ class TradingDashboard:
                 if isinstance(timestamp, datetime):
                     # Williams Market Structure creates naive datetimes that are actually in local time
                     # but without timezone info, so we need to localize them to our configured timezone
-                        if timestamp.tzinfo is None:
-                            # Williams creates timestamps in local time (Europe/Sofia), so localize directly
-                            local_timestamp = self.timezone.localize(timestamp)
-                        else:
-                            # If it has timezone info, convert to local timezone
-                            local_timestamp = timestamp.astimezone(self.timezone)
+                    if timestamp.tzinfo is None:
+                        # Williams creates timestamps in local time (Europe/Sofia), so localize directly
+                        local_timestamp = self.timezone.localize(timestamp)
+                    else:
+                        # If it has timezone info, convert to local timezone
+                        local_timestamp = timestamp.astimezone(self.timezone)
                 else:
                     # Fallback if timestamp is not a datetime
                     local_timestamp = self._now_local()
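The naive-vs-aware branch above is the standard pytz idiom: naive datetimes must be localize()d, aware ones converted with astimezone(). A minimal standalone sketch, using Europe/Sofia as in the comments above:

# Sketch of the localize-vs-astimezone branch for naive and aware datetimes.
from datetime import datetime, timezone
import pytz

local_tz = pytz.timezone('Europe/Sofia')

def to_local(ts: datetime) -> datetime:
    if ts.tzinfo is None:
        # Naive timestamp already expressed in local wall-clock time: attach the zone.
        return local_tz.localize(ts)
    # Aware timestamp (e.g. UTC): convert the instant into the local zone.
    return ts.astimezone(local_tz)

print(to_local(datetime(2024, 1, 1, 12, 0)))                       # naive -> localized
print(to_local(datetime(2024, 1, 1, 12, 0, tzinfo=timezone.utc)))  # aware -> converted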
@@ -5822,6 +5881,41 @@ class TradingDashboard:
             )
             return fig
 
+    def _aggregate_1s_to_1m(self, df_1s):
+        """Aggregate 1s data to 1m for chart display while preserving 1s data for Williams analysis"""
+        try:
+            if df_1s is None or df_1s.empty:
+                return None
+
+            # Check if the index is a DatetimeIndex - if not, we can't resample
+            if not isinstance(df_1s.index, pd.DatetimeIndex):
+                logger.warning(f"Cannot aggregate data: index is {type(df_1s.index)} instead of DatetimeIndex")
+                return df_1s  # Return original data if we can't aggregate
+
+            # Ensure timezone consistency
+            df_1s = self._ensure_timezone_consistency(df_1s)
+
+            # Calculate OHLCV for 1m from 1s data for cleaner chart visualization
+            # Use 'min' instead of deprecated 'T'
+            ohlcv_1m = df_1s.resample('1min').agg({
+                'open': 'first',
+                'high': 'max',
+                'low': 'min',
+                'close': 'last',
+                'volume': 'sum'
+            }).dropna()
+
+            # Ensure proper timezone formatting
+            ohlcv_1m = self._ensure_timezone_consistency(ohlcv_1m)
+
+            logger.debug(f"[CHART] Aggregated {len(df_1s)} 1s bars to {len(ohlcv_1m)} 1m bars for display")
+            return ohlcv_1m
+
+        except Exception as e:
+            logger.warning(f"Error aggregating 1s data to 1m: {e}")
+            # Return original data as fallback
+            return df_1s
+
 def create_dashboard(data_provider: DataProvider = None, orchestrator: TradingOrchestrator = None, trading_executor: TradingExecutor = None) -> TradingDashboard:
     """Factory function to create a trading dashboard"""
     return TradingDashboard(data_provider=data_provider, orchestrator=orchestrator, trading_executor=trading_executor)
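A quick standalone demonstration of the resample('1min') aggregation the new _aggregate_1s_to_1m helper relies on; the 1s bars here are synthetic, only the OHLCV column names and aggregation rules match the diff:

# Demo: roll 120 seconds of synthetic 1s OHLCV bars up to 1m bars.
import numpy as np
import pandas as pd

idx = pd.date_range('2024-01-01 10:00', periods=120, freq='1s', tz='UTC')
close = 100 + np.cumsum(np.random.randn(120) * 0.05)
df_1s = pd.DataFrame({
    'open': close, 'high': close + 0.02, 'low': close - 0.02,
    'close': close, 'volume': np.random.randint(1, 10, size=120),
}, index=idx)

ohlcv_1m = df_1s.resample('1min').agg({
    'open': 'first', 'high': 'max', 'low': 'min',
    'close': 'last', 'volume': 'sum',
}).dropna()

print(ohlcv_1m)  # two rows: the 10:00 and 10:01 minute bars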