order flow WIP, chart broken
This commit is contained in:
@@ -1,267 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
CNN Trading Dashboard - Web UI Layer
|
||||
|
||||
This is a lightweight Dash application that provides the web interface
|
||||
for CNN pivot predictions. All business logic is handled by core modules.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
# Add core modules to path
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
import dash
|
||||
from dash import dcc, html, Input, Output, callback
|
||||
import dash_bootstrap_components as dbc
|
||||
|
||||
from core.chart_data_provider import ChartDataProvider
|
||||
|
||||
# Setup logging with ASCII-only output.
# Module-level logger; handlers/format configured once at import time.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
|
||||
|
||||
class CNNTradingDashboard:
    """Lightweight Dash web interface for CNN trading predictions.

    Thin UI layer only: all business logic (prices, predictions, pivots,
    training stats) is delegated to
    ``core.chart_data_provider.ChartDataProvider``. This class builds the
    Bootstrap layout and wires a single interval-driven refresh callback.
    """

    def __init__(self) -> None:
        """Create the Dash app, the data provider, and wire layout + callbacks."""
        # Initialize Dash app
        self.app = dash.Dash(
            __name__,
            external_stylesheets=[dbc.themes.BOOTSTRAP],
            title="CNN Trading Dashboard"
        )

        # Initialize core data provider (owns all data/prediction logic)
        self.data_provider = ChartDataProvider()

        # Setup web interface
        self._setup_layout()
        self._setup_callbacks()

        logger.info("CNN Trading Dashboard web interface initialized")

    def _setup_layout(self) -> None:
        """Setup the web dashboard layout.

        Layout structure (top to bottom):
        header row, main price chart, three status cards (CNN / pivots /
        training), a legend alert, and a ``dcc.Interval`` that drives the
        refresh callback every 5 seconds.
        """
        self.app.layout = dbc.Container([
            # Header
            dbc.Row([
                dbc.Col([
                    html.H1("CNN Trading Dashboard",
                            className="text-center text-primary mb-2"),
                    html.P("Real-time CNN pivot predictions for ETH/USDT trading",
                           className="text-center text-muted mb-4")
                ])
            ]),

            # Main chart (figure filled by the refresh callback)
            dbc.Row([
                dbc.Col([
                    dbc.Card([
                        dbc.CardHeader([
                            html.H4("Price Chart with CNN Predictions", className="mb-0")
                        ]),
                        dbc.CardBody([
                            dcc.Graph(
                                id='main-chart',
                                style={'height': '600px'},
                                config={'displayModeBar': True}
                            )
                        ])
                    ])
                ], width=12)
            ], className="mb-4"),

            # Status panels — three equal-width cards, contents updated by callback
            dbc.Row([
                # CNN Status
                dbc.Col([
                    dbc.Card([
                        dbc.CardHeader([
                            html.H5("CNN Prediction Status", className="mb-0")
                        ]),
                        dbc.CardBody([
                            html.Div(id='cnn-status')
                        ])
                    ])
                ], width=4),

                # Pivot Detection Status
                dbc.Col([
                    dbc.Card([
                        dbc.CardHeader([
                            html.H5("Pivot Detection Status", className="mb-0")
                        ]),
                        dbc.CardBody([
                            html.Div(id='pivot-status')
                        ])
                    ])
                ], width=4),

                # Training Data Status
                dbc.Col([
                    dbc.Card([
                        dbc.CardHeader([
                            html.H5("Training Data Capture", className="mb-0")
                        ]),
                        dbc.CardBody([
                            html.Div(id='training-status')
                        ])
                    ])
                ], width=4)
            ], className="mb-4"),

            # System info — static legend explaining chart marker symbols
            dbc.Row([
                dbc.Col([
                    dbc.Alert([
                        html.H6("Legend:", className="mb-2"),
                        html.Ul([
                            html.Li("Hollow Red Circles: CNN HIGH pivot predictions"),
                            html.Li("Hollow Green Circles: CNN LOW pivot predictions"),
                            html.Li("Red Triangles: Actual HIGH pivots detected"),
                            html.Li("Green Triangles: Actual LOW pivots detected"),
                            html.Li("Circle/Triangle size indicates confidence/strength")
                        ], className="mb-0")
                    ], color="info", className="mb-3")
                ])
            ]),

            # Auto-refresh interval (drives update_dashboard)
            dcc.Interval(
                id='refresh-interval',
                interval=5000,  # Update every 5 seconds
                n_intervals=0
            )

        ], fluid=True)

    def _setup_callbacks(self) -> None:
        """Setup Dash callbacks for web interface updates."""

        @self.app.callback(
            [Output('main-chart', 'figure'),
             Output('cnn-status', 'children'),
             Output('pivot-status', 'children'),
             Output('training-status', 'children')],
            [Input('refresh-interval', 'n_intervals')]
        )
        def update_dashboard(n_intervals: int):
            """Main callback to update all dashboard components.

            Fires on every interval tick; returns (figure, cnn-status
            children, pivot-status children, training-status children).
            On any failure it logs and returns safe placeholders so the
            page keeps rendering.
            """
            try:
                # Simulate price update
                # NOTE(review): looks like a demo/testing data path — confirm
                # this should remain in production builds.
                self.data_provider.simulate_price_update()

                # Get updated predictions and pivots
                predictions, pivots = self.data_provider.update_predictions_and_pivots()

                # Create main chart
                fig = self.data_provider.create_price_chart()

                # Add predictions and pivots to chart (overlays on the base figure)
                fig = self.data_provider.add_cnn_predictions_to_chart(fig, predictions)
                fig = self.data_provider.add_actual_pivots_to_chart(fig, pivots)

                # Get status for info panels
                status = self.data_provider.get_current_status()

                # Create status displays (each helper is internally exception-safe)
                cnn_status = self._create_cnn_status_display(status.get('predictions', {}))
                pivot_status = self._create_pivot_status_display(status.get('pivots', {}))
                training_status = self._create_training_status_display(status.get('training', {}))

                return fig, cnn_status, pivot_status, training_status

            except Exception as e:
                logger.error(f"Error updating dashboard: {e}")
                # Return empty/default values on error ({} renders as a blank figure)
                return {}, "Error loading CNN status", "Error loading pivot status", "Error loading training status"

    def _create_cnn_status_display(self, stats: dict) -> list:
        """Create CNN status display components.

        Args:
            stats: prediction stats mapping; missing keys default to 0.
                Expected keys: 'active_predictions', 'high_confidence',
                'avg_confidence' (fraction in [0, 1] — presumably; verify
                against ChartDataProvider.get_current_status).

        Returns:
            List of Dash components for the CNN status card body.
        """
        try:
            active_predictions = stats.get('active_predictions', 0)
            high_confidence = stats.get('high_confidence', 0)
            avg_confidence = stats.get('avg_confidence', 0)

            return [
                html.P(f"Active Predictions: {active_predictions}", className="mb-1"),
                html.P(f"High Confidence: {high_confidence}", className="mb-1"),
                html.P(f"Average Confidence: {avg_confidence:.1%}", className="mb-1"),
                # Color thresholds: >0.7 green, >0.5 amber, else red
                dbc.Progress(
                    value=avg_confidence * 100,
                    color="success" if avg_confidence > 0.7 else "warning" if avg_confidence > 0.5 else "danger",
                    className="mb-2"
                ),
                html.Small(f"Last Update: {datetime.now().strftime('%H:%M:%S')}",
                           className="text-muted")
            ]
        except Exception as e:
            logger.error(f"Error creating CNN status display: {e}")
            return [html.P("Error loading CNN status")]

    def _create_pivot_status_display(self, stats: dict) -> list:
        """Create pivot detection status display components.

        Args:
            stats: pivot stats mapping; missing keys default to 0.
                Expected keys: 'total_pivots', 'high_pivots', 'low_pivots',
                'confirmed_pivots'.

        Returns:
            List of Dash components for the pivot status card body.
        """
        try:
            total_pivots = stats.get('total_pivots', 0)
            high_pivots = stats.get('high_pivots', 0)
            low_pivots = stats.get('low_pivots', 0)
            confirmed = stats.get('confirmed_pivots', 0)

            return [
                html.P(f"Total Pivots: {total_pivots}", className="mb-1"),
                html.P(f"HIGH Pivots: {high_pivots}", className="mb-1"),
                html.P(f"LOW Pivots: {low_pivots}", className="mb-1"),
                html.P(f"Confirmed: {confirmed}", className="mb-1"),
                # max(total_pivots, 1) guards against division by zero
                dbc.Progress(
                    value=(confirmed / max(total_pivots, 1)) * 100,
                    color="success",
                    className="mb-2"
                ),
                html.Small("Williams Market Structure", className="text-muted")
            ]
        except Exception as e:
            logger.error(f"Error creating pivot status display: {e}")
            return [html.P("Error loading pivot status")]

    def _create_training_status_display(self, stats: dict) -> list:
        """Create training data status display components.

        Args:
            stats: training-capture stats mapping; missing keys default to 0.
                Expected keys: 'captured_points', 'avg_price_accuracy',
                'avg_time_accuracy' (accuracies as fractions in [0, 1] —
                presumably; verify against the data provider).

        Returns:
            List of Dash components for the training status card body.
        """
        try:
            captured_points = stats.get('captured_points', 0)
            price_accuracy = stats.get('avg_price_accuracy', 0)
            time_accuracy = stats.get('avg_time_accuracy', 0)

            return [
                html.P(f"Data Points: {captured_points}", className="mb-1"),
                html.P(f"Price Accuracy: {price_accuracy:.1%}", className="mb-1"),
                html.P(f"Time Accuracy: {time_accuracy:.1%}", className="mb-1"),
                # Color thresholds: >0.8 green, >0.6 amber, else red
                dbc.Progress(
                    value=price_accuracy * 100,
                    color="success" if price_accuracy > 0.8 else "warning" if price_accuracy > 0.6 else "danger",
                    className="mb-2"
                ),
                html.Small("Auto-saved every 5 points", className="text-muted")
            ]
        except Exception as e:
            logger.error(f"Error creating training status display: {e}")
            return [html.P("Error loading training status")]

    def run(self, host: str = '127.0.0.1', port: int = 8050, debug: bool = False) -> None:
        """Run the dashboard web server (blocking).

        Args:
            host: interface to bind.
            port: TCP port to listen on.
            debug: enable Dash debug/hot-reload mode.

        Raises:
            Exception: re-raised after logging if the server fails to start.
        """
        try:
            logger.info(f"Starting CNN Trading Dashboard at http://{host}:{port}")
            # NOTE(review): app.run_server is deprecated in newer Dash
            # releases in favor of app.run — confirm the pinned Dash version.
            self.app.run_server(host=host, port=port, debug=debug)
        except Exception as e:
            logger.error(f"Error starting dashboard server: {e}")
            raise
|
||||
|
||||
def main() -> None:
    """Entry point: build the dashboard and serve it in debug mode."""
    CNNTradingDashboard().run(debug=True)
|
||||
|
||||
# Script entry point: only start the server when executed directly,
# never on import.
if __name__ == "__main__":
    main()
|
||||
204
web/dashboard.py
204
web/dashboard.py
@@ -748,10 +748,10 @@ class TradingDashboard:
|
||||
className="text-light mb-0 opacity-75 small")
|
||||
], className="bg-dark p-2 mb-2"),
|
||||
|
||||
# Auto-refresh component
|
||||
# Auto-refresh component - optimized for sub-1s responsiveness
|
||||
dcc.Interval(
|
||||
id='interval-component',
|
||||
interval=1000, # Update every 1 second for real-time tick updates
|
||||
interval=300, # Update every 300ms for real-time trading
|
||||
n_intervals=0
|
||||
),
|
||||
|
||||
@@ -1016,13 +1016,15 @@ class TradingDashboard:
|
||||
data_source = "CACHED"
|
||||
logger.debug(f"[CACHED] Using cached price for {symbol}: ${current_price:.2f}")
|
||||
else:
|
||||
# Only try fresh API call if we have no data at all
|
||||
# If no cached data, fetch fresh data
|
||||
try:
|
||||
fresh_data = self.data_provider.get_historical_data(symbol, '1m', limit=1, refresh=False)
|
||||
fresh_data = self.data_provider.get_historical_data(symbol, '1m', limit=1, refresh=True)
|
||||
if fresh_data is not None and not fresh_data.empty:
|
||||
current_price = float(fresh_data['close'].iloc[-1])
|
||||
data_source = "API"
|
||||
logger.debug(f"[API] Fresh price for {symbol}: ${current_price:.2f}")
|
||||
logger.info(f"[API] Fresh price for {symbol}: ${current_price:.2f}")
|
||||
else:
|
||||
logger.warning(f"[API_ERROR] No data returned from API")
|
||||
except Exception as api_error:
|
||||
logger.warning(f"[API_ERROR] Failed to fetch fresh data: {api_error}")
|
||||
|
||||
@@ -1040,14 +1042,19 @@ class TradingDashboard:
|
||||
chart_data = None
|
||||
try:
|
||||
if not is_lightweight_update: # Only refresh charts every 10 seconds
|
||||
# Use cached data only (limited to 30 bars for performance)
|
||||
# Try cached data first (limited to 30 bars for performance)
|
||||
chart_data = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=False)
|
||||
if chart_data is not None and not chart_data.empty:
|
||||
logger.debug(f"[CHART] Using cached 1m data: {len(chart_data)} bars")
|
||||
else:
|
||||
# Wait for real data - no synthetic data
|
||||
logger.debug("[CHART] No chart data available - waiting for data provider")
|
||||
chart_data = None
|
||||
# If no cached data, fetch fresh data (especially important on first load)
|
||||
logger.debug("[CHART] No cached data available - fetching fresh data")
|
||||
chart_data = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=True)
|
||||
if chart_data is not None and not chart_data.empty:
|
||||
logger.info(f"[CHART] Fetched fresh 1m data: {len(chart_data)} bars")
|
||||
else:
|
||||
logger.warning("[CHART] No data available - waiting for data provider")
|
||||
chart_data = None
|
||||
else:
|
||||
# Use cached chart data for lightweight updates
|
||||
chart_data = getattr(self, '_cached_chart_data', None)
|
||||
@@ -1419,37 +1426,81 @@ class TradingDashboard:
|
||||
def _create_price_chart(self, symbol: str) -> go.Figure:
|
||||
"""Create price chart with volume and Williams pivot points from cached data"""
|
||||
try:
|
||||
# Use cached data from data provider (optimized for performance)
|
||||
df = self.data_provider.get_historical_data(symbol, '1m', limit=50, refresh=False)
|
||||
# For Williams Market Structure, we need 1s data for proper recursive analysis
|
||||
# Get 5 minutes (300 seconds) of 1s data for accurate pivot calculation
|
||||
df_1s = None
|
||||
df_1m = None
|
||||
|
||||
if df is None or df.empty:
|
||||
logger.warning("[CHART] No cached data available, trying fresh data")
|
||||
try:
|
||||
df = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=True)
|
||||
if df is not None and not df.empty:
|
||||
# Ensure timezone consistency for fresh data
|
||||
df = self._ensure_timezone_consistency(df)
|
||||
# Add volume column if missing
|
||||
if 'volume' not in df.columns:
|
||||
df['volume'] = 100 # Default volume for demo
|
||||
actual_timeframe = '1m'
|
||||
else:
|
||||
# Try to get 1s data first for Williams analysis
|
||||
try:
|
||||
df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=300, refresh=False)
|
||||
if df_1s is None or df_1s.empty:
|
||||
logger.warning("[CHART] No 1s cached data available, trying fresh 1s data")
|
||||
df_1s = self.data_provider.get_historical_data(symbol, '1s', limit=300, refresh=True)
|
||||
|
||||
if df_1s is not None and not df_1s.empty:
|
||||
logger.debug(f"[CHART] Using {len(df_1s)} 1s bars for Williams analysis")
|
||||
# Aggregate 1s data to 1m for chart display (cleaner visualization)
|
||||
df = self._aggregate_1s_to_1m(df_1s)
|
||||
actual_timeframe = '1s→1m'
|
||||
else:
|
||||
df_1s = None
|
||||
except Exception as e:
|
||||
logger.warning(f"[CHART] Error getting 1s data: {e}")
|
||||
df_1s = None
|
||||
|
||||
# Fallback to 1m data if 1s not available
|
||||
if df_1s is None:
|
||||
df = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=False)
|
||||
|
||||
if df is None or df.empty:
|
||||
logger.warning("[CHART] No cached 1m data available, trying fresh 1m data")
|
||||
try:
|
||||
df = self.data_provider.get_historical_data(symbol, '1m', limit=30, refresh=True)
|
||||
if df is not None and not df.empty:
|
||||
# Ensure timezone consistency for fresh data
|
||||
df = self._ensure_timezone_consistency(df)
|
||||
# Add volume column if missing
|
||||
if 'volume' not in df.columns:
|
||||
df['volume'] = 100 # Default volume for demo
|
||||
actual_timeframe = '1m'
|
||||
else:
|
||||
return self._create_empty_chart(
|
||||
f"{symbol} Chart",
|
||||
f"No data available for {symbol}\nWaiting for data provider..."
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"[ERROR] Error getting fresh 1m data: {e}")
|
||||
return self._create_empty_chart(
|
||||
f"{symbol} Chart",
|
||||
f"No data available for {symbol}\nWaiting for data provider..."
|
||||
f"Chart Error: {str(e)}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"[ERROR] Error getting fresh data: {e}")
|
||||
return self._create_empty_chart(
|
||||
f"{symbol} Chart",
|
||||
f"Chart Error: {str(e)}"
|
||||
)
|
||||
else:
|
||||
# Ensure timezone consistency for cached data
|
||||
df = self._ensure_timezone_consistency(df)
|
||||
actual_timeframe = '1m'
|
||||
logger.debug(f"[CHART] Using {len(df)} 1m bars from cached data in {self.timezone}")
|
||||
else:
|
||||
# Ensure timezone consistency for cached data
|
||||
df = self._ensure_timezone_consistency(df)
|
||||
actual_timeframe = '1m'
|
||||
logger.debug(f"[CHART] Using {len(df)} 1m bars from cached data in {self.timezone}")
|
||||
|
||||
# Final check: ensure we have valid data with proper index
|
||||
if df is None or df.empty:
|
||||
return self._create_empty_chart(
|
||||
f"{symbol} Chart",
|
||||
"No valid chart data available"
|
||||
)
|
||||
|
||||
# Ensure we have a proper DatetimeIndex for chart operations
|
||||
if not isinstance(df.index, pd.DatetimeIndex):
|
||||
logger.warning(f"[CHART] Data has {type(df.index)} instead of DatetimeIndex, converting...")
|
||||
try:
|
||||
# Try to convert to datetime index if possible
|
||||
df.index = pd.to_datetime(df.index)
|
||||
df = self._ensure_timezone_consistency(df)
|
||||
except Exception as e:
|
||||
logger.warning(f"[CHART] Could not convert index to DatetimeIndex: {e}")
|
||||
# Create a fallback datetime index
|
||||
df.index = pd.date_range(start=pd.Timestamp.now() - pd.Timedelta(minutes=len(df)),
|
||||
periods=len(df), freq='1min')
|
||||
|
||||
# Create subplot with secondary y-axis for volume
|
||||
fig = make_subplots(
|
||||
rows=2, cols=1,
|
||||
@@ -1472,11 +1523,16 @@ class TradingDashboard:
|
||||
row=1, col=1
|
||||
)
|
||||
|
||||
# Add Williams Market Structure pivot points
|
||||
# Add Williams Market Structure pivot points using 1s data if available
|
||||
try:
|
||||
pivot_points = self._get_williams_pivot_points_for_chart(df)
|
||||
# Use 1s data for Williams analysis, 1m data for chart display
|
||||
williams_data = df_1s if df_1s is not None and not df_1s.empty else df
|
||||
pivot_points = self._get_williams_pivot_points_for_chart(williams_data, chart_df=df)
|
||||
if pivot_points:
|
||||
self._add_williams_pivot_points_to_chart(fig, pivot_points, row=1)
|
||||
logger.info(f"[CHART] Added Williams pivot points using {actual_timeframe} data")
|
||||
else:
|
||||
logger.debug("[CHART] No Williams pivot points calculated")
|
||||
except Exception as e:
|
||||
logger.debug(f"Error adding Williams pivot points to chart: {e}")
|
||||
|
||||
@@ -1522,10 +1578,10 @@ class TradingDashboard:
|
||||
hovertemplate='<b>Volume: %{y:.0f}</b><br>%{x}<extra></extra>'
|
||||
),
|
||||
row=2, col=1
|
||||
)
|
||||
|
||||
)
|
||||
|
||||
# Mark recent trading decisions with proper markers
|
||||
if self.recent_decisions and not df.empty:
|
||||
if self.recent_decisions and df is not None and not df.empty:
|
||||
# Get the timeframe of displayed candles
|
||||
chart_start_time = df.index.min()
|
||||
chart_end_time = df.index.max()
|
||||
@@ -1559,10 +1615,10 @@ class TradingDashboard:
|
||||
decision_time_pd = pd.to_datetime(decision_time_utc)
|
||||
if chart_start_utc <= decision_time_pd <= chart_end_utc:
|
||||
signal_type = decision.get('signal_type', 'UNKNOWN')
|
||||
if decision['action'] == 'BUY':
|
||||
buy_decisions.append((decision, signal_type))
|
||||
elif decision['action'] == 'SELL':
|
||||
sell_decisions.append((decision, signal_type))
|
||||
if decision['action'] == 'BUY':
|
||||
buy_decisions.append((decision, signal_type))
|
||||
elif decision['action'] == 'SELL':
|
||||
sell_decisions.append((decision, signal_type))
|
||||
|
||||
logger.debug(f"[CHART] Showing {len(buy_decisions)} BUY and {len(sell_decisions)} SELL signals in chart timeframe")
|
||||
|
||||
@@ -1655,7 +1711,7 @@ class TradingDashboard:
|
||||
)
|
||||
|
||||
# Add closed trades markers with profit/loss styling and connecting lines
|
||||
if self.closed_trades and not df.empty:
|
||||
if self.closed_trades and df is not None and not df.empty:
|
||||
# Get the timeframe of displayed chart
|
||||
chart_start_time = df.index.min()
|
||||
chart_end_time = df.index.max()
|
||||
@@ -5415,7 +5471,7 @@ class TradingDashboard:
|
||||
logger.warning(f"Error extracting features for {timeframe}: {e}")
|
||||
return [0.0] * 50
|
||||
|
||||
def _get_williams_pivot_points_for_chart(self, df: pd.DataFrame) -> Optional[Dict]:
|
||||
def _get_williams_pivot_points_for_chart(self, df: pd.DataFrame, chart_df: pd.DataFrame = None) -> Optional[Dict]:
|
||||
"""Calculate Williams pivot points specifically for chart visualization with consistent timezone"""
|
||||
try:
|
||||
# Use existing Williams Market Structure instance instead of creating new one
|
||||
@@ -5423,9 +5479,12 @@ class TradingDashboard:
|
||||
logger.warning("Williams Market Structure not available for chart")
|
||||
return None
|
||||
|
||||
# Reduced requirement to match Williams minimum
|
||||
if len(df) < 20:
|
||||
logger.debug(f"[WILLIAMS_CHART] Insufficient data for pivot calculation: {len(df)} bars (need 20+)")
|
||||
# Use chart_df for timestamp mapping if provided, otherwise use df
|
||||
display_df = chart_df if chart_df is not None else df
|
||||
|
||||
# Williams requires minimum data for recursive analysis
|
||||
if len(df) < 50:
|
||||
logger.debug(f"[WILLIAMS_CHART] Insufficient data for Williams pivot calculation: {len(df)} bars (need 50+ for proper recursive analysis)")
|
||||
return None
|
||||
|
||||
# Ensure timezone consistency for the chart data
|
||||
@@ -5539,12 +5598,12 @@ class TradingDashboard:
|
||||
if isinstance(timestamp, datetime):
|
||||
# Williams Market Structure creates naive datetimes that are actually in local time
|
||||
# but without timezone info, so we need to localize them to our configured timezone
|
||||
if timestamp.tzinfo is None:
|
||||
# Williams creates timestamps in local time (Europe/Sofia), so localize directly
|
||||
local_timestamp = self.timezone.localize(timestamp)
|
||||
else:
|
||||
# If it has timezone info, convert to local timezone
|
||||
local_timestamp = timestamp.astimezone(self.timezone)
|
||||
if timestamp.tzinfo is None:
|
||||
# Williams creates timestamps in local time (Europe/Sofia), so localize directly
|
||||
local_timestamp = self.timezone.localize(timestamp)
|
||||
else:
|
||||
# If it has timezone info, convert to local timezone
|
||||
local_timestamp = timestamp.astimezone(self.timezone)
|
||||
else:
|
||||
# Fallback if timestamp is not a datetime
|
||||
local_timestamp = self._now_local()
|
||||
@@ -5822,6 +5881,41 @@ class TradingDashboard:
|
||||
)
|
||||
return fig
|
||||
|
||||
    def _aggregate_1s_to_1m(self, df_1s):
        """Aggregate 1s data to 1m for chart display while preserving 1s data for Williams analysis.

        Args:
            df_1s: OHLCV DataFrame at 1-second resolution with columns
                'open', 'high', 'low', 'close', 'volume' and (ideally) a
                DatetimeIndex.

        Returns:
            A 1-minute OHLCV DataFrame, or the input unchanged when the
            index is not a DatetimeIndex or resampling fails, or ``None``
            for empty/``None`` input. Callers therefore cannot assume the
            result is actually 1-minute resolution.
        """
        try:
            # Nothing to aggregate
            if df_1s is None or df_1s.empty:
                return None

            # Check if the index is a DatetimeIndex - if not, we can't resample
            if not isinstance(df_1s.index, pd.DatetimeIndex):
                logger.warning(f"Cannot aggregate data: index is {type(df_1s.index)} instead of DatetimeIndex")
                return df_1s  # Return original data if we can't aggregate

            # Ensure timezone consistency (project helper; normalizes tz before resample)
            df_1s = self._ensure_timezone_consistency(df_1s)

            # Calculate OHLCV for 1m from 1s data for cleaner chart visualization
            # Use 'min' instead of deprecated 'T'
            ohlcv_1m = df_1s.resample('1min').agg({
                'open': 'first',
                'high': 'max',
                'low': 'min',
                'close': 'last',
                'volume': 'sum'
            }).dropna()

            # Ensure proper timezone formatting on the aggregated frame too
            ohlcv_1m = self._ensure_timezone_consistency(ohlcv_1m)

            logger.debug(f"[CHART] Aggregated {len(df_1s)} 1s bars to {len(ohlcv_1m)} 1m bars for display")
            return ohlcv_1m

        except Exception as e:
            logger.warning(f"Error aggregating 1s data to 1m: {e}")
            # Return original data as fallback (best-effort: chart shows 1s bars)
            return df_1s
|
||||
|
||||
def create_dashboard(data_provider: DataProvider = None, orchestrator: TradingOrchestrator = None, trading_executor: TradingExecutor = None) -> TradingDashboard:
    """Factory function to create a trading dashboard.

    Args:
        data_provider: optional DataProvider; TradingDashboard presumably
            constructs a default when None — confirm in its __init__.
        orchestrator: optional TradingOrchestrator instance.
        trading_executor: optional TradingExecutor instance.

    Returns:
        A fully constructed TradingDashboard wired to the given components.
    """
    return TradingDashboard(data_provider=data_provider, orchestrator=orchestrator, trading_executor=trading_executor)
|
||||
Reference in New Issue
Block a user