data stream working

Author: Dobromir Popov
Date: 2025-09-02 17:59:12 +03:00
parent 8068e554f3
commit c55175c44d
8 changed files with 1000 additions and 132 deletions


@@ -232,6 +232,9 @@ class CleanTradingDashboard:
        </html>
        '''

        # Add API endpoints to the Flask server
        self._add_api_endpoints()

        # Suppress Dash development mode logging
        self.app.enable_dev_tools(debug=False, dev_tools_silence_routes_logging=True)
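The two added calls wire the REST layer into the dashboard's constructor and silence Dash's per-request route logging. For context (not part of this commit): the pattern works because Dash exposes its underlying Flask instance as app.server, so plain Flask routes can be registered alongside the Dash UI. A minimal standalone sketch with hypothetical names:

from dash import Dash
from flask import jsonify

app = Dash(__name__)

@app.server.route('/api/health')       # app.server is the underlying flask.Flask instance
def health():
    return jsonify({'status': 'healthy'})

if __name__ == '__main__':
    app.run(debug=False)                # Dash >= 2.7; older releases use app.run_server()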
@@ -265,6 +268,300 @@ class CleanTradingDashboard:
        logger.debug("Clean Trading Dashboard initialized with HIGH-FREQUENCY COB integration and signal generation")

    def _add_api_endpoints(self):
        """Add API endpoints to the Flask server for data access"""
        from flask import jsonify, request

        @self.app.server.route('/api/stream-status', methods=['GET'])
        def get_stream_status():
            """Get data stream status"""
            try:
                status = self.orchestrator.get_data_stream_status()
                summary = self.orchestrator.get_stream_summary()
                return jsonify({
                    'status': status,
                    'summary': summary,
                    'timestamp': datetime.now().isoformat()
                })
            except Exception as e:
                return jsonify({'error': str(e)}), 500

        @self.app.server.route('/api/ohlcv-data', methods=['GET'])
        def get_ohlcv_data():
            """Get OHLCV data with indicators"""
            try:
                symbol = request.args.get('symbol', 'ETH/USDT')
                timeframe = request.args.get('timeframe', '1m')
                limit = int(request.args.get('limit', 300))

                # Get OHLCV data from orchestrator
                ohlcv_data = self._get_ohlcv_data_with_indicators(symbol, timeframe, limit)

                return jsonify({
                    'symbol': symbol,
                    'timeframe': timeframe,
                    'data': ohlcv_data,
                    'timestamp': datetime.now().isoformat()
                })
            except Exception as e:
                return jsonify({'error': str(e)}), 500

        @self.app.server.route('/api/cob-data', methods=['GET'])
        def get_cob_data():
            """Get COB data with price buckets"""
            try:
                symbol = request.args.get('symbol', 'ETH/USDT')
                limit = int(request.args.get('limit', 300))

                # Get COB data from orchestrator
                cob_data = self._get_cob_data_with_buckets(symbol, limit)

                return jsonify({
                    'symbol': symbol,
                    'data': cob_data,
                    'timestamp': datetime.now().isoformat()
                })
            except Exception as e:
                return jsonify({'error': str(e)}), 500

        @self.app.server.route('/api/snapshot', methods=['POST'])
        def create_snapshot():
            """Create a data snapshot"""
            try:
                filepath = self.orchestrator.save_data_snapshot()
                return jsonify({
                    'filepath': filepath,
                    'timestamp': datetime.now().isoformat()
                })
            except Exception as e:
                return jsonify({'error': str(e)}), 500

        @self.app.server.route('/api/health', methods=['GET'])
        def health_check():
            """Health check endpoint"""
            return jsonify({
                'status': 'healthy',
                'dashboard_running': True,
                'orchestrator_active': hasattr(self, 'orchestrator'),
                'timestamp': datetime.now().isoformat()
            })
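A quick way to exercise the five new routes once the dashboard is running; a smoke-test sketch that assumes the default Dash port 8050 and the requests package (neither is guaranteed by this commit):

import requests

BASE = "http://localhost:8050"   # assumption: Dash default port; adjust to wherever the dashboard binds

print(requests.get(f"{BASE}/api/health", timeout=5).json())
print(requests.get(f"{BASE}/api/stream-status", timeout=5).json())

ohlcv = requests.get(f"{BASE}/api/ohlcv-data",
                     params={"symbol": "ETH/USDT", "timeframe": "1m", "limit": 100},
                     timeout=10).json()
print(ohlcv["symbol"], len(ohlcv["data"]), "candles with indicators")

cob = requests.get(f"{BASE}/api/cob-data",
                   params={"symbol": "ETH/USDT", "limit": 50},
                   timeout=10).json()
print(cob["symbol"], len(cob["data"]), "COB snapshots")

# POST /api/snapshot writes a snapshot file server-side and returns its path
print(requests.post(f"{BASE}/api/snapshot", timeout=10).json())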
    def _get_ohlcv_data_with_indicators(self, symbol: str, timeframe: str, limit: int = 300):
        """Get OHLCV data with technical indicators from data stream monitor"""
        try:
            # Get OHLCV data from data stream monitor
            if hasattr(self.orchestrator, 'data_stream_monitor') and self.orchestrator.data_stream_monitor:
                stream_key = f"ohlcv_{timeframe}"
                if stream_key in self.orchestrator.data_stream_monitor.data_streams:
                    ohlcv_data = list(self.orchestrator.data_stream_monitor.data_streams[stream_key])

                    # Take the last 'limit' items
                    ohlcv_data = ohlcv_data[-limit:] if len(ohlcv_data) > limit else ohlcv_data

                    if not ohlcv_data:
                        return []

                    # Convert to DataFrame for indicator calculation
                    df_data = []
                    for item in ohlcv_data:
                        df_data.append({
                            'timestamp': item.get('timestamp', ''),
                            'open': float(item.get('open', 0)),
                            'high': float(item.get('high', 0)),
                            'low': float(item.get('low', 0)),
                            'close': float(item.get('close', 0)),
                            'volume': float(item.get('volume', 0))
                        })

                    if not df_data:
                        return []

                    df = pd.DataFrame(df_data)
                    df['timestamp'] = pd.to_datetime(df['timestamp'])
                    df.set_index('timestamp', inplace=True)

                    # Add technical indicators
                    df['sma_20'] = df['close'].rolling(window=20).mean()
                    df['sma_50'] = df['close'].rolling(window=50).mean()
                    df['ema_12'] = df['close'].ewm(span=12).mean()
                    df['ema_26'] = df['close'].ewm(span=26).mean()

                    # RSI
                    delta = df['close'].diff()
                    gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
                    loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
                    rs = gain / loss
                    df['rsi'] = 100 - (100 / (1 + rs))

                    # MACD
                    df['macd'] = df['ema_12'] - df['ema_26']
                    df['macd_signal'] = df['macd'].ewm(span=9).mean()
                    df['macd_histogram'] = df['macd'] - df['macd_signal']

                    # Bollinger Bands
                    df['bb_middle'] = df['close'].rolling(window=20).mean()
                    bb_std = df['close'].rolling(window=20).std()
                    df['bb_upper'] = df['bb_middle'] + (bb_std * 2)
                    df['bb_lower'] = df['bb_middle'] - (bb_std * 2)

                    # Volume indicators
                    df['volume_sma'] = df['volume'].rolling(window=20).mean()
                    df['volume_ratio'] = df['volume'] / df['volume_sma']

                    # Convert to list of dictionaries
                    result = []
                    for _, row in df.iterrows():
                        data_point = {
                            'timestamp': row.name.isoformat() if hasattr(row.name, 'isoformat') else str(row.name),
                            'open': float(row['open']),
                            'high': float(row['high']),
                            'low': float(row['low']),
                            'close': float(row['close']),
                            'volume': float(row['volume']),
                            'indicators': {
                                'sma_20': float(row['sma_20']) if pd.notna(row['sma_20']) else None,
                                'sma_50': float(row['sma_50']) if pd.notna(row['sma_50']) else None,
                                'ema_12': float(row['ema_12']) if pd.notna(row['ema_12']) else None,
                                'ema_26': float(row['ema_26']) if pd.notna(row['ema_26']) else None,
                                'rsi': float(row['rsi']) if pd.notna(row['rsi']) else None,
                                'macd': float(row['macd']) if pd.notna(row['macd']) else None,
                                'macd_signal': float(row['macd_signal']) if pd.notna(row['macd_signal']) else None,
                                'macd_histogram': float(row['macd_histogram']) if pd.notna(row['macd_histogram']) else None,
                                'bb_upper': float(row['bb_upper']) if pd.notna(row['bb_upper']) else None,
                                'bb_middle': float(row['bb_middle']) if pd.notna(row['bb_middle']) else None,
                                'bb_lower': float(row['bb_lower']) if pd.notna(row['bb_lower']) else None,
                                'volume_ratio': float(row['volume_ratio']) if pd.notna(row['volume_ratio']) else None
                            }
                        }
                        result.append(data_point)

                    return result

            # Fallback to data provider if stream monitor not available
            ohlcv_data = self.data_provider.get_ohlcv(symbol, timeframe, limit=limit)
            if ohlcv_data is None or ohlcv_data.empty:
                return []

            # Add technical indicators
            df = ohlcv_data.copy()

            # Basic indicators
            df['sma_20'] = df['close'].rolling(window=20).mean()
            df['sma_50'] = df['close'].rolling(window=50).mean()
            df['ema_12'] = df['close'].ewm(span=12).mean()
            df['ema_26'] = df['close'].ewm(span=26).mean()

            # RSI
            delta = df['close'].diff()
            gain = (delta.where(delta > 0, 0)).rolling(window=14).mean()
            loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
            rs = gain / loss
            df['rsi'] = 100 - (100 / (1 + rs))

            # MACD
            df['macd'] = df['ema_12'] - df['ema_26']
            df['macd_signal'] = df['macd'].ewm(span=9).mean()
            df['macd_histogram'] = df['macd'] - df['macd_signal']

            # Bollinger Bands
            df['bb_middle'] = df['close'].rolling(window=20).mean()
            bb_std = df['close'].rolling(window=20).std()
            df['bb_upper'] = df['bb_middle'] + (bb_std * 2)
            df['bb_lower'] = df['bb_middle'] - (bb_std * 2)

            # Volume indicators
            df['volume_sma'] = df['volume'].rolling(window=20).mean()
            df['volume_ratio'] = df['volume'] / df['volume_sma']

            # Convert to list of dictionaries
            result = []
            for _, row in df.iterrows():
                data_point = {
                    'timestamp': row.name.isoformat() if hasattr(row.name, 'isoformat') else str(row.name),
                    'open': float(row['open']),
                    'high': float(row['high']),
                    'low': float(row['low']),
                    'close': float(row['close']),
                    'volume': float(row['volume']),
                    'indicators': {
                        'sma_20': float(row['sma_20']) if pd.notna(row['sma_20']) else None,
                        'sma_50': float(row['sma_50']) if pd.notna(row['sma_50']) else None,
                        'ema_12': float(row['ema_12']) if pd.notna(row['ema_12']) else None,
                        'ema_26': float(row['ema_26']) if pd.notna(row['ema_26']) else None,
                        'rsi': float(row['rsi']) if pd.notna(row['rsi']) else None,
                        'macd': float(row['macd']) if pd.notna(row['macd']) else None,
                        'macd_signal': float(row['macd_signal']) if pd.notna(row['macd_signal']) else None,
                        'macd_histogram': float(row['macd_histogram']) if pd.notna(row['macd_histogram']) else None,
                        'bb_upper': float(row['bb_upper']) if pd.notna(row['bb_upper']) else None,
                        'bb_middle': float(row['bb_middle']) if pd.notna(row['bb_middle']) else None,
                        'bb_lower': float(row['bb_lower']) if pd.notna(row['bb_lower']) else None,
                        'volume_ratio': float(row['volume_ratio']) if pd.notna(row['volume_ratio']) else None
                    }
                }
                result.append(data_point)

            return result
        except Exception as e:
            logger.error(f"Error getting OHLCV data: {e}")
            return []
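The indicator block above is duplicated between the stream-monitor path and the data-provider fallback. A minimal sketch (not part of this commit) of the same calculations factored into one helper and exercised on synthetic candles; note the RSI here is the simple rolling-mean variant used above, not Wilder's smoothed RSI:

import numpy as np
import pandas as pd

def add_indicators(df: pd.DataFrame) -> pd.DataFrame:
    """Add the indicator set used by the dashboard: SMA, EMA, RSI, MACD, Bollinger, volume ratio."""
    out = df.copy()
    out['sma_20'] = out['close'].rolling(window=20).mean()
    out['sma_50'] = out['close'].rolling(window=50).mean()
    out['ema_12'] = out['close'].ewm(span=12).mean()
    out['ema_26'] = out['close'].ewm(span=26).mean()

    # Simple rolling-mean RSI (same formula as the handlers above)
    delta = out['close'].diff()
    gain = delta.where(delta > 0, 0).rolling(window=14).mean()
    loss = (-delta.where(delta < 0, 0)).rolling(window=14).mean()
    out['rsi'] = 100 - (100 / (1 + gain / loss))

    # MACD line, signal line, histogram
    out['macd'] = out['ema_12'] - out['ema_26']
    out['macd_signal'] = out['macd'].ewm(span=9).mean()
    out['macd_histogram'] = out['macd'] - out['macd_signal']

    # Bollinger Bands: 20-period mean +/- 2 standard deviations
    bb_std = out['close'].rolling(window=20).std()
    out['bb_middle'] = out['close'].rolling(window=20).mean()
    out['bb_upper'] = out['bb_middle'] + 2 * bb_std
    out['bb_lower'] = out['bb_middle'] - 2 * bb_std

    # Volume relative to its 20-period average
    out['volume_ratio'] = out['volume'] / out['volume'].rolling(window=20).mean()
    return out

if __name__ == '__main__':
    # Synthetic 1-minute candles, only to exercise the helper
    idx = pd.date_range('2025-09-02 14:00', periods=120, freq='min')
    close = pd.Series(np.random.default_rng(0).normal(0, 1, len(idx)).cumsum() + 4300, index=idx)
    demo = pd.DataFrame({
        'open': close.shift(1).fillna(close.iloc[0]),
        'high': close + 0.5,
        'low': close - 0.5,
        'close': close,
        'volume': 10.0,
    })
    print(add_indicators(demo).tail(3)[['close', 'rsi', 'macd', 'bb_upper']])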
    def _get_cob_data_with_buckets(self, symbol: str, limit: int = 300):
        """Get COB data with price buckets ($1 increments)"""
        try:
            # Get COB data from orchestrator
            cob_data = self.orchestrator.get_cob_data(symbol, limit)
            if not cob_data:
                return []

            # Process COB data into price buckets
            result = []
            for cob_snapshot in cob_data:
                # Create price buckets ($1 increments)
                price_buckets = {}
                mid_price = cob_snapshot.mid_price

                # Create buckets around mid price
                for i in range(-50, 51):  # -$50 to +$50 from mid price
                    bucket_price = mid_price + i
                    bucket_key = f"{bucket_price:.2f}"
                    price_buckets[bucket_key] = {
                        'bid_volume': 0,
                        'ask_volume': 0,
                        'bid_count': 0,
                        'ask_count': 0
                    }
                # Fill buckets with order book data, snapping each level to its nearest $1 bucket
                # (exact string matching against raw level prices would miss almost every level)
                for level in cob_snapshot.bids:
                    offset = round(level.price - mid_price)
                    if -50 <= offset <= 50:
                        bucket_key = f"{mid_price + offset:.2f}"
                        price_buckets[bucket_key]['bid_volume'] += level.volume
                        price_buckets[bucket_key]['bid_count'] += 1

                for level in cob_snapshot.asks:
                    offset = round(level.price - mid_price)
                    if -50 <= offset <= 50:
                        bucket_key = f"{mid_price + offset:.2f}"
                        price_buckets[bucket_key]['ask_volume'] += level.volume
                        price_buckets[bucket_key]['ask_count'] += 1
                data_point = {
                    'timestamp': cob_snapshot.timestamp.isoformat() if hasattr(cob_snapshot.timestamp, 'isoformat') else str(cob_snapshot.timestamp),
                    'mid_price': float(cob_snapshot.mid_price),
                    'spread': float(cob_snapshot.spread),
                    'imbalance': float(cob_snapshot.imbalance),
                    'price_buckets': price_buckets,
                    'total_bid_volume': float(cob_snapshot.total_bid_volume),
                    'total_ask_volume': float(cob_snapshot.total_ask_volume)
                }
                result.append(data_point)

            return result
        except Exception as e:
            logger.error(f"Error getting COB data: {e}")
            return []
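Because bucket keys are derived from mid_price plus whole-dollar offsets, each level price has to be snapped to the nearest offset before lookup, as done above. A tiny self-contained check of that snapping, with made-up prices and a stand-in PriceLevel type (the real COB level class may differ):

from collections import namedtuple
from typing import Optional

PriceLevel = namedtuple('PriceLevel', 'price volume')   # stand-in for the real COB level type

def bucket_key(mid_price: float, price: float) -> Optional[str]:
    """Snap a price to the nearest $1 bucket around mid_price; None if outside +/- $50."""
    offset = round(price - mid_price)
    return f"{mid_price + offset:.2f}" if -50 <= offset <= 50 else None

mid = 4300.55
for lvl in [PriceLevel(4300.25, 3.0), PriceLevel(4299.80, 1.5), PriceLevel(4240.00, 9.9)]:
    print(lvl.price, '->', bucket_key(mid, lvl.price))
# 4300.25 -> 4300.55, 4299.80 -> 4299.55, 4240.00 -> None (more than $50 below mid)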
    def _get_universal_data_from_orchestrator(self) -> Optional[UniversalDataStream]:
        """Get universal data through orchestrator as per architecture."""
        try: