replay system

Dobromir Popov
2025-07-20 12:37:02 +03:00
parent 469269e809
commit 12865fd3ef
13 changed files with 6132 additions and 465 deletions

@@ -46,6 +46,53 @@ import aiohttp.resolver

logger = logging.getLogger(__name__)

class SimpleRateLimiter:
    """Simple rate limiter to avoid HTTP 418 (IP ban) errors"""

    def __init__(self, requests_per_second: float = 0.5):  # Much more conservative
        self.requests_per_second = requests_per_second
        self.last_request_time = 0
        self.min_interval = 1.0 / requests_per_second
        self.consecutive_errors = 0
        self.blocked_until = 0

    def can_make_request(self) -> bool:
        """Check if we can make a request"""
        now = time.time()
        # Check if we're in a blocked state
        if now < self.blocked_until:
            return False
        return (now - self.last_request_time) >= self.min_interval

    def record_request(self, success: bool = True):
        """Record that a request was made"""
        self.last_request_time = time.time()
        if success:
            self.consecutive_errors = 0
        else:
            self.consecutive_errors += 1
            # Exponential backoff after 3+ consecutive errors: 10s, 20s, 40s, ... capped at 5 min
            if self.consecutive_errors >= 3:
                backoff_time = min(300, 10 * (2 ** (self.consecutive_errors - 3)))
                self.blocked_until = time.time() + backoff_time
                logger.warning(f"Rate limiter blocked for {backoff_time}s after {self.consecutive_errors} errors")

    def get_wait_time(self) -> float:
        """Get the time to wait before the next request"""
        now = time.time()
        # Check if blocked
        if now < self.blocked_until:
            return self.blocked_until - now
        time_since_last = now - self.last_request_time
        if time_since_last < self.min_interval:
            return self.min_interval - time_since_last
        return 0.0
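
As a minimal usage sketch (the demo loop below is illustrative, not part of this commit), a caller sleeps out get_wait_time() and then gates each call through can_make_request()/record_request(); once three or more consecutive failures accumulate, the limiter blocks for min(300, 10 * 2**(n - 3)) seconds, i.e. 10s, 20s, 40s, ... up to five minutes:

    import asyncio
    import time

    async def demo():
        limiter = SimpleRateLimiter(requests_per_second=2.0)  # one request per 0.5s
        for i in range(5):
            # Sleep out the limiter's spacing (and any error backoff) first
            wait = limiter.get_wait_time()
            if wait > 0:
                await asyncio.sleep(wait)
            if limiter.can_make_request():
                ok = (i != 3)  # pretend the fourth request fails
                limiter.record_request(success=ok)
                print(f"request {i}: {'ok' if ok else 'failed'} at t={time.time():.2f}")

    asyncio.run(demo())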
class ExchangeType(Enum):
    BINANCE = "binance"
    COINBASE = "coinbase"
@@ -125,13 +172,16 @@ class MultiExchangeCOBProvider:
        self.bucket_update_frequency = 100  # ms
        self.consolidation_frequency = 100  # ms

        # REST API configuration for deep order book
        self.rest_api_frequency = 1000  # ms - full snapshot every 1 second
        self.rest_depth_limit = 500  # Increased from 100 to 500 levels via REST for maximum depth
        # REST API configuration for deep order book - REDUCED to prevent 418 errors
        self.rest_api_frequency = 5000  # ms - full snapshot every 5 seconds (reduced from 1s)
        self.rest_depth_limit = 100  # Reduced from 500 to 100 levels to reduce load

        # Exchange configurations
        self.exchange_configs = self._initialize_exchange_configs()

        # Rate limiter for REST API calls
        self.rest_rate_limiter = SimpleRateLimiter(requests_per_second=2.0)  # Very conservative

        # Order book storage - now with deep and live separation
        self.exchange_order_books = {
            symbol: {
@@ -291,7 +341,7 @@ class MultiExchangeCOBProvider:
        return configs

    async def start_streaming(self):
        """Start real-time order book streaming from all configured exchanges"""
        """Start real-time order book streaming from all configured exchanges using only WebSocket"""
        logger.info(f"Starting COB streaming for symbols: {self.symbols}")
        self.is_streaming = True
@@ -303,21 +353,32 @@ class MultiExchangeCOBProvider:
        for symbol in self.symbols:
            for exchange_name, config in self.exchange_configs.items():
                if config.enabled and exchange_name in self.active_exchanges:
                    # Start WebSocket stream
                    tasks.append(self._stream_exchange_orderbook(exchange_name, symbol))
                    # Start deep order book (REST API) stream
                    tasks.append(self._stream_deep_orderbook(exchange_name, symbol))
                    # Start trade stream (for SVP)
                    if exchange_name == 'binance':  # Only Binance for now
                    if exchange_name == 'binance':
                        # Enhanced Binance WebSocket streams (NO REST API)
                        # 1. Partial depth stream (20 levels, 100ms updates) - for real-time updates
                        tasks.append(self._stream_binance_orderbook(symbol, config))
                        # 2. Full depth stream (1000ms updates) - replaces REST API
                        tasks.append(self._stream_binance_full_depth(symbol))
                        # 3. Trade stream for order flow analysis
                        tasks.append(self._stream_binance_trades(symbol))
                        # 4. Book ticker stream for real-time best bid/ask
                        tasks.append(self._stream_binance_book_ticker(symbol))
                        # 5. Aggregate trade stream for large order detection
                        tasks.append(self._stream_binance_agg_trades(symbol))
                    else:
                        # Other exchanges - WebSocket only
                        tasks.append(self._stream_exchange_orderbook(exchange_name, symbol))

        # Start continuous consolidation and bucket updates
        tasks.append(self._continuous_consolidation())
        tasks.append(self._continuous_bucket_updates())

        logger.info(f"Starting {len(tasks)} COB streaming tasks")
        logger.info(f"Starting {len(tasks)} COB streaming tasks (WebSocket only - NO REST API)")
        await asyncio.gather(*tasks)
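
Note that the five Binance streams above each open a separate socket per symbol. Binance also supports multiplexing several streams over one connection through its combined-stream endpoint, where each message arrives wrapped as {"stream": ..., "data": ...}. A sketch of that alternative (the dispatch helper is illustrative, not part of this commit):

    import json

    def combined_stream_url(symbol: str, streams: list) -> str:
        # e.g. streams = ["depth20@100ms", "bookTicker", "aggTrade"]
        s = symbol.replace('/', '').lower()
        joined = '/'.join(f"{s}@{st}" for st in streams)
        return f"wss://stream.binance.com:9443/stream?streams={joined}"

    def dispatch(message: str, handlers: dict) -> None:
        # Combined-stream messages are wrapped: {"stream": "btcusdt@bookTicker", "data": {...}}
        wrapper = json.loads(message)
        stream_name, data = wrapper["stream"], wrapper["data"]
        suffix = stream_name.split('@', 1)[1]  # "bookTicker", "depth20@100ms", ...
        if suffix in handlers:
            handlers[suffix](data)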
    async def _setup_http_session(self):

@@ -371,11 +432,19 @@ class MultiExchangeCOBProvider:
                await asyncio.sleep(5)  # Wait 5 seconds on error

    async def _fetch_binance_deep_orderbook(self, symbol: str):
        """Fetch deep order book from Binance REST API"""
        """Fetch deep order book from Binance REST API with rate limiting"""
        try:
            if not self.rest_session:
                return

            # Check the rate limiter before making a request
            if not self.rest_rate_limiter.can_make_request():
                wait_time = self.rest_rate_limiter.get_wait_time()
                if wait_time > 0:
                    logger.debug(f"Rate limited, waiting {wait_time:.1f}s before {symbol} request")
                    await asyncio.sleep(wait_time)
                return  # Skip this cycle

            # Convert symbol format for Binance
            binance_symbol = symbol.replace('/', '').upper()
            url = "https://api.binance.com/api/v3/depth"
@@ -384,10 +453,21 @@ class MultiExchangeCOBProvider:
                'limit': self.rest_depth_limit
            }

            async with self.rest_session.get(url, params=params) as response:
            # Add headers to reduce detection
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
                'Accept': 'application/json'
            }

            async with self.rest_session.get(url, params=params, headers=headers) as response:
                if response.status == 200:
                    data = await response.json()
                    await self._process_binance_deep_orderbook(symbol, data)
                    self.rest_rate_limiter.record_request()  # Record successful request
                elif response.status in [418, 429, 451]:
                    logger.warning(f"Binance REST API rate limited (HTTP {response.status}) for {symbol}")
                    # Increase wait time before the next request
                    await asyncio.sleep(10)  # Wait 10 seconds on rate limit
                else:
                    logger.error(f"Binance REST API error {response.status} for {symbol}")
@@ -1571,4 +1651,262 @@ class MultiExchangeCOBProvider:
            return self.realtime_stats.get(symbol, {})
        except Exception as e:
            logger.error(f"Error getting real-time stats for {symbol}: {e}")
            return {}
    async def _stream_binance_full_depth(self, symbol: str):
        """Stream full depth order book from Binance WebSocket (replaces REST API)"""
        try:
            binance_symbol = symbol.replace('/', '').upper()
            # Depth stream at 1000ms intervals (Binance's @depth stream sends order book deltas, not fixed 1000-level snapshots)
            ws_url = f"wss://stream.binance.com:9443/ws/{binance_symbol.lower()}@depth@1000ms"
            logger.info(f"Connecting to Binance full depth WebSocket: {ws_url}")

            if websockets is None or websockets_connect is None:
                raise ImportError("websockets module not available")

            async with websockets_connect(ws_url) as websocket:
                logger.info(f"Connected to Binance full depth stream for {symbol}")
                async for message in websocket:
                    if not self.is_streaming:
                        break
                    try:
                        data = json.loads(message)
                        await self._process_binance_full_depth(symbol, data)
                    except json.JSONDecodeError as e:
                        logger.error(f"Error parsing Binance full depth message: {e}")
                    except Exception as e:
                        logger.error(f"Error processing Binance full depth: {e}")
        except Exception as e:
            logger.error(f"Binance full depth WebSocket error for {symbol}: {e}")
        finally:
            logger.info(f"Disconnected from Binance full depth stream for {symbol}")
    async def _stream_binance_book_ticker(self, symbol: str):
        """Stream best bid/ask prices from Binance WebSocket"""
        try:
            binance_symbol = symbol.replace('/', '').upper()
            ws_url = f"wss://stream.binance.com:9443/ws/{binance_symbol.lower()}@bookTicker"
            logger.info(f"Connecting to Binance book ticker WebSocket: {ws_url}")

            if websockets is None or websockets_connect is None:
                raise ImportError("websockets module not available")

            async with websockets_connect(ws_url) as websocket:
                logger.info(f"Connected to Binance book ticker stream for {symbol}")
                async for message in websocket:
                    if not self.is_streaming:
                        break
                    try:
                        data = json.loads(message)
                        await self._process_binance_book_ticker(symbol, data)
                    except json.JSONDecodeError as e:
                        logger.error(f"Error parsing Binance book ticker message: {e}")
                    except Exception as e:
                        logger.error(f"Error processing Binance book ticker: {e}")
        except Exception as e:
            logger.error(f"Binance book ticker WebSocket error for {symbol}: {e}")
        finally:
            logger.info(f"Disconnected from Binance book ticker stream for {symbol}")
    async def _stream_binance_agg_trades(self, symbol: str):
        """Stream aggregated trades from Binance WebSocket for large order detection"""
        try:
            binance_symbol = symbol.replace('/', '').upper()
            ws_url = f"wss://stream.binance.com:9443/ws/{binance_symbol.lower()}@aggTrade"
            logger.info(f"Connecting to Binance aggregate trades WebSocket: {ws_url}")

            if websockets is None or websockets_connect is None:
                raise ImportError("websockets module not available")

            async with websockets_connect(ws_url) as websocket:
                logger.info(f"Connected to Binance aggregate trades stream for {symbol}")
                async for message in websocket:
                    if not self.is_streaming:
                        break
                    try:
                        data = json.loads(message)
                        await self._process_binance_agg_trade(symbol, data)
                    except json.JSONDecodeError as e:
                        logger.error(f"Error parsing Binance agg trade message: {e}")
                    except Exception as e:
                        logger.error(f"Error processing Binance agg trade: {e}")
        except Exception as e:
            logger.error(f"Binance aggregate trades WebSocket error for {symbol}: {e}")
        finally:
            logger.info(f"Disconnected from Binance aggregate trades stream for {symbol}")
    async def _process_binance_full_depth(self, symbol: str, data: Dict):
        """Process full depth order book data from WebSocket (replaces REST API)"""
        try:
            timestamp = datetime.now()
            exchange_name = 'binance'

            # Parse bids and asks. Partial-depth snapshots use 'bids'/'asks' keys,
            # while the @depth diff stream uses 'b'/'a'; accept both.
            full_bids = {}
            full_asks = {}

            for bid_data in data.get('bids', data.get('b', [])):
                price = float(bid_data[0])
                size = float(bid_data[1])
                if size > 0:
                    full_bids[price] = ExchangeOrderBookLevel(
                        exchange=exchange_name,
                        price=price,
                        size=size,
                        volume_usd=price * size,
                        orders_count=1,
                        side='bid',
                        timestamp=timestamp
                    )

            for ask_data in data.get('asks', data.get('a', [])):
                price = float(ask_data[0])
                size = float(ask_data[1])
                if size > 0:
                    full_asks[price] = ExchangeOrderBookLevel(
                        exchange=exchange_name,
                        price=price,
                        size=size,
                        volume_usd=price * size,
                        orders_count=1,
                        side='ask',
                        timestamp=timestamp
                    )

            # Update full depth storage (replaces REST API data)
            async with self.data_lock:
                self.exchange_order_books[symbol][exchange_name]['deep_bids'] = full_bids
                self.exchange_order_books[symbol][exchange_name]['deep_asks'] = full_asks
                self.exchange_order_books[symbol][exchange_name]['deep_timestamp'] = timestamp
                self.exchange_order_books[symbol][exchange_name]['last_update_id'] = data.get('lastUpdateId', data.get('u'))

            logger.debug(f"Updated full depth via WebSocket for {symbol}: {len(full_bids)} bids, {len(full_asks)} asks")

        except Exception as e:
            logger.error(f"Error processing full depth WebSocket data for {symbol}: {e}")
    async def _process_binance_book_ticker(self, symbol: str, data: Dict):
        """Process book ticker data for best bid/ask tracking"""
        try:
            timestamp = datetime.now()
            best_bid_price = float(data.get('b', 0))
            best_bid_qty = float(data.get('B', 0))
            best_ask_price = float(data.get('a', 0))
            best_ask_qty = float(data.get('A', 0))

            # Store best bid/ask data
            async with self.data_lock:
                if symbol not in self.realtime_stats:
                    self.realtime_stats[symbol] = {}
                self.realtime_stats[symbol].update({
                    'best_bid_price': best_bid_price,
                    'best_bid_qty': best_bid_qty,
                    'best_ask_price': best_ask_price,
                    'best_ask_qty': best_ask_qty,
                    'spread': best_ask_price - best_bid_price,
                    'mid_price': (best_bid_price + best_ask_price) / 2,
                    'book_ticker_timestamp': timestamp
                })

            logger.debug(f"Book ticker update for {symbol}: Bid {best_bid_price}@{best_bid_qty}, Ask {best_ask_price}@{best_ask_qty}")

        except Exception as e:
            logger.error(f"Error processing book ticker for {symbol}: {e}")
    async def _process_binance_agg_trade(self, symbol: str, data: Dict):
        """Process aggregate trade data for large order detection"""
        try:
            timestamp = datetime.fromtimestamp(int(data['T']) / 1000)
            price = float(data['p'])
            quantity = float(data['q'])
            is_buyer_maker = data['m']
            agg_trade_id = data['a']
            first_trade_id = data['f']
            last_trade_id = data['l']

            # Calculate trade value and size
            trade_value_usd = price * quantity
            trade_count = last_trade_id - first_trade_id + 1

            # Detect large orders (institutional activity)
            is_large_order = trade_value_usd > 10000   # $10k+ trades
            is_whale_order = trade_value_usd > 100000  # $100k+ trades

            agg_trade = {
                'symbol': symbol,
                'timestamp': timestamp,
                'price': price,
                'quantity': quantity,
                'value_usd': trade_value_usd,
                'trade_count': trade_count,
                'is_buyer_maker': is_buyer_maker,
                'side': 'sell' if is_buyer_maker else 'buy',  # Taker side: if the buyer was the maker, the aggressor sold
                'is_large_order': is_large_order,
                'is_whale_order': is_whale_order,
                'agg_trade_id': agg_trade_id
            }

            # Add to aggregate trade tracking
            await self._add_agg_trade_to_analysis(symbol, agg_trade)

            # Log significant trades
            if is_whale_order:
                logger.info(f"WHALE ORDER detected for {symbol}: ${trade_value_usd:,.0f} {agg_trade['side'].upper()} at ${price}")
            elif is_large_order:
                logger.debug(f"Large order for {symbol}: ${trade_value_usd:,.0f} {agg_trade['side'].upper()}")

        except Exception as e:
            logger.error(f"Error processing aggregate trade for {symbol}: {e}")
    async def _add_agg_trade_to_analysis(self, symbol: str, agg_trade: Dict):
        """Add aggregate trade to analysis queues"""
        try:
            async with self.data_lock:
                # Initialize if needed
                if symbol not in self.realtime_stats:
                    self.realtime_stats[symbol] = {}
                if 'agg_trades' not in self.realtime_stats[symbol]:
                    self.realtime_stats[symbol]['agg_trades'] = deque(maxlen=1000)

                # Add to aggregate trade history
                self.realtime_stats[symbol]['agg_trades'].append(agg_trade)

                # Update real-time aggregate statistics over the last 100 trades
                recent_trades = list(self.realtime_stats[symbol]['agg_trades'])[-100:]
                if recent_trades:
                    total_buy_volume = sum(t['value_usd'] for t in recent_trades if t['side'] == 'buy')
                    total_sell_volume = sum(t['value_usd'] for t in recent_trades if t['side'] == 'sell')
                    total_volume = total_buy_volume + total_sell_volume

                    large_buy_count = sum(1 for t in recent_trades if t['side'] == 'buy' and t['is_large_order'])
                    large_sell_count = sum(1 for t in recent_trades if t['side'] == 'sell' and t['is_large_order'])
                    whale_buy_count = sum(1 for t in recent_trades if t['side'] == 'buy' and t['is_whale_order'])
                    whale_sell_count = sum(1 for t in recent_trades if t['side'] == 'sell' and t['is_whale_order'])

                    # Calculate order flow metrics
                    self.realtime_stats[symbol].update({
                        'buy_sell_ratio': total_buy_volume / total_sell_volume if total_sell_volume > 0 else float('inf'),
                        'total_volume_100': total_volume,
                        'large_order_ratio': (large_buy_count + large_sell_count) / len(recent_trades),
                        'whale_activity': whale_buy_count + whale_sell_count,
                        'institutional_flow': ('BULLISH' if total_buy_volume > total_sell_volume * 1.2
                                               else 'BEARISH' if total_sell_volume > total_buy_volume * 1.2
                                               else 'NEUTRAL')
                    })

        except Exception as e:
            logger.error(f"Error adding aggregate trade to analysis for {symbol}: {e}")