integration fixes - COB and CNN
@@ -38,6 +38,20 @@
 - Ensure thread-safe access to multi-rate data streams
 - _Requirements: 1.6, 8.5_

+- [ ] 1.5. Fix WebSocket COB data processing errors
+  - Fix 'NoneType' object has no attribute 'append' errors in COB data processing
+  - Ensure proper initialization of data structures in MultiExchangeCOBProvider
+  - Add validation and defensive checks before accessing data structures
+  - Implement proper error handling for WebSocket data processing
+  - _Requirements: 1.1, 1.6, 8.5_
+
+- [ ] 1.6. Enhance error handling in COB data processing
+  - Add validation for incoming WebSocket data
+  - Implement reconnection logic with exponential backoff
+  - Add detailed logging for debugging COB data issues
+  - Ensure system continues operation with last valid data during failures
+  - _Requirements: 1.6, 8.5_
+
 ## Enhanced CNN Model Implementation

 - [ ] 2. Enhance the existing CNN model with standardized inputs/outputs
.kiro/specs/websocket-cob-data-fix/design.md (new file, 293 lines)
@@ -0,0 +1,293 @@
# WebSocket COB Data Fix Design Document

## Overview

This design document outlines the approach to fixing the WebSocket COB (Consolidated Order Book) data processing issue in the trading system. The current implementation fails with `'NoneType' object has no attribute 'append'` errors for both BTC/USDT and ETH/USDT pairs, which indicates that a data structure expected to be a list is actually None. The issue prevents the dashboard from functioning properly and must be addressed to ensure reliable real-time market data processing.
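The failure mode is easy to reproduce in isolation: a dictionary slot that was created as `None` (or never created at all) blows up on the first `append`. A minimal sketch, independent of the trading codebase:

```python
cache = {"BTC/USDT": None}          # slot exists but was never given a list

try:
    cache["BTC/USDT"].append(42)    # AttributeError: 'NoneType' object has no attribute 'append'
except AttributeError as e:
    print(e)

# Defensive pattern used throughout this fix: create the list before appending
if cache.get("BTC/USDT") is None:
    cache["BTC/USDT"] = []
cache["BTC/USDT"].append(42)
```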
## Architecture

The COB data processing pipeline involves several components:

1. **MultiExchangeCOBProvider**: Collects order book data from exchanges via WebSockets
2. **StandardizedDataProvider**: Extends DataProvider with standardized BaseDataInput functionality
3. **Dashboard Components**: Display COB data in the UI

The error occurs during WebSocket data processing, specifically when trying to append data to a collection that hasn't been properly initialized. The fix focuses on ensuring proper initialization of data structures and implementing robust error handling.

## Components and Interfaces

### 1. MultiExchangeCOBProvider

The `MultiExchangeCOBProvider` class is responsible for collecting order book data from exchanges and distributing it to subscribers. The issue appears to be in the WebSocket data processing logic, where data structures may not be properly initialized before use.

#### Key Issues to Address

1. **Data Structure Initialization**: Ensure all data structures (particularly collections that will have `append` called on them) are properly initialized during object creation.
2. **Subscriber Notification**: Fix the `_notify_cob_subscribers` method to handle edge cases and ensure data is properly formatted before notification.
3. **WebSocket Processing**: Enhance error handling in WebSocket processing methods to prevent cascading failures.

#### Implementation Details

```python
class MultiExchangeCOBProvider:
    def __init__(self, symbols: List[str], exchange_configs: Dict[str, ExchangeConfig]):
        # Existing initialization code...

        # Ensure all data structures are properly initialized
        self.cob_data_cache = {}   # Cache for COB data
        self.cob_subscribers = []  # List of callback functions
        self.exchange_order_books = {}
        self.session_trades = {}
        self.svp_cache = {}

        # Initialize data structures for each symbol
        for symbol in symbols:
            self.cob_data_cache[symbol] = {}
            self.exchange_order_books[symbol] = {}
            self.session_trades[symbol] = []
            self.svp_cache[symbol] = {}

            # Initialize exchange-specific data structures
            for exchange_name in self.active_exchanges:
                self.exchange_order_books[symbol][exchange_name] = {
                    'bids': {},
                    'asks': {},
                    'deep_bids': {},
                    'deep_asks': {},
                    'timestamp': datetime.now(),
                    'deep_timestamp': datetime.now(),
                    'connected': False,
                    'last_update_id': 0
                }

        logger.info(f"Multi-exchange COB provider initialized for symbols: {symbols}")

    async def _notify_cob_subscribers(self, symbol: str, cob_snapshot: Dict):
        """Notify all subscribers of COB data updates with improved error handling"""
        try:
            if not cob_snapshot:
                logger.warning(f"Attempted to notify subscribers with empty COB snapshot for {symbol}")
                return

            for callback in self.cob_subscribers:
                try:
                    if asyncio.iscoroutinefunction(callback):
                        await callback(symbol, cob_snapshot)
                    else:
                        callback(symbol, cob_snapshot)
                except Exception as e:
                    logger.error(f"Error in COB subscriber callback: {e}", exc_info=True)
        except Exception as e:
            logger.error(f"Error notifying COB subscribers: {e}", exc_info=True)
```
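An alternative to eagerly pre-building every nested dictionary is to make missing keys self-initializing. A sketch of that design choice (not what the provider currently does):

```python
from collections import defaultdict

# Missing symbols get an empty structure on first access,
# so append/update can never hit None.
session_trades = defaultdict(list)        # session_trades['ETH/USDT'] -> []
exchange_order_books = defaultdict(dict)  # exchange_order_books['ETH/USDT'] -> {}

session_trades['ETH/USDT'].append({'price': 3000.0, 'size': 0.5})
```

Explicit initialization in `__init__` was preferred here because it makes the expected shape of the data visible in one place.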
### 2. StandardizedDataProvider

The `StandardizedDataProvider` class extends the base `DataProvider` with standardized data input functionality. It needs to properly handle COB data and ensure all data structures are initialized.

#### Key Issues to Address

1. **COB Data Handling**: Ensure proper initialization and validation of COB data structures.
2. **Error Handling**: Improve error handling when processing COB data.
3. **Data Structure Consistency**: Maintain consistent data structures throughout the processing pipeline.

#### Implementation Details

```python
class StandardizedDataProvider(DataProvider):
    def __init__(self, symbols: List[str] = None, timeframes: List[str] = None):
        """Initialize the standardized data provider with proper data structure initialization"""
        super().__init__(symbols, timeframes)

        # Standardized data storage
        self.base_data_cache = {}  # {symbol: BaseDataInput}
        self.cob_data_cache = {}   # {symbol: COBData}

        # Model output management with extensible storage
        self.model_output_manager = ModelOutputManager(
            cache_dir=str(self.cache_dir / "model_outputs"),
            max_history=1000
        )

        # COB moving averages calculation
        self.cob_imbalance_history = {}  # {symbol: deque of (timestamp, imbalance_data)}
        self.ma_calculation_lock = Lock()

        # Initialize caches for each symbol
        for symbol in self.symbols:
            self.base_data_cache[symbol] = None
            self.cob_data_cache[symbol] = None
            self.cob_imbalance_history[symbol] = deque(maxlen=300)  # 5 minutes of 1s data

        # COB provider integration
        self.cob_provider = None
        self._initialize_cob_provider()

        logger.info("StandardizedDataProvider initialized with BaseDataInput support")

    def _process_cob_data(self, symbol: str, cob_snapshot: Dict):
        """Process COB data with improved error handling"""
        try:
            if not cob_snapshot:
                logger.warning(f"Received empty COB snapshot for {symbol}")
                return

            # Process COB data and update caches
            # ...

        except Exception as e:
            logger.error(f"Error processing COB data for {symbol}: {e}", exc_info=True)
```
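The body of `_process_cob_data` is elided above. One possible shape for its validation step, purely illustrative and using only field names that appear elsewhere in this document (`bids`, `asks`, `timestamp`):

```python
def _validate_cob_snapshot(cob_snapshot: dict) -> bool:
    """Illustrative check only: reject snapshots missing the fields the caches rely on."""
    required = ('bids', 'asks', 'timestamp')
    if not all(key in cob_snapshot for key in required):
        return False
    # Both sides must be lists; a None here is exactly the failure this fix targets
    return isinstance(cob_snapshot['bids'], list) and isinstance(cob_snapshot['asks'], list)
```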
### 3. WebSocket COB Data Processing

The WebSocket COB data processing logic needs to be enhanced to handle edge cases and ensure proper data structure initialization.

#### Key Issues to Address

1. **WebSocket Connection Management**: Improve connection management to handle disconnections gracefully.
2. **Data Processing**: Ensure data is properly validated before processing.
3. **Error Recovery**: Implement recovery mechanisms for WebSocket failures.

#### Implementation Details

```python
async def _stream_binance_orderbook(self, symbol: str, config: ExchangeConfig):
    """Stream order book data from Binance with improved error handling"""
    reconnect_delay = 1       # Start with 1 second delay
    max_reconnect_delay = 60  # Maximum delay of 60 seconds

    while self.is_streaming:
        try:
            ws_url = f"{config.websocket_url}{config.symbols_mapping[symbol].lower()}@depth20@100ms"
            logger.info(f"Connecting to Binance WebSocket: {ws_url}")

            if websockets is None or websockets_connect is None:
                raise ImportError("websockets module not available")

            async with websockets_connect(ws_url) as websocket:
                # Ensure data structures are initialized
                if symbol not in self.exchange_order_books:
                    self.exchange_order_books[symbol] = {}

                if 'binance' not in self.exchange_order_books[symbol]:
                    self.exchange_order_books[symbol]['binance'] = {
                        'bids': {},
                        'asks': {},
                        'deep_bids': {},
                        'deep_asks': {},
                        'timestamp': datetime.now(),
                        'deep_timestamp': datetime.now(),
                        'connected': False,
                        'last_update_id': 0
                    }

                self.exchange_order_books[symbol]['binance']['connected'] = True
                logger.info(f"Connected to Binance order book stream for {symbol}")

                # Reset reconnect delay on successful connection
                reconnect_delay = 1

                async for message in websocket:
                    if not self.is_streaming:
                        break

                    try:
                        data = json.loads(message)
                        await self._process_binance_orderbook(symbol, data)

                    except json.JSONDecodeError as e:
                        logger.error(f"Error parsing Binance message: {e}")
                    except Exception as e:
                        logger.error(f"Error processing Binance data: {e}", exc_info=True)

        except Exception as e:
            logger.error(f"Binance WebSocket error for {symbol}: {e}", exc_info=True)

            # Mark as disconnected
            if symbol in self.exchange_order_books and 'binance' in self.exchange_order_books[symbol]:
                self.exchange_order_books[symbol]['binance']['connected'] = False

            # Implement exponential backoff for reconnection
            logger.info(f"Reconnecting to Binance WebSocket for {symbol} in {reconnect_delay}s")
            await asyncio.sleep(reconnect_delay)
            reconnect_delay = min(reconnect_delay * 2, max_reconnect_delay)
```
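One refinement worth considering for the reconnect loop: adding random jitter to the backoff delay, so that several symbols disconnected by the same network event do not all reconnect in lockstep. A minimal sketch of that variation:

```python
import random

def next_reconnect_delay(current_delay: float, max_delay: float = 60.0) -> float:
    """Doubling backoff as above, spread over +/-25% to avoid thundering-herd reconnects."""
    return min(current_delay * 2, max_delay) * random.uniform(0.75, 1.25)
```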
## Data Models

The data models remain unchanged, but we need to ensure they are properly initialized and validated throughout the system.

### COBSnapshot

```python
@dataclass
class COBSnapshot:
    """Complete Consolidated Order Book snapshot"""
    symbol: str
    timestamp: datetime
    consolidated_bids: List[ConsolidatedOrderBookLevel]
    consolidated_asks: List[ConsolidatedOrderBookLevel]
    exchanges_active: List[str]
    volume_weighted_mid: float
    total_bid_liquidity: float
    total_ask_liquidity: float
    spread_bps: float
    liquidity_imbalance: float
    price_buckets: Dict[str, Dict[str, float]]  # Fine-grain volume buckets
```
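The snapshot does not define how `liquidity_imbalance` is derived; a common convention, assumed here for illustration only, is a normalized ratio in [-1, 1]:

```python
def liquidity_imbalance(total_bid_liquidity: float, total_ask_liquidity: float) -> float:
    """+1.0 = all liquidity on the bid side, -1.0 = all on the ask side (assumed convention)."""
    total = total_bid_liquidity + total_ask_liquidity
    if total == 0:
        return 0.0
    return (total_bid_liquidity - total_ask_liquidity) / total
```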
## Error Handling

### WebSocket Connection Errors

- Implement exponential backoff for reconnection attempts
- Log detailed error information
- Maintain system operation with last valid data

### Data Processing Errors

- Validate data before processing
- Handle edge cases gracefully
- Log detailed error information
- Continue operation with last valid data

### Subscriber Notification Errors

- Catch and log errors in subscriber callbacks
- Prevent errors in one subscriber from affecting others
- Ensure data is properly formatted before notification

## Testing Strategy

### Unit Testing

- Test data structure initialization
- Test error handling in WebSocket processing
- Test subscriber notification with various edge cases
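As a sketch of the subscriber-notification edge cases (assuming pytest with a hypothetical `provider` fixture that yields a `MultiExchangeCOBProvider`):

```python
import asyncio
from unittest.mock import MagicMock

def test_empty_snapshot_is_not_forwarded(provider):
    """An empty snapshot should be logged and skipped, never forwarded."""
    callback = MagicMock()
    provider.cob_subscribers = [callback]
    asyncio.run(provider._notify_cob_subscribers('ETH/USDT', {}))
    callback.assert_not_called()

def test_failing_subscriber_does_not_block_others(provider):
    """An exception in one callback must not prevent later callbacks from running."""
    bad = MagicMock(side_effect=RuntimeError('boom'))
    good = MagicMock()
    provider.cob_subscribers = [bad, good]
    asyncio.run(provider._notify_cob_subscribers('ETH/USDT', {'bids': [], 'asks': []}))
    good.assert_called_once()
```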
### Integration Testing

- Test end-to-end COB data flow
- Test recovery from WebSocket disconnections
- Test handling of malformed data

### System Testing

- Test dashboard operation with COB data
- Test system stability under high load
- Test recovery from various failure scenarios

## Implementation Plan

1. Fix data structure initialization in `MultiExchangeCOBProvider`
2. Enhance error handling in WebSocket processing
3. Improve subscriber notification logic
4. Update `StandardizedDataProvider` to properly handle COB data
5. Add comprehensive logging for debugging
6. Implement recovery mechanisms for WebSocket failures
7. Test all changes thoroughly

## Conclusion

This design addresses the WebSocket COB data processing issue by ensuring proper initialization of data structures, implementing robust error handling, and adding recovery mechanisms for WebSocket failures. These changes will improve the reliability and stability of the trading system, allowing traders to monitor market data in real time without interruption.
.kiro/specs/websocket-cob-data-fix/requirements.md (new file, 43 lines)
@@ -0,0 +1,43 @@
# Requirements Document

## Introduction

The WebSocket COB Data Fix is needed to address a critical issue in the trading system where WebSocket COB (Consolidated Order Book) data processing is failing with the error `'NoneType' object has no attribute 'append'`. The error occurs for both BTC/USDT and ETH/USDT pairs and prevents the dashboard from functioning properly. The fix will ensure proper initialization and handling of data structures in the COB data processing pipeline.

## Requirements

### Requirement 1: Fix WebSocket COB Data Processing

**User Story:** As a trader, I want the WebSocket COB data processing to work reliably without errors, so that I can monitor market data in real-time and make informed trading decisions.

#### Acceptance Criteria

1. WHEN WebSocket COB data is received for any trading pair THEN the system SHALL process it without throwing 'NoneType' object has no attribute 'append' errors
2. WHEN the dashboard is started THEN all data structures for COB processing SHALL be properly initialized
3. WHEN COB data is processed THEN the system SHALL handle edge cases such as missing or incomplete data gracefully
4. WHEN a WebSocket connection is established THEN the system SHALL verify that all required data structures are initialized before processing data
5. WHEN COB data is being processed THEN the system SHALL log appropriate debug information to help diagnose any issues

### Requirement 2: Ensure Data Structure Consistency

**User Story:** As a system administrator, I want consistent data structures throughout the COB processing pipeline, so that data can flow smoothly between components without errors.

#### Acceptance Criteria

1. WHEN the `multi_exchange_cob_provider` initializes THEN it SHALL properly initialize all required data structures
2. WHEN the `standardized_data_provider` receives COB data THEN it SHALL validate the data structure before processing
3. WHEN COB data is passed between components THEN the system SHALL ensure type consistency
4. WHEN new COB data arrives THEN the system SHALL update the data structures atomically to prevent race conditions
5. WHEN a component subscribes to COB updates THEN the system SHALL verify the subscriber can handle the data format

### Requirement 3: Improve Error Handling and Recovery

**User Story:** As a system operator, I want robust error handling and recovery mechanisms in the COB data processing pipeline, so that temporary failures don't cause the entire system to crash.

#### Acceptance Criteria

1. WHEN an error occurs in COB data processing THEN the system SHALL log detailed error information
2. WHEN a WebSocket connection fails THEN the system SHALL attempt to reconnect automatically
3. WHEN data processing fails THEN the system SHALL continue operation with the last valid data
4. WHEN the system recovers from an error THEN it SHALL restore normal operation without manual intervention
5. WHEN multiple consecutive errors occur THEN the system SHALL apply exponential backoff to avoid overwhelming the remote data source
.kiro/specs/websocket-cob-data-fix/tasks.md (new file, 115 lines)
@@ -0,0 +1,115 @@
# Implementation Plan

- [ ] 1. Fix data structure initialization in MultiExchangeCOBProvider
  - Ensure all collections are properly initialized during object creation
  - Add defensive checks before accessing data structures
  - Implement proper initialization for symbol-specific data structures
  - _Requirements: 1.1, 1.2, 2.1_

- [ ] 1.1. Update MultiExchangeCOBProvider constructor
  - Modify `__init__` method to properly initialize all data structures
  - Ensure `exchange_order_books` is initialized for each symbol and exchange
  - Initialize `session_trades` and `svp_cache` for each symbol
  - Add defensive checks to prevent NoneType errors
  - _Requirements: 1.2, 2.1_

- [ ] 1.2. Fix `_notify_cob_subscribers` method
  - Add validation to ensure `cob_snapshot` is not None before processing
  - Add defensive checks before accessing `cob_snapshot` attributes
  - Improve error handling for subscriber callbacks
  - Add detailed logging for debugging
  - _Requirements: 1.1, 1.5, 2.3_

- [ ] 2. Enhance WebSocket data processing in MultiExchangeCOBProvider
  - Improve error handling in WebSocket connection methods
  - Add validation for incoming data
  - Implement reconnection logic with exponential backoff
  - _Requirements: 1.3, 1.4, 3.1, 3.2_

- [ ] 2.1. Update `_stream_binance_orderbook` method
  - Add data structure initialization checks
  - Implement exponential backoff for reconnection attempts
  - Add detailed error logging
  - Ensure proper cleanup on disconnection
  - _Requirements: 1.4, 3.2, 3.4_

- [ ] 2.2. Fix `_process_binance_orderbook` method
  - Add validation for incoming data
  - Ensure data structures exist before updating
  - Add defensive checks to prevent NoneType errors
  - Improve error handling and logging
  - _Requirements: 1.1, 1.3, 3.1_

- [ ] 3. Update StandardizedDataProvider to handle COB data properly
  - Improve initialization of COB-related data structures
  - Add validation for COB data
  - Enhance error handling for COB data processing
  - _Requirements: 1.3, 2.2, 2.3_

- [ ] 3.1. Fix `_get_cob_data` method
  - Add validation for COB provider availability
  - Ensure proper initialization of COB data structures
  - Add defensive checks to prevent NoneType errors
  - Improve error handling and logging
  - _Requirements: 1.3, 2.2, 3.3_

- [ ] 3.2. Update `_calculate_cob_moving_averages` method
  - Add validation for input data
  - Ensure proper initialization of moving average data structures
  - Add defensive checks to prevent NoneType errors
  - Improve error handling for edge cases
  - _Requirements: 1.3, 2.2, 3.3_

- [ ] 4. Implement recovery mechanisms for WebSocket failures
  - Add state tracking for WebSocket connections
  - Implement automatic reconnection with exponential backoff
  - Add fallback mechanisms for temporary failures
  - _Requirements: 3.2, 3.3, 3.4_

- [ ] 4.1. Add connection state management
  - Track connection state for each WebSocket
  - Implement health check mechanism
  - Add reconnection logic based on connection state
  - _Requirements: 3.2, 3.4_

- [ ] 4.2. Implement data recovery mechanisms (see the sketch after this item)
  - Add caching for last valid data
  - Implement fallback to cached data during connection issues
  - Add mechanism to rebuild state after reconnection
  - _Requirements: 3.3, 3.4_
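A minimal sketch of what tasks 4.1 and 4.2 could look like together; the names below are illustrative, not existing code:

```python
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Optional

@dataclass
class ConnectionState:
    """Per-symbol WebSocket state used for health checks (4.1) and cached fallback (4.2)."""
    connected: bool = False
    last_message_at: Optional[datetime] = None
    reconnect_attempts: int = 0
    last_valid_snapshot: Any = None  # served to consumers while the socket is down

    def record_message(self, snapshot: Any) -> None:
        """Called on every successful message: refresh health and the fallback cache."""
        self.connected = True
        self.reconnect_attempts = 0
        self.last_message_at = datetime.now()
        self.last_valid_snapshot = snapshot

    def is_healthy(self, max_silence_seconds: float = 10.0) -> bool:
        """A connection that has been silent too long is treated as unhealthy."""
        if not self.connected or self.last_message_at is None:
            return False
        return (datetime.now() - self.last_message_at).total_seconds() <= max_silence_seconds
```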
- [ ] 5. Add comprehensive logging for debugging
  - Add detailed logging throughout the COB processing pipeline
  - Include context information in log messages
  - Add performance metrics logging
  - _Requirements: 1.5, 3.1_

- [ ] 5.1. Enhance logging in MultiExchangeCOBProvider
  - Add detailed logging for WebSocket connections
  - Log data processing steps and outcomes
  - Add performance metrics for data processing
  - _Requirements: 1.5, 3.1_

- [ ] 5.2. Add logging in StandardizedDataProvider
  - Log COB data processing steps
  - Add validation logging
  - Include performance metrics for data processing
  - _Requirements: 1.5, 3.1_

- [ ] 6. Test all changes thoroughly
  - Write unit tests for fixed components
  - Test integration between components
  - Verify dashboard operation with COB data
  - _Requirements: 1.1, 2.3, 3.4_

- [ ] 6.1. Write unit tests for MultiExchangeCOBProvider
  - Test data structure initialization
  - Test WebSocket processing with mock data
  - Test error handling and recovery
  - _Requirements: 1.1, 1.3, 3.1_

- [ ] 6.2. Test integration with dashboard
  - Verify COB data display in dashboard
  - Test system stability under load
  - Verify recovery from failures
  - _Requirements: 1.1, 3.3, 3.4_
@@ -12,7 +12,7 @@ PyTorch implementation only.
 """

 # Import core models
-from NN.models.dqn_agent import DQNAgent, MassiveRLNetwork
+from NN.models.dqn_agent import DQNAgent
 from NN.models.cob_rl_model import COBRLModelInterface
 from NN.models.advanced_transformer_trading import AdvancedTradingTransformer, TradingTransformerConfig
 from NN.models.standardized_cnn import StandardizedCNN  # Use the unified CNN model

@@ -23,5 +23,5 @@ from NN.models.model_interfaces import ModelInterface, CNNModelInterface, RLAgen
 # Export the unified StandardizedCNN as CNNModel for compatibility
 CNNModel = StandardizedCNN

-__all__ = ['CNNModel', 'StandardizedCNN', 'DQNAgent', 'MassiveRLNetwork', 'COBRLModelInterface', 'AdvancedTradingTransformer', 'TradingTransformerConfig',
+__all__ = ['CNNModel', 'StandardizedCNN', 'DQNAgent', 'COBRLModelInterface', 'AdvancedTradingTransformer', 'TradingTransformerConfig',
            'ModelInterface', 'CNNModelInterface', 'RLAgentInterface', 'ExtremaTrainerInterface']
@@ -1453,17 +1453,37 @@ class DataProvider:
     async def _on_enhanced_cob_data(self, symbol: str, cob_data: Dict):
         """Handle COB data from Enhanced WebSocket"""
         try:
+            # Ensure cob_websocket_data is initialized
+            if not hasattr(self, 'cob_websocket_data'):
+                self.cob_websocket_data = {}
+
             # Store the latest COB data
             self.cob_websocket_data[symbol] = cob_data
+
+            # Ensure cob_data_cache is initialized
+            if not hasattr(self, 'cob_data_cache'):
+                self.cob_data_cache = {}
+
             # Update COB data cache for distribution
             binance_symbol = symbol.replace('/', '').upper()
-            if binance_symbol in self.cob_data_cache:
-                self.cob_data_cache[binance_symbol].append({
-                    'timestamp': cob_data.get('timestamp', datetime.now()),
-                    'data': cob_data,
-                    'source': 'enhanced_websocket'
-                })
+            from collections import deque
+            if binance_symbol not in self.cob_data_cache or self.cob_data_cache[binance_symbol] is None:
+                self.cob_data_cache[binance_symbol] = deque(maxlen=300)
+
+            # Ensure the cache entry is a deque before appending
+            if not isinstance(self.cob_data_cache[binance_symbol], deque):
+                self.cob_data_cache[binance_symbol] = deque(maxlen=300)
+
+            self.cob_data_cache[binance_symbol].append({
+                'timestamp': cob_data.get('timestamp', datetime.now()),
+                'data': cob_data,
+                'source': 'enhanced_websocket'
+            })
+
+            # Ensure cob_data_callbacks is initialized
+            if not hasattr(self, 'cob_data_callbacks'):
+                self.cob_data_callbacks = []
+
             # Distribute to COB data callbacks
             for callback in self.cob_data_callbacks:

@@ -1472,6 +1492,13 @@ class DataProvider:
                 except Exception as e:
                     logger.error(f"Error in COB data callback: {e}")

+            # Ensure distribution_stats is initialized
+            if not hasattr(self, 'distribution_stats'):
+                self.distribution_stats = {
+                    'total_ticks_received': 0,
+                    'last_tick_time': {}
+                }
+
             # Update distribution stats
             self.distribution_stats['total_ticks_received'] += 1
             self.distribution_stats['last_tick_time'][symbol] = datetime.now()

@@ -1479,7 +1506,7 @@ class DataProvider:
             logger.debug(f"Enhanced COB data received for {symbol}: {len(cob_data.get('bids', []))} bids, {len(cob_data.get('asks', []))} asks")

         except Exception as e:
-            logger.error(f"Error handling enhanced COB data for {symbol}: {e}")
+            logger.error(f"Error handling enhanced COB data for {symbol}: {e}", exc_info=True)

     async def _on_websocket_status_update(self, status_data: Dict):
         """Handle WebSocket status updates"""

@@ -1488,12 +1515,16 @@ class DataProvider:
             status = status_data.get('status')
             message = status_data.get('message', '')

+            # Ensure cob_websocket_status is initialized
+            if not hasattr(self, 'cob_websocket_status'):
+                self.cob_websocket_status = {}
+
             if symbol:
                 self.cob_websocket_status[symbol] = status
                 logger.info(f"🔌 Enhanced WebSocket status for {symbol}: {status} - {message}")

         except Exception as e:
-            logger.error(f"Error handling WebSocket status update: {e}")
+            logger.error(f"Error handling WebSocket status update: {e}", exc_info=True)

     async def _start_fallback_websocket_streaming(self):
         """Fallback to old WebSocket method if Enhanced COB WebSocket fails"""

@@ -3499,9 +3530,14 @@ class DataProvider:
             # Convert datetime to timestamp
             cob_data['timestamp'] = cob_data['timestamp'].timestamp()

-            # Store raw tick
+            # Store raw tick - ensure proper initialization
             if not hasattr(self, 'cob_raw_ticks'):
-                self.cob_raw_ticks = {'ETH/USDT': [], 'BTC/USDT': []}
+                self.cob_raw_ticks = {}
+
+            # Ensure symbol keys exist in the dictionary
+            for sym in ['ETH/USDT', 'BTC/USDT']:
+                if sym not in self.cob_raw_ticks:
+                    self.cob_raw_ticks[sym] = []

             # Add to raw ticks with size limit (keep last 10 seconds of data)
             max_ticks = 1000  # ~10 seconds at 100 updates/sec

@@ -3514,6 +3550,7 @@ class DataProvider:
             if not hasattr(self, 'cob_data_cache'):
                 self.cob_data_cache = {}

+            # Ensure symbol key exists in the cache
             if symbol not in self.cob_data_cache:
                 self.cob_data_cache[symbol] = []

@@ -3537,7 +3574,7 @@ class DataProvider:
             logger.debug(f"Processed WebSocket COB tick for {symbol}: {len(cob_data.get('bids', []))} bids, {len(cob_data.get('asks', []))} asks")

         except Exception as e:
-            logger.error(f"Error processing WebSocket COB data for {symbol}: {e}")
+            logger.error(f"Error processing WebSocket COB data for {symbol}: {e}", exc_info=True)

     def _on_cob_websocket_status(self, status_data: dict):
         """Handle WebSocket status updates"""
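The repeated `hasattr` guards above are deliberately conservative, since these attributes can be created from several code paths. A design note: the same guarantee can be expressed more compactly with `dict.setdefault` once the container itself is known to exist. A sketch (equivalent behavior, not the committed code):

```python
from collections import deque
from datetime import datetime

def append_cob_entry(cob_data_cache: dict, key: str, cob_data: dict) -> None:
    """Create-or-reuse the per-symbol deque in one step, then append."""
    cache = cob_data_cache.setdefault(key, deque(maxlen=300))
    cache.append({'timestamp': datetime.now(), 'data': cob_data, 'source': 'enhanced_websocket'})
```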
@@ -1,261 +1,401 @@
-"""
-Trading Executor Fix
-
-This module provides fixes for the trading executor to address:
-1. Duplicate entry prices
-2. P&L calculation issues
-3. Position tracking problems
-
-Apply these fixes by importing and applying the patch in main.py
-"""
-
-import logging
-import time
-from datetime import datetime
-from typing import Dict, Any, Optional
-
-logger = logging.getLogger(__name__)
-
-class TradingExecutorFix:
-    """Fixes for the TradingExecutor class"""
-
-    @staticmethod
-    def apply_fixes(trading_executor):
-        """Apply all fixes to the trading executor"""
-        logger.info("Applying TradingExecutor fixes...")
-
-        # Store original methods for patching
-        original_execute_action = trading_executor.execute_action
-        original_calculate_pnl = getattr(trading_executor, '_calculate_pnl', None)
-
-        # Apply fixes
-        TradingExecutorFix._fix_price_caching(trading_executor)
-        TradingExecutorFix._fix_pnl_calculation(trading_executor, original_calculate_pnl)
-        TradingExecutorFix._fix_execute_action(trading_executor, original_execute_action)
-        TradingExecutorFix._add_trade_cooldown(trading_executor)
-        TradingExecutorFix._fix_position_tracking(trading_executor)
-
-        logger.info("TradingExecutor fixes applied successfully")
-        return trading_executor
-
-    @staticmethod
-    def _fix_price_caching(trading_executor):
-        """Fix price caching to prevent duplicate entry prices"""
-        # Add a price cache timestamp to track when prices were last updated
-        trading_executor.price_cache_timestamp = {}
-
-        # Store original get_current_price method
-        original_get_current_price = trading_executor.get_current_price
-
-        def get_current_price_fixed(self, symbol):
-            """Fixed get_current_price method with cache invalidation"""
-            now = time.time()
-
-            # Force price refresh if cache is older than 5 seconds
-            if symbol in self.price_cache_timestamp:
-                cache_age = now - self.price_cache_timestamp.get(symbol, 0)
-                if cache_age > 5:  # 5 seconds max cache age
-                    # Clear price cache for this symbol
-                    if hasattr(self, 'current_prices') and symbol in self.current_prices:
-                        del self.current_prices[symbol]
-                        logger.debug(f"Price cache for {symbol} invalidated (age: {cache_age:.1f}s)")
-
-            # Call original method to get fresh price
-            price = original_get_current_price(symbol)
-
-            # Update cache timestamp
-            self.price_cache_timestamp[symbol] = now
-
-            return price
-
-        # Apply the patch
-        trading_executor.get_current_price = get_current_price_fixed.__get__(trading_executor)
-        logger.info("Price caching fix applied")
-
-    @staticmethod
-    def _fix_pnl_calculation(trading_executor, original_calculate_pnl):
-        """Fix P&L calculation to ensure accuracy"""
-        def calculate_pnl_fixed(self, position, current_price=None):
-            """Fixed P&L calculation with proper handling of position side"""
-            try:
-                # Get position details
-                entry_price = position.entry_price
-                size = position.size
-                side = position.side
-
-                # Use provided price or get current price
-                if current_price is None:
-                    current_price = self.get_current_price(position.symbol)
-
-                # Calculate P&L based on position side
-                if side == 'LONG':
-                    pnl = (current_price - entry_price) * size
-                else:  # SHORT
-                    pnl = (entry_price - current_price) * size
-
-                # Calculate fees (if available)
-                fees = getattr(position, 'fees', 0.0)
-
-                # Return both gross and net P&L
-                return {
-                    'gross_pnl': pnl,
-                    'fees': fees,
-                    'net_pnl': pnl - fees
-                }
-
-            except Exception as e:
-                logger.error(f"Error calculating P&L: {e}")
-                return {'gross_pnl': 0.0, 'fees': 0.0, 'net_pnl': 0.0}
-
-        # Apply the patch if original method exists
-        if original_calculate_pnl:
-            trading_executor._calculate_pnl = calculate_pnl_fixed.__get__(trading_executor)
-            logger.info("P&L calculation fix applied")
-        else:
-            # Add the method if it doesn't exist
-            trading_executor._calculate_pnl = calculate_pnl_fixed.__get__(trading_executor)
-            logger.info("P&L calculation method added")
-
-    @staticmethod
-    def _fix_execute_action(trading_executor, original_execute_action):
-        """Fix execute_action to prevent duplicate entries and ensure proper price updates"""
-        def execute_action_fixed(self, decision):
-            """Fixed execute_action with duplicate entry prevention"""
-            try:
-                symbol = decision.symbol
-                action = decision.action
-
-                # Check for duplicate entry (same price as recent entry)
-                if hasattr(self, 'recent_entries') and symbol in self.recent_entries:
-                    recent_entry = self.recent_entries[symbol]
-                    current_price = self.get_current_price(symbol)
-
-                    # If price is within 0.1% of recent entry, consider it a duplicate
-                    price_diff_pct = abs(current_price - recent_entry['price']) / recent_entry['price'] * 100
-                    time_diff = time.time() - recent_entry['timestamp']
-
-                    if price_diff_pct < 0.1 and time_diff < 60:  # Within 0.1% and 60 seconds
-                        logger.warning(f"Preventing duplicate entry for {symbol} at ${current_price:.2f} "
-                                       f"(recent entry: ${recent_entry['price']:.2f}, {time_diff:.1f}s ago)")
-
-                        # Mark decision as blocked
-                        decision.blocked = True
-                        decision.blocked_reason = "Duplicate entry prevention"
-                        return False
-
-                # Check trade cooldown
-                if hasattr(self, '_check_trade_cooldown'):
-                    if not self._check_trade_cooldown(symbol, action):
-                        # Mark decision as blocked
-                        decision.blocked = True
-                        decision.blocked_reason = "Trade cooldown active"
-                        return False
-
-                # Force price refresh before execution
-                fresh_price = self.get_current_price(symbol)
-                logger.info(f"Using fresh price for {symbol}: ${fresh_price:.2f}")
-
-                # Update decision price with fresh price
-                decision.price = fresh_price
-
-                # Call original execute_action
-                result = original_execute_action(decision)
-
-                # If execution was successful, record the entry
-                if result and not getattr(decision, 'blocked', False):
-                    if not hasattr(self, 'recent_entries'):
-                        self.recent_entries = {}
-
-                    self.recent_entries[symbol] = {
-                        'price': fresh_price,
-                        'timestamp': time.time(),
-                        'action': action
-                    }
-
-                    # Record last trade time for cooldown
-                    if not hasattr(self, 'last_trade_time'):
-                        self.last_trade_time = {}
-
-                    self.last_trade_time[symbol] = time.time()
-
-                return result
-
-            except Exception as e:
-                logger.error(f"Error in execute_action_fixed: {e}")
-                return False
-
-        # Apply the patch
-        trading_executor.execute_action = execute_action_fixed.__get__(trading_executor)
-
-        # Initialize recent entries dict if it doesn't exist
-        if not hasattr(trading_executor, 'recent_entries'):
-            trading_executor.recent_entries = {}
-
-        logger.info("Execute action fix applied")
-
-    @staticmethod
-    def _add_trade_cooldown(trading_executor):
-        """Add trade cooldown to prevent rapid consecutive trades"""
-        # Add cooldown settings
-        trading_executor.trade_cooldown_seconds = 30  # 30 seconds between trades
-
-        if not hasattr(trading_executor, 'last_trade_time'):
-            trading_executor.last_trade_time = {}
-
-        def check_trade_cooldown(self, symbol, action):
-            """Check if trade cooldown is active for a symbol"""
-            if not hasattr(self, 'last_trade_time'):
-                self.last_trade_time = {}
-                return True
-
-            if symbol not in self.last_trade_time:
-                return True
-
-            # Get time since last trade
-            time_since_last = time.time() - self.last_trade_time[symbol]
-
-            # Check if cooldown is still active
-            if time_since_last < self.trade_cooldown_seconds:
-                logger.warning(f"Trade cooldown active for {symbol}: {time_since_last:.1f}s elapsed, "
-                               f"need {self.trade_cooldown_seconds}s")
-                return False
-
-            return True
-
-        # Add the method
-        trading_executor._check_trade_cooldown = check_trade_cooldown.__get__(trading_executor)
-        logger.info("Trade cooldown feature added")
-
-    @staticmethod
-    def _fix_position_tracking(trading_executor):
-        """Fix position tracking to ensure proper reset between trades"""
-        # Store original close_position method
-        original_close_position = getattr(trading_executor, 'close_position', None)
-
-        if original_close_position:
-            def close_position_fixed(self, symbol, price=None):
-                """Fixed close_position with proper position cleanup"""
-                try:
-                    # Call original close_position
-                    result = original_close_position(symbol, price)
-
-                    # Ensure position is fully cleaned up
-                    if symbol in self.positions:
-                        del self.positions[symbol]
-
-                    # Clear recent entry for this symbol
-                    if hasattr(self, 'recent_entries') and symbol in self.recent_entries:
-                        del self.recent_entries[symbol]
-
-                    logger.info(f"Position for {symbol} fully cleaned up after close")
-                    return result
-
-                except Exception as e:
-                    logger.error(f"Error in close_position_fixed: {e}")
-                    return False
-
-            # Apply the patch
-            trading_executor.close_position = close_position_fixed.__get__(trading_executor)
-            logger.info("Position tracking fix applied")
-        else:
-            logger.warning("close_position method not found, skipping position tracking fix")
+"""
+Trading Executor Fix - Addresses issues with entry/exit prices and P&L calculations
+
+This module provides fixes for:
+1. Identical entry prices issue
+2. Price caching problems
+3. Position tracking reset logic
+4. Trade cooldown implementation
+5. P&L calculation verification
+
+Apply these fixes to the TradingExecutor class to improve trade execution reliability.
+"""
+
+import logging
+import time
+from datetime import datetime, timedelta
+from typing import Dict, List, Optional, Any, Union
+
+logger = logging.getLogger(__name__)
+
+class TradingExecutorFix:
+    """
+    Fixes for the TradingExecutor class to address entry/exit price issues
+    and improve P&L calculation accuracy.
+    """
+
+    def __init__(self, trading_executor):
+        """
+        Initialize the fix with a reference to the trading executor
+
+        Args:
+            trading_executor: The TradingExecutor instance to fix
+        """
+        self.trading_executor = trading_executor
+
+        # Add cooldown tracking
+        self.last_trade_time = {}  # {symbol: timestamp}
+        self.min_trade_cooldown = 30  # 30 seconds minimum between trades
+
+        # Add price history for validation
+        self.recent_entry_prices = {}  # {symbol: [recent_prices]}
+        self.max_price_history = 10  # Keep last 10 entry prices
+
+        # Add position reset tracking
+        self.position_reset_flags = {}  # {symbol: bool}
+
+        # Add price update tracking
+        self.last_price_update = {}  # {symbol: timestamp}
+        self.price_update_threshold = 5  # 5 seconds max since last price update
+
+        # Add P&L verification
+        self.trade_history = {}  # {symbol: [trade_records]}
+
+        logger.info("TradingExecutorFix initialized - addressing entry/exit price issues")
+
+    def apply_fixes(self):
+        """Apply all fixes to the trading executor"""
+        self._patch_execute_action()
+        self._patch_close_position()
+        self._patch_calculate_pnl()
+        self._patch_update_prices()
+
+        logger.info("All trading executor fixes applied successfully")
+
+    def _patch_execute_action(self):
+        """Patch the execute_action method to add price validation and cooldown"""
+        original_execute_action = self.trading_executor.execute_action
+
+        def execute_action_with_fixes(decision):
+            """Enhanced execute_action with price validation and cooldown"""
+            try:
+                symbol = decision.symbol
+                action = decision.action
+                current_time = datetime.now()
+
+                # 1. Check cooldown period
+                if symbol in self.last_trade_time:
+                    time_since_last_trade = (current_time - self.last_trade_time[symbol]).total_seconds()
+                    if time_since_last_trade < self.min_trade_cooldown:
+                        logger.warning(f"Trade rejected: Cooldown period ({time_since_last_trade:.1f}s < {self.min_trade_cooldown}s) for {symbol}")
+                        return False
+
+                # 2. Validate price freshness
+                if symbol in self.last_price_update:
+                    time_since_update = (current_time - self.last_price_update[symbol]).total_seconds()
+                    if time_since_update > self.price_update_threshold:
+                        logger.warning(f"Trade rejected: Price data stale ({time_since_update:.1f}s > {self.price_update_threshold}s) for {symbol}")
+                        # Force price refresh
+                        self._refresh_price(symbol)
+                        return False
+
+                # 3. Validate entry price against recent history
+                current_price = self._get_current_price(symbol)
+                if symbol in self.recent_entry_prices and len(self.recent_entry_prices[symbol]) > 0:
+                    # Check if price is identical to any recent entry
+                    if current_price in self.recent_entry_prices[symbol]:
+                        logger.warning(f"Trade rejected: Duplicate entry price ${current_price} for {symbol}")
+                        return False
+
+                # 4. Ensure position is properly reset before new entry
+                if not self._ensure_position_reset(symbol):
+                    logger.warning(f"Trade rejected: Position not properly reset for {symbol}")
+                    return False
+
+                # Execute the original action
+                result = original_execute_action(decision)
+
+                # If successful, update tracking
+                if result:
+                    # Update cooldown timestamp
+                    self.last_trade_time[symbol] = current_time
+
+                    # Update price history
+                    if symbol not in self.recent_entry_prices:
+                        self.recent_entry_prices[symbol] = []
+
+                    self.recent_entry_prices[symbol].append(current_price)
+                    # Keep only the most recent prices
+                    if len(self.recent_entry_prices[symbol]) > self.max_price_history:
+                        self.recent_entry_prices[symbol] = self.recent_entry_prices[symbol][-self.max_price_history:]
+
+                    # Mark position as active
+                    self.position_reset_flags[symbol] = False
+
+                    logger.info(f"Trade executed: {action} {symbol} at ${current_price} with validation")
+
+                return result
+
+            except Exception as e:
+                logger.error(f"Error in execute_action_with_fixes: {e}")
+                return original_execute_action(decision)
+
+        # Replace the original method
+        self.trading_executor.execute_action = execute_action_with_fixes
+        logger.info("Patched execute_action with price validation and cooldown")
+
+    def _patch_close_position(self):
+        """Patch the close_position method to ensure proper position reset"""
+        original_close_position = self.trading_executor.close_position
+
+        def close_position_with_fixes(symbol, **kwargs):
+            """Enhanced close_position with proper reset logic"""
+            try:
+                # Get current price for P&L verification
+                exit_price = self._get_current_price(symbol)
+
+                # Call original close position
+                result = original_close_position(symbol, **kwargs)
+
+                if result:
+                    # Mark position as reset
+                    self.position_reset_flags[symbol] = True
+
+                    # Record trade for verification
+                    if hasattr(self.trading_executor, 'positions') and symbol in self.trading_executor.positions:
+                        position = self.trading_executor.positions[symbol]
+
+                        # Create trade record
+                        trade_record = {
+                            'symbol': symbol,
+                            'entry_time': getattr(position, 'entry_time', datetime.now()),
+                            'exit_time': datetime.now(),
+                            'entry_price': getattr(position, 'entry_price', 0),
+                            'exit_price': exit_price,
+                            'size': getattr(position, 'size', 0),
+                            'side': getattr(position, 'side', 'UNKNOWN'),
+                            'pnl': self._calculate_verified_pnl(position, exit_price),
+                            'fees': getattr(position, 'fees', 0),
+                            'hold_time_seconds': (datetime.now() - getattr(position, 'entry_time', datetime.now())).total_seconds()
+                        }
+
+                        # Store trade record
+                        if symbol not in self.trade_history:
+                            self.trade_history[symbol] = []
+                        self.trade_history[symbol].append(trade_record)
+
+                        logger.info(f"Position closed: {symbol} at ${exit_price} with verified P&L: ${trade_record['pnl']:.2f}")
+
+                return result
+
+            except Exception as e:
+                logger.error(f"Error in close_position_with_fixes: {e}")
+                return original_close_position(symbol, **kwargs)
+
+        # Replace the original method
+        self.trading_executor.close_position = close_position_with_fixes
+        logger.info("Patched close_position with proper reset logic")
+
+    def _patch_calculate_pnl(self):
+        """Patch the calculate_pnl method to ensure accurate P&L calculation"""
+        original_calculate_pnl = getattr(self.trading_executor, 'calculate_pnl', None)
+
+        def calculate_pnl_with_fixes(position, current_price=None):
+            """Enhanced calculate_pnl with verification"""
+            try:
+                # If no original method, implement our own
+                if original_calculate_pnl is None:
+                    return self._calculate_verified_pnl(position, current_price)
+
+                # Call original method
+                original_pnl = original_calculate_pnl(position, current_price)
+
+                # Calculate our verified P&L
+                verified_pnl = self._calculate_verified_pnl(position, current_price)
+
+                # If there's a significant difference, log it
+                if abs(original_pnl - verified_pnl) > 0.01:
+                    logger.warning(f"P&L calculation discrepancy: original=${original_pnl:.2f}, verified=${verified_pnl:.2f}")
+                    # Use the verified P&L
+                    return verified_pnl
+
+                return original_pnl
+
+            except Exception as e:
+                logger.error(f"Error in calculate_pnl_with_fixes: {e}")
+                if original_calculate_pnl:
+                    return original_calculate_pnl(position, current_price)
+                return 0.0
+
+        # Replace the original method if it exists
+        if original_calculate_pnl:
+            self.trading_executor.calculate_pnl = calculate_pnl_with_fixes
+            logger.info("Patched calculate_pnl with verification")
+        else:
+            # Add the method if it doesn't exist
+            self.trading_executor.calculate_pnl = calculate_pnl_with_fixes
+            logger.info("Added calculate_pnl method with verification")
+
+    def _patch_update_prices(self):
+        """Patch the update_prices method to track price updates"""
+        original_update_prices = getattr(self.trading_executor, 'update_prices', None)
+
+        def update_prices_with_tracking(prices):
+            """Enhanced update_prices with timestamp tracking"""
+            try:
+                # Call original method if it exists
+                if original_update_prices:
+                    result = original_update_prices(prices)
+                else:
+                    # If no original method, update prices directly
+                    if hasattr(self.trading_executor, 'current_prices'):
+                        self.trading_executor.current_prices.update(prices)
+                    result = True
+
+                # Track update timestamps
+                current_time = datetime.now()
+                for symbol in prices:
+                    self.last_price_update[symbol] = current_time
+
+                return result
+
+            except Exception as e:
+                logger.error(f"Error in update_prices_with_tracking: {e}")
+                if original_update_prices:
+                    return original_update_prices(prices)
+                return False
+
+        # Replace the original method if it exists
+        if original_update_prices:
+            self.trading_executor.update_prices = update_prices_with_tracking
+            logger.info("Patched update_prices with timestamp tracking")
+        else:
+            # Add the method if it doesn't exist
+            self.trading_executor.update_prices = update_prices_with_tracking
+            logger.info("Added update_prices method with timestamp tracking")
+
+    def _calculate_verified_pnl(self, position, current_price=None):
+        """Calculate verified P&L for a position"""
+        try:
+            # Get position details
+            entry_price = getattr(position, 'entry_price', 0)
+            size = getattr(position, 'size', 0)
+            side = getattr(position, 'side', 'UNKNOWN')
+            leverage = getattr(position, 'leverage', 1.0)
+            fees = getattr(position, 'fees', 0.0)
+
+            # If current_price is not provided, try to get it
+            if current_price is None:
+                symbol = getattr(position, 'symbol', None)
+                if symbol:
+                    current_price = self._get_current_price(symbol)
+                else:
+                    return 0.0
+
+            # Calculate P&L based on position side
+            if side == 'LONG':
+                pnl = (current_price - entry_price) * size * leverage
+            elif side == 'SHORT':
+                pnl = (entry_price - current_price) * size * leverage
+            else:
+                pnl = 0.0
+
+            # Subtract fees for net P&L
+            net_pnl = pnl - fees
+
+            return net_pnl
+
+        except Exception as e:
+            logger.error(f"Error calculating verified P&L: {e}")
+            return 0.0
+
+    def _get_current_price(self, symbol):
+        """Get current price for a symbol with fallbacks"""
+        try:
+            # Try to get from trading executor
+            if hasattr(self.trading_executor, 'current_prices') and symbol in self.trading_executor.current_prices:
+                return self.trading_executor.current_prices[symbol]
+
+            # Try to get from data provider
+            if hasattr(self.trading_executor, 'data_provider'):
+                data_provider = self.trading_executor.data_provider
+                if hasattr(data_provider, 'get_current_price'):
+                    price = data_provider.get_current_price(symbol)
+                    if price and price > 0:
+                        return price
+
+            # Try to get from COB data
+            if hasattr(self.trading_executor, 'latest_cob_data') and symbol in self.trading_executor.latest_cob_data:
+                cob_data = self.trading_executor.latest_cob_data[symbol]
+                if hasattr(cob_data, 'stats') and 'mid_price' in cob_data.stats:
+                    return cob_data.stats['mid_price']
+
+            # Default fallback
+            return 0.0
+
+        except Exception as e:
+            logger.error(f"Error getting current price for {symbol}: {e}")
+            return 0.0
+
+    def _refresh_price(self, symbol):
+        """Force a price refresh for a symbol"""
+        try:
+            # Try to refresh from data provider
+            if hasattr(self.trading_executor, 'data_provider'):
+                data_provider = self.trading_executor.data_provider
+                if hasattr(data_provider, 'fetch_current_price'):
+                    price = data_provider.fetch_current_price(symbol)
+                    if price and price > 0:
+                        # Update trading executor price
+                        if hasattr(self.trading_executor, 'current_prices'):
+                            self.trading_executor.current_prices[symbol] = price
+
+                        # Update timestamp
+                        self.last_price_update[symbol] = datetime.now()
+
+                        logger.info(f"Refreshed price for {symbol}: ${price:.2f}")
+                        return True
+
+            logger.warning(f"Failed to refresh price for {symbol}")
+            return False
+
+        except Exception as e:
+            logger.error(f"Error refreshing price for {symbol}: {e}")
+            return False
+
+    def _ensure_position_reset(self, symbol):
+        """Ensure position is properly reset before new entry"""
+        try:
+            # Check if we have an active position
+            if hasattr(self.trading_executor, 'positions') and symbol in self.trading_executor.positions:
+                # Position exists, check if it's valid
+                position = self.trading_executor.positions[symbol]
+                if position and getattr(position, 'active', False):
+                    logger.warning(f"Position already active for {symbol}, cannot enter new position")
+                    return False
+
+            # Check reset flag
+            if symbol in self.position_reset_flags and not self.position_reset_flags[symbol]:
+                # Force position cleanup
+                if hasattr(self.trading_executor, 'positions'):
+                    self.trading_executor.positions.pop(symbol, None)
+
+                logger.info(f"Forced position reset for {symbol}")
+                self.position_reset_flags[symbol] = True
+
+            return True
+
+        except Exception as e:
+            logger.error(f"Error ensuring position reset for {symbol}: {e}")
+            return False
+
+    def get_trade_history(self, symbol=None):
+        """Get verified trade history"""
+        if symbol:
+            return self.trade_history.get(symbol, [])
+        return self.trade_history
+
+    def get_price_update_status(self):
+        """Get price update status for all symbols"""
+        status = {}
+        current_time = datetime.now()
+
+        for symbol, timestamp in self.last_price_update.items():
+            time_since_update = (current_time - timestamp).total_seconds()
+            status[symbol] = {
+                'last_update': timestamp,
+                'seconds_ago': time_since_update,
+                'is_fresh': time_since_update <= self.price_update_threshold
+            }
+
+        return status
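Based on the constructor and `apply_fixes` shown above, wiring the fix into startup looks like the following (the fix module's import path is assumed, as the diff does not name the file):

```python
from core.trading_executor import TradingExecutor
from trading_executor_fix import TradingExecutorFix  # module path assumed

executor = TradingExecutor()  # constructor arguments omitted for brevity
TradingExecutorFix(executor).apply_fixes()  # patches execute_action, close_position, calculate_pnl, update_prices
```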
|
@ -62,6 +62,7 @@ logging.getLogger('dash.dash').setLevel(logging.WARNING)
|
|||||||
|
|
||||||
# Import core components
|
# Import core components
|
||||||
from core.config import get_config
|
from core.config import get_config
|
||||||
|
from core.data_provider import DataProvider
|
||||||
from core.standardized_data_provider import StandardizedDataProvider
|
from core.standardized_data_provider import StandardizedDataProvider
|
||||||
from core.orchestrator import TradingOrchestrator
|
from core.orchestrator import TradingOrchestrator
|
||||||
from core.trading_executor import TradingExecutor
|
from core.trading_executor import TradingExecutor
|
||||||
@ -114,7 +115,7 @@ except ImportError:
|
|||||||
class CleanTradingDashboard:
|
class CleanTradingDashboard:
|
||||||
"""Clean, modular trading dashboard implementation"""
|
"""Clean, modular trading dashboard implementation"""
|
||||||
|
|
||||||
def __init__(self, data_provider: Optional[DataProvider] = None, orchestrator: Optional[Any] = None, trading_executor: Optional[TradingExecutor] = None):
|
def __init__(self, data_provider=None, orchestrator: Optional[Any] = None, trading_executor: Optional[TradingExecutor] = None):
|
||||||
self.config = get_config()
|
self.config = get_config()
|
||||||
|
|
||||||
# Initialize update batch counter to reduce flickering
|
# Initialize update batch counter to reduce flickering
|
||||||
@ -5532,6 +5533,16 @@ class CleanTradingDashboard:
|
|||||||
logger.error(f"Error initializing enhanced training system: {e}")
|
logger.error(f"Error initializing enhanced training system: {e}")
|
||||||
self.training_system = None
|
self.training_system = None
|
||||||
|
|
||||||
|
def _initialize_standardized_cnn(self):
|
||||||
|
"""Initialize StandardizedCNN model for the dashboard"""
|
||||||
|
try:
|
||||||
|
from NN.models.standardized_cnn import StandardizedCNN
|
||||||
|
self.standardized_cnn = StandardizedCNN(model_name="dashboard_standardized_cnn")
|
||||||
|
logger.info("StandardizedCNN model initialized for dashboard")
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"StandardizedCNN initialization failed: {e}")
|
||||||
|
self.standardized_cnn = None
|
||||||
|
|
||||||
def _initialize_enhanced_position_sync(self):
|
def _initialize_enhanced_position_sync(self):
|
||||||
"""Initialize enhanced position synchronization system"""
|
"""Initialize enhanced position synchronization system"""
|
||||||
try:
|
try:
|
||||||
|