multi pair inputs, WIP — working training?

This commit adds a new file: NN/utils/realtime_analyzer.py (182 lines).
|
||||
"""
|
||||
Realtime Analyzer for Neural Network Trading System
|
||||
|
||||
This module implements real-time analysis of market data using trained neural network models.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
import numpy as np
|
||||
from threading import Thread
|
||||
from queue import Queue
|
||||
from datetime import datetime
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class RealtimeAnalyzer:
    """
    Handles real-time analysis of market data using trained neural network models.

    Features:
    - Connects to real-time data sources (websockets)
    - Processes incoming data through the neural network
    - Generates trading signals
    - Manages risk and position sizing
    - Logs all trading decisions
    """

    # Cap on queued samples. Collection produces roughly one item per second
    # per timeframe while analysis consumes at most one item per prediction
    # interval, so an unbounded queue would grow forever and go stale.
    MAX_QUEUE_SIZE = 100

    def __init__(self, data_interface, model, symbol="BTC/USDT", timeframes=None):
        """
        Initialize the realtime analyzer.

        Args:
            data_interface (DataInterface): Preconfigured data interface
            model: Trained neural network model
            symbol (str): Trading pair symbol
            timeframes (list): List of timeframes to monitor (defaults to ['1h'])
        """
        self.data_interface = data_interface
        self.model = model
        self.symbol = symbol
        self.timeframes = timeframes or ['1h']
        self.running = False
        # Bounded queue: prevents unbounded memory growth when the analysis
        # loop consumes far more slowly than the collection loop produces.
        self.data_queue = Queue(maxsize=self.MAX_QUEUE_SIZE)
        self.prediction_interval = 60  # Seconds between predictions

        logger.info(f"RealtimeAnalyzer initialized for {symbol}")

    def start(self):
        """Start the data-collection and analysis threads (idempotent)."""
        if self.running:
            logger.warning("Realtime analyzer already running")
            return

        self.running = True

        # Start data collection thread
        self.data_thread = Thread(target=self._collect_data, daemon=True)
        self.data_thread.start()

        # Start analysis thread
        self.analysis_thread = Thread(target=self._analyze_data, daemon=True)
        self.analysis_thread.start()

        logger.info("Realtime analysis started")

    def stop(self):
        """Stop the realtime analysis process and join worker threads."""
        self.running = False
        # Threads are daemons, so a short join is enough; each loop exits on
        # its next iteration once self.running is False.
        if hasattr(self, 'data_thread'):
            self.data_thread.join(timeout=1)
        if hasattr(self, 'analysis_thread'):
            self.analysis_thread.join(timeout=1)
        logger.info("Realtime analysis stopped")

    def _collect_data(self):
        """Thread function for collecting real-time data."""
        from queue import Empty, Full  # local import keeps module imports untouched

        logger.info("Starting data collection thread")

        # In a real implementation, this would connect to websockets/API.
        # For now, we simulate data collection from the data interface.
        while self.running:
            try:
                # Get latest data for each timeframe
                for timeframe in self.timeframes:
                    # Get recent data (simulating real-time updates)
                    X, timestamp = self.data_interface.prepare_realtime_input(
                        timeframe=timeframe,
                        n_candles=30,
                        window_size=self.data_interface.window_size
                    )

                    if X is not None:
                        sample = {
                            'timeframe': timeframe,
                            'data': X,
                            'timestamp': timestamp
                        }
                        try:
                            self.data_queue.put_nowait(sample)
                        except Full:
                            # Queue is full: discard the stalest sample so
                            # the analyzer always works with recent data.
                            try:
                                self.data_queue.get_nowait()
                            except Empty:
                                pass
                            try:
                                self.data_queue.put_nowait(sample)
                            except Full:
                                pass  # lost a race to refill; drop this sample

                # Throttle data collection
                time.sleep(1)

            except Exception as e:
                logger.error(f"Error in data collection: {str(e)}")
                time.sleep(5)  # Wait before retrying

    def _analyze_data(self):
        """Thread function for analyzing data and generating signals."""
        from queue import Empty  # local import keeps module imports untouched

        logger.info("Starting analysis thread")

        last_prediction_time = 0.0

        while self.running:
            try:
                current_time = time.time()

                # Only make predictions at the specified interval
                if current_time - last_prediction_time < self.prediction_interval:
                    time.sleep(0.1)
                    continue

                # Drain the queue and keep only the newest sample so the
                # prediction reflects current data rather than the oldest
                # entry of an accumulated backlog.
                data_item = None
                try:
                    while True:
                        data_item = self.data_queue.get_nowait()
                except Empty:
                    pass

                if data_item is not None:
                    # Make prediction
                    prediction = self.model.predict(data_item['data'])

                    # Process prediction
                    self._process_prediction(
                        prediction=prediction,
                        timeframe=data_item['timeframe'],
                        timestamp=data_item['timestamp']
                    )

                    last_prediction_time = current_time

                time.sleep(0.1)

            except Exception as e:
                logger.error(f"Error in analysis: {str(e)}")
                time.sleep(1)  # Wait before retrying

    def _process_prediction(self, prediction, timeframe, timestamp):
        """
        Process model prediction and generate trading signals.

        Args:
            prediction: Model prediction output
            timeframe (str): Timeframe the prediction is for
            timestamp: Timestamp of the prediction
        """
        # Convert prediction to trading signal
        signal = self._prediction_to_signal(prediction)

        # Log the signal
        logger.info(
            f"Signal generated - Timeframe: {timeframe}, "
            f"Timestamp: {timestamp}, "
            f"Signal: {signal}"
        )

        # In a real implementation, we would execute trades here
        # For now, we'll just log the signals

    def _prediction_to_signal(self, prediction):
        """
        Convert model prediction to a trading signal.

        Args:
            prediction: Model prediction output. Either a single probability
                (binary up/down model) or a vector of three class scores
                ordered [SELL, HOLD, BUY]. Leading batch dimensions of
                size 1 are accepted.

        Returns:
            str: Trading signal ("BUY", "SELL" or "HOLD")
        """
        # Flatten so both (n,) and (1, n) outputs are handled uniformly.
        # (The previous shape-based check misread a 1-D 3-class vector as a
        # binary output, and always returned "SELL" for (1, 1) outputs
        # because argmax over the flattened array is 0.)
        scores = np.asarray(prediction).ravel()

        if scores.size == 1:
            # Binary classification: single probability of an up-move
            return "BUY" if scores[0] > 0.5 else "SELL"

        # Multi-class classification: highest-scoring class wins
        return ["SELL", "HOLD", "BUY"][int(np.argmax(scores))]
|
||||
Reference in New Issue
Block a user