new realtime module
NN/example.py | 261 lines (new file)
@@ -0,0 +1,261 @@
#!/usr/bin/env python
"""
Example script for the Neural Network Trading System
This shows basic usage patterns for the system components.
"""

import os
import sys
import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
from datetime import datetime
import logging

# Add project root to path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# Import components
from NN.utils.data_interface import DataInterface
from NN.models.cnn_model import CNNModel
from NN.models.transformer_model import TransformerModel, MixtureOfExpertsModel
from NN.main import NeuralNetworkOrchestrator

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)

logger = logging.getLogger('example')

def example_data_interface():
    """Show how to use the data interface"""
    logger.info("=== Data Interface Example ===")

    # Initialize data interface
    di = DataInterface(symbol="BTC/USDT", timeframes=['1h', '4h', '1d'])

    # Get historical data
    df_1h = di.get_historical_data(timeframe='1h', n_candles=100)
    if df_1h is not None and not df_1h.empty:
        logger.info(f"Retrieved {len(df_1h)} 1-hour candles")
        logger.info(f"Most recent candle: {df_1h.iloc[-1]}")

    # Prepare data for neural network
    X, y, timestamps = di.prepare_nn_input(timeframes=['1h'], n_candles=500, window_size=20)
    if X is not None and y is not None:
        logger.info(f"Prepared input shape: {X.shape}, target shape: {y.shape}")

    # Generate a dataset
    dataset = di.generate_training_dataset(
        timeframes=['1h', '4h'],
        n_candles=1000,
        window_size=20
    )
    if dataset:
        logger.info(f"Dataset generated and saved to: {list(dataset.values())}")

    # Parenthesize the tuple so that all three values fall back to None together
    return (X, y, timestamps) if X is not None else (None, None, None)

def example_cnn_model(X=None, y=None):
    """Show how to use the CNN model"""
    logger.info("=== CNN Model Example ===")

    # If no data provided, create dummy data
    if X is None or y is None:
        logger.info("Creating dummy data for CNN example")
        X = np.random.random((1000, 20, 5))  # 1000 samples, 20 time steps, 5 features
        y = np.random.randint(0, 2, size=(1000,))  # Binary labels

    # Split data into training and testing sets
    from sklearn.model_selection import train_test_split
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

    # Initialize and build the CNN model
    cnn = CNNModel(input_shape=(20, 5), output_size=1, model_dir='NN/models/saved')
    cnn.build_model(filters=(32, 64, 128), kernel_sizes=(3, 5, 7), dropout_rate=0.3)

    # Train the model (very small number of epochs for this example)
    history = cnn.train(
        X_train, y_train,
        batch_size=32,
        epochs=5,  # Just a few epochs for the example
        validation_split=0.2
    )

    # Evaluate the model
    metrics = cnn.evaluate(X_test, y_test, plot_results=True)
    if metrics:
        logger.info(f"CNN Evaluation metrics: {metrics}")

    # Make a prediction
    y_pred, y_proba = cnn.predict(X_test[:1])
    logger.info(f"CNN Prediction: {y_pred[0]}, Probability: {y_proba[0]:.4f}")

    return cnn

def example_transformer_model(X=None, y=None, cnn_model=None):
    """Show how to use the Transformer model"""
    logger.info("=== Transformer Model Example ===")

    # If no data provided, create dummy data
    if X is None or y is None:
        logger.info("Creating dummy data for Transformer example")
        X = np.random.random((1000, 20, 5))  # 1000 samples, 20 time steps, 5 features
        y = np.random.randint(0, 2, size=(1000,))  # Binary labels

    # Generate high-level features (from CNN model, or random if no CNN provided)
    if cnn_model is not None and hasattr(cnn_model, 'extract_hidden_features'):
        # Extract features from CNN model
        X_features = cnn_model.extract_hidden_features(X)
        logger.info(f"Extracted {X_features.shape[1]} features from CNN model")
    else:
        # Generate random features
        X_features = np.random.random((len(X), 128))
        logger.info("Generated random features for Transformer model")

    # Split data into training and testing sets
    from sklearn.model_selection import train_test_split
    X_train, X_test, X_feat_train, X_feat_test, y_train, y_test = train_test_split(
        X, X_features, y, test_size=0.2, random_state=42
    )

    # Initialize and build the Transformer model
    transformer = TransformerModel(
        ts_input_shape=(20, 5),
        feature_input_shape=X_features.shape[1],
        output_size=1,
        model_dir='NN/models/saved'
    )
    transformer.build_model(
        embed_dim=32,
        num_heads=2,
        ff_dim=64,
        num_transformer_blocks=2,
        dropout_rate=0.2
    )

    # Train the model (very small number of epochs for this example)
    history = transformer.train(
        X_train, X_feat_train, y_train,
        batch_size=32,
        epochs=5,  # Just a few epochs for the example
        validation_split=0.2
    )

    # Make a prediction
    y_pred, y_proba = transformer.predict(X_test[:1], X_feat_test[:1])
    logger.info(f"Transformer Prediction: {y_pred[0]}, Probability: {y_proba[0]:.4f}")

    return transformer

def example_moe_model(X=None, y=None, cnn_model=None, transformer_model=None):
    """Show how to use the Mixture of Experts model"""
    logger.info("=== Mixture of Experts Example ===")

    # If no data provided, create dummy data
    if X is None or y is None:
        logger.info("Creating dummy data for MoE example")
        X = np.random.random((1000, 20, 5))  # 1000 samples, 20 time steps, 5 features
        y = np.random.randint(0, 2, size=(1000,))  # Binary labels

    # If models not provided, create them
    if cnn_model is None:
        logger.info("Creating a new CNN model for MoE")
        cnn_model = CNNModel(input_shape=(20, 5), output_size=1)
        cnn_model.build_model()

    if transformer_model is None:
        logger.info("Creating a new Transformer model for MoE")
        transformer_model = TransformerModel(ts_input_shape=(20, 5), feature_input_shape=128, output_size=1)
        transformer_model.build_model()

    # Initialize MoE model
    moe = MixtureOfExpertsModel(output_size=1, model_dir='NN/models/saved')

    # Add expert models
    moe.add_expert('cnn', cnn_model)
    moe.add_expert('transformer', transformer_model)

    # Build the MoE model (this is a simplified implementation - in a real scenario
    # you would need to handle the interfaces between the models more carefully)
    moe.build_model(
        ts_input_shape=(20, 5),
        expert_weights={'cnn': 0.7, 'transformer': 0.3}
    )

    # In a real implementation, you would train the MoE model here
    logger.info("MoE model built - in a real implementation, you would train it here")

    return moe

def example_orchestrator():
    """Show how to use the Orchestrator"""
    logger.info("=== Orchestrator Example ===")

    # Configure the orchestrator
    config = {
        'symbol': 'BTC/USDT',
        'timeframes': ['1h', '4h'],
        'window_size': 20,
        'n_features': 5,
        'output_size': 3,  # BUY/HOLD/SELL
        'batch_size': 32,
        'epochs': 5,  # Small number for the example
        'model_dir': 'NN/models/saved',
        'data_dir': 'NN/data'
    }

    # Initialize the orchestrator
    orchestrator = NeuralNetworkOrchestrator(config)

    # Prepare training data
    X, y, timestamps = orchestrator.prepare_training_data(
        timeframes=['1h'],
        n_candles=200
    )

    if X is not None and y is not None:
        logger.info(f"Prepared training data: X shape {X.shape}, y shape {y.shape}")

        # Train CNN model
        logger.info("Training CNN model with orchestrator...")
        history = orchestrator.train_cnn_model(X, y, epochs=2)  # Very small for the example

        # Make a prediction
        result = orchestrator.run_inference_pipeline(
            model_type='cnn',
            timeframe='1h'
        )

        if result:
            logger.info(f"Inference result: {result}")
    else:
        logger.warning("Could not prepare training data - this is expected if no real data is available")
        logger.info("The orchestrator would normally handle training and inference")

def main():
    """Run all examples"""
    logger.info("Starting Neural Network Trading System Examples")

    # Example 1: Data Interface
    X, y, timestamps = example_data_interface()

    # Example 2: CNN Model
    cnn_model = example_cnn_model(X, y)

    # Example 3: Transformer Model
    transformer_model = example_transformer_model(X, y, cnn_model)

    # Example 4: Mixture of Experts
    moe_model = example_moe_model(X, y, cnn_model, transformer_model)

    # Example 5: Orchestrator
    example_orchestrator()

    logger.info("Examples completed")

if __name__ == "__main__":
    main()
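# To try the script end to end, run it from the repository root (assuming the NN
# package and its dependencies - numpy, pandas, tensorflow, scikit-learn,
# matplotlib - are installed in the active environment):
#
#   python NN/example.py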