#!/usr/bin/env python3
"""
Test Build Base Data Performance

This script tests the performance of build_base_data_input to ensure it's instantaneous.
"""

import sys
import os
import time
import logging
from datetime import datetime

# Make project-local packages importable when run as a script.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from core.orchestrator import TradingOrchestrator
from core.config import get_config

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def test_build_base_data_performance():
    """Test the performance of build_base_data_input.

    Starts a TradingOrchestrator, times repeated calls to
    build_base_data_input for one symbol, logs the results against fixed
    latency thresholds, then spot-checks a couple of other symbols.

    Returns:
        bool: True if the average build time is under 100ms, False on
        failure or on any exception.
    """
    logger.info("=== Testing Build Base Data Performance ===")

    try:
        # Initialize orchestrator
        config = get_config()
        orchestrator = TradingOrchestrator(
            symbol="ETH/USDT",
            config=config
        )

        # Start the orchestrator to initialize data
        orchestrator.start()
        logger.info("✅ Orchestrator started")

        try:
            # Wait a bit for data to be populated
            time.sleep(2)

            # Test performance of build_base_data_input
            symbol = "ETH/USDT"
            num_tests = 10
            total_time = 0.0

            logger.info(f"Running {num_tests} performance tests...")

            for i in range(num_tests):
                # perf_counter() is monotonic and high-resolution, unlike
                # time.time() — the right clock for millisecond latency tests.
                start_time = time.perf_counter()
                base_data = orchestrator.build_base_data_input(symbol)
                duration = (time.perf_counter() - start_time) * 1000  # ms
                total_time += duration

                if base_data:
                    logger.info(f"Test {i+1}: {duration:.2f}ms - ✅ Success")
                else:
                    logger.warning(f"Test {i+1}: {duration:.2f}ms - ❌ Failed (no data)")

            avg_time = total_time / num_tests

            logger.info("=== Performance Results ===")
            logger.info(f"Average time: {avg_time:.2f}ms")
            logger.info(f"Total time: {total_time:.2f}ms")

            # Performance thresholds
            if avg_time < 10:  # Less than 10ms is excellent
                logger.info("🎉 EXCELLENT: Build time is under 10ms")
            elif avg_time < 50:  # Less than 50ms is good
                logger.info("✅ GOOD: Build time is under 50ms")
            elif avg_time < 100:  # Less than 100ms is acceptable
                logger.info("⚠️ ACCEPTABLE: Build time is under 100ms")
            else:
                logger.error("❌ SLOW: Build time is over 100ms - needs optimization")

            # Test with multiple symbols (avoid shadowing the main `symbol`)
            logger.info("Testing with multiple symbols...")
            for extra_symbol in ["ETH/USDT", "BTC/USDT"]:
                start_time = time.perf_counter()
                orchestrator.build_base_data_input(extra_symbol)
                duration = (time.perf_counter() - start_time) * 1000
                logger.info(f"{extra_symbol}: {duration:.2f}ms")
        finally:
            # Always stop the orchestrator, even if a timing step raises,
            # so we don't leak its background threads/resources.
            orchestrator.stop()
            logger.info("✅ Orchestrator stopped")

        return avg_time < 100  # Return True if performance is acceptable

    except Exception as e:
        # logger.exception logs the traceback through the logging system
        # instead of dumping it to stderr with traceback.print_exc().
        logger.exception(f"❌ Performance test failed: {e}")
        return False
def test_cache_effectiveness():
    """Test that caching is working effectively.

    Calls build_base_data_input three times for the same symbol and
    checks that the second call is at least 2x faster than the first
    (i.e. the cache was hit), and that the three results are
    structurally consistent (same ohlcv_1s length).

    Returns:
        bool: True if the cache speedup was observed, False otherwise
        or on any exception.
    """
    logger.info("=== Testing Cache Effectiveness ===")

    try:
        # Initialize orchestrator
        config = get_config()
        orchestrator = TradingOrchestrator(
            symbol="ETH/USDT",
            config=config
        )

        orchestrator.start()
        try:
            time.sleep(2)  # Let data populate

            symbol = "ETH/USDT"

            def _timed_build():
                # One timed call; perf_counter() for monotonic ms timing.
                start = time.perf_counter()
                result = orchestrator.build_base_data_input(symbol)
                return result, (time.perf_counter() - start) * 1000

            # First call (should build cache), then two cached calls.
            base_data1, first_call_time = _timed_build()
            base_data2, second_call_time = _timed_build()
            base_data3, third_call_time = _timed_build()

            logger.info(f"First call (build cache): {first_call_time:.2f}ms")
            logger.info(f"Second call (use cache): {second_call_time:.2f}ms")
            logger.info(f"Third call (use cache): {third_call_time:.2f}ms")

            # Cache should make subsequent calls faster
            cache_effective = second_call_time < first_call_time * 0.5
            if cache_effective:
                logger.info("✅ Cache is working effectively")
            else:
                logger.warning("⚠️ Cache may not be working as expected")

            # Verify data consistency across the three results
            if base_data1 and base_data2 and base_data3:
                if len(base_data1.ohlcv_1s) == len(base_data2.ohlcv_1s) == len(base_data3.ohlcv_1s):
                    logger.info("✅ Data consistency maintained")
                else:
                    logger.warning("⚠️ Data consistency issues detected")
        finally:
            # Always stop the orchestrator, even if a step above raises.
            orchestrator.stop()

        return cache_effective

    except Exception as e:
        logger.exception(f"❌ Cache effectiveness test failed: {e}")
        return False
def main():
    """Run all performance tests and log a pass/fail summary."""
    logger.info("Starting Build Base Data Performance Tests")

    # Test 1: Basic performance
    perf_passed = test_build_base_data_performance()

    # Test 2: Cache effectiveness
    cache_passed = test_cache_effectiveness()

    # Summary
    logger.info("=== Test Summary ===")
    logger.info(f"Performance Test: {'✅ PASSED' if perf_passed else '❌ FAILED'}")
    logger.info(f"Cache Effectiveness: {'✅ PASSED' if cache_passed else '❌ FAILED'}")

    if perf_passed and cache_passed:
        logger.info("🎉 All tests passed! build_base_data_input is optimized.")
        logger.info("The system now:")
        logger.info("  - Builds BaseDataInput in under 100ms")
        logger.info("  - Uses effective caching for repeated calls")
        logger.info("  - Maintains data consistency")
    else:
        logger.error("❌ Some tests failed. Performance optimization needed.")
if __name__ == "__main__":
    main()