#!/usr/bin/env python3
|
|
"""
|
|
Integration test script for COBY system components.
|
|
Run this to test the TimescaleDB integration and basic functionality.
|
|
"""
|
|
|
|
import asyncio
|
|
import sys
|
|
from datetime import datetime, timezone
|
|
from pathlib import Path
|
|
|
|
# Add COBY to path
|
|
sys.path.insert(0, str(Path(__file__).parent))
|
|
|
|
from config import config
|
|
from storage.timescale_manager import TimescaleManager
|
|
from models.core import OrderBookSnapshot, TradeEvent, PriceLevel
|
|
from utils.logging import setup_logging, get_logger
|
|
|
|
# Setup logging
|
|
setup_logging(level='INFO', console_output=True)
|
|
logger = get_logger(__name__)
|
|
|
|
|
|
async def test_database_connection():
    """Verify database connectivity, health check, and storage stats.

    Returns:
        bool: True when the connection is healthy and stats are readable,
        False on an unhealthy check or any exception.
    """
    logger.info("🔌 Testing database connection...")

    manager = None
    try:
        manager = TimescaleManager()
        await manager.initialize()

        # Test health check
        is_healthy = await manager.health_check()
        if is_healthy:
            logger.info("✅ Database connection: HEALTHY")
        else:
            logger.error("❌ Database connection: UNHEALTHY")
            return False

        # Test storage stats
        stats = await manager.get_storage_stats()
        logger.info(f"📊 Found {len(stats.get('table_sizes', []))} tables")

        for table_info in stats.get('table_sizes', []):
            logger.info(f" 📋 {table_info['table']}: {table_info['size']}")

        return True

    except Exception as e:
        logger.error(f"❌ Database test failed: {e}")
        return False
    finally:
        # Close on every exit path; the original leaked the connection pool
        # whenever the health check failed or an exception was raised.
        if manager is not None:
            try:
                await manager.close()
            except Exception:
                logger.warning("Failed to close database manager cleanly")
|
|
|
|
|
|
async def test_data_storage():
    """Exercise single-record storage and retrieval round-trips.

    Stores one order book and one trade, then reads the order book back.

    Returns:
        bool: True when every store/retrieve step succeeds.
    """
    logger.info("💾 Testing data storage operations...")

    manager = None
    try:
        manager = TimescaleManager()
        await manager.initialize()

        # Create test order book
        test_orderbook = OrderBookSnapshot(
            symbol="BTCUSDT",
            exchange="test_exchange",
            timestamp=datetime.now(timezone.utc),
            bids=[
                PriceLevel(price=50000.0, size=1.5, count=3),
                PriceLevel(price=49999.0, size=2.0, count=5)
            ],
            asks=[
                PriceLevel(price=50001.0, size=1.0, count=2),
                PriceLevel(price=50002.0, size=1.5, count=4)
            ],
            sequence_id=12345
        )

        # Test storing order book
        result = await manager.store_orderbook(test_orderbook)
        if result:
            logger.info("✅ Order book storage: SUCCESS")
        else:
            logger.error("❌ Order book storage: FAILED")
            return False

        # Test retrieving order book
        retrieved = await manager.get_latest_orderbook("BTCUSDT", "test_exchange")
        if retrieved:
            logger.info(f"✅ Order book retrieval: SUCCESS (mid_price: {retrieved.mid_price})")
        else:
            logger.error("❌ Order book retrieval: FAILED")
            return False

        # Create test trade
        test_trade = TradeEvent(
            symbol="BTCUSDT",
            exchange="test_exchange",
            timestamp=datetime.now(timezone.utc),
            price=50000.5,
            size=0.1,
            side="buy",
            trade_id="test_trade_123"
        )

        # Test storing trade
        result = await manager.store_trade(test_trade)
        if result:
            logger.info("✅ Trade storage: SUCCESS")
        else:
            logger.error("❌ Trade storage: FAILED")
            return False

        return True

    except Exception as e:
        logger.error(f"❌ Data storage test failed: {e}")
        return False
    finally:
        # Close on every exit path; the original leaked the pool whenever a
        # step failed or raised before reaching the final close().
        if manager is not None:
            try:
                await manager.close()
            except Exception:
                logger.warning("Failed to close database manager cleanly")
|
|
|
|
|
|
async def test_batch_operations():
    """Exercise batch storage of order books and trades.

    Returns:
        bool: True when both batch writes report full record counts
        (5 order books, 10 trades).
    """
    logger.info("📦 Testing batch operations...")

    manager = None
    try:
        manager = TimescaleManager()
        await manager.initialize()

        # Create batch of order books
        orderbooks = []
        for i in range(5):
            orderbook = OrderBookSnapshot(
                symbol="ETHUSDT",
                exchange="test_exchange",
                timestamp=datetime.now(timezone.utc),
                bids=[PriceLevel(price=3000.0 + i, size=1.0)],
                asks=[PriceLevel(price=3001.0 + i, size=1.0)],
                sequence_id=i
            )
            orderbooks.append(orderbook)

        # Test batch storage
        result = await manager.batch_store_orderbooks(orderbooks)
        if result == 5:
            logger.info(f"✅ Batch order book storage: SUCCESS ({result} records)")
        else:
            logger.error(f"❌ Batch order book storage: PARTIAL ({result}/5 records)")
            return False

        # Create batch of trades
        trades = []
        for i in range(10):
            trade = TradeEvent(
                symbol="ETHUSDT",
                exchange="test_exchange",
                timestamp=datetime.now(timezone.utc),
                price=3000.0 + (i * 0.1),
                size=0.05,
                side="buy" if i % 2 == 0 else "sell",
                trade_id=f"batch_trade_{i}"
            )
            trades.append(trade)

        # Test batch trade storage
        result = await manager.batch_store_trades(trades)
        if result == 10:
            logger.info(f"✅ Batch trade storage: SUCCESS ({result} records)")
        else:
            logger.error(f"❌ Batch trade storage: PARTIAL ({result}/10 records)")
            return False

        return True

    except Exception as e:
        logger.error(f"❌ Batch operations test failed: {e}")
        return False
    finally:
        # Release the pool on every exit path (the original skipped close()
        # whenever a batch came up short or an exception was raised).
        if manager is not None:
            try:
                await manager.close()
            except Exception:
                logger.warning("Failed to close database manager cleanly")
|
|
|
|
|
|
async def test_configuration():
    """Validate the configuration system: URLs, bucket sizes, config dict.

    Returns:
        bool: True when every configuration accessor works.
    """
    logger.info("⚙️ Testing configuration system...")

    try:
        # Test database configuration. Mask credentials before logging, but
        # guard against an empty password: str.replace('', '***') would
        # insert '***' between every character of the URL.
        db_url = config.get_database_url()
        db_password = config.database.password
        masked_db = db_url.replace(db_password, '***') if db_password else db_url
        logger.info(f"✅ Database URL: {masked_db}")

        # Test Redis configuration (same empty-password guard).
        redis_url = config.get_redis_url()
        redis_password = config.redis.password
        masked_redis = redis_url.replace(redis_password, '***') if redis_password else redis_url
        logger.info(f"✅ Redis URL: {masked_redis}")

        # Test bucket sizes
        btc_bucket = config.get_bucket_size('BTCUSDT')
        eth_bucket = config.get_bucket_size('ETHUSDT')
        logger.info(f"✅ Bucket sizes: BTC=${btc_bucket}, ETH=${eth_bucket}")

        # Test configuration dict
        config_dict = config.to_dict()
        logger.info(f"✅ Configuration loaded: {len(config_dict)} sections")

        return True

    except Exception as e:
        logger.error(f"❌ Configuration test failed: {e}")
        return False
|
|
|
|
|
|
async def run_all_tests():
    """Drive every integration test in sequence and log a summary table.

    Returns:
        bool: True only when all tests passed.
    """
    logger.info("🚀 Starting COBY Integration Tests")
    logger.info("=" * 50)

    # (display name, coroutine function) pairs, run in order.
    suite = (
        ("Configuration", test_configuration),
        ("Database Connection", test_database_connection),
        ("Data Storage", test_data_storage),
        ("Batch Operations", test_batch_operations),
    )

    outcomes = []
    for test_name, runner in suite:
        logger.info(f"\n🧪 Running {test_name} test...")
        try:
            ok = await runner()
        except Exception as e:
            # A crash inside a test counts as a failure, not a suite abort.
            logger.error(f"❌ {test_name}: ERROR - {e}")
            ok = False
        else:
            if ok:
                logger.info(f"✅ {test_name}: PASSED")
            else:
                logger.error(f"❌ {test_name}: FAILED")
        outcomes.append((test_name, ok))

    # Summary
    logger.info("\n" + "=" * 50)
    logger.info("📋 TEST SUMMARY")
    logger.info("=" * 50)

    total = len(outcomes)
    passed = sum(1 for _, ok in outcomes if ok)

    for test_name, ok in outcomes:
        status = "✅ PASSED" if ok else "❌ FAILED"
        logger.info(f"{test_name:20} {status}")

    logger.info(f"\nOverall: {passed}/{total} tests passed")

    if passed != total:
        logger.error("⚠️ Some tests failed. Check configuration and database connection.")
        return False
    logger.info("🎉 All tests passed! System is ready.")
    return True
|
|
|
|
|
|
if __name__ == "__main__":
    print("COBY Integration Test Suite")
    print("=" * 30)

    # Run the full async suite and translate its result into an exit code.
    ok = asyncio.run(run_all_tests())

    if ok:
        print("\n🎉 Integration tests completed successfully!")
        print("The system is ready for the next development phase.")
    else:
        print("\n❌ Integration tests failed!")
        print("Please check the logs and fix any issues before proceeding.")
    sys.exit(0 if ok else 1)