infinite load WIP
This commit is contained in:
139
test_infinite_scroll_backend.py
Normal file
139
test_infinite_scroll_backend.py
Normal file
@@ -0,0 +1,139 @@
|
||||
"""
|
||||
Test infinite scroll backend data loading
|
||||
"""
|
||||
|
||||
import sys
|
||||
from pathlib import Path
|
||||
sys.path.insert(0, str(Path(__file__).parent))
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from core.data_provider import DataProvider
|
||||
from ANNOTATE.core.data_loader import HistoricalDataLoader
|
||||
|
||||
def test_backend_data_loading():
    """Test if backend can load historical data with direction parameter"""
    print("=" * 80)
    print("Testing Infinite Scroll Backend Data Loading")
    print("=" * 80)

    # Initialize data provider
    print("\n1. Initializing DataProvider...")
    data_provider = DataProvider()

    # Initialize data loader
    print("2. Initializing HistoricalDataLoader...")
    data_loader = HistoricalDataLoader(data_provider)
    data_loader.disable_startup_mode()  # Force fresh data

    symbol = "ETH/USDT"
    timeframe = "1m"

    # Test 1: Load initial data (latest)
    initial_df = _load_initial_data(data_loader, symbol, timeframe)
    if initial_df is None:
        # Without an initial window the paging tests below have no anchor
        # timestamps to work from, so bail out (matches original early return).
        return

    # Test 2: Load older data (before first timestamp)
    _check_older_data(data_loader, symbol, timeframe, initial_df.index[0])

    # Test 3: Load newer data (after last timestamp)
    _check_newer_data(data_loader, symbol, timeframe, initial_df.index[-1])

    # Test 4: Check DuckDB directly
    _check_duckdb_storage(data_provider, symbol, timeframe)

    print("\n" + "=" * 80)
    print("Test Complete")
    print("=" * 80)


def _load_initial_data(data_loader, symbol, timeframe):
    """Load the latest candles; return the DataFrame, or None if nothing loaded."""
    print(f"\n3. Loading initial data for {symbol} {timeframe}...")
    initial_df = data_loader.get_data(
        symbol=symbol,
        timeframe=timeframe,
        limit=100,
        direction='latest'
    )

    if initial_df is None or initial_df.empty:
        print("❌ FAILED: No initial data loaded")
        return None

    print(f"✅ Loaded {len(initial_df)} initial candles")
    print(f" First timestamp: {initial_df.index[0]}")
    print(f" Last timestamp: {initial_df.index[-1]}")
    return initial_df


def _check_older_data(data_loader, symbol, timeframe, first_timestamp):
    """Request candles before *first_timestamp* and verify they really are older."""
    print(f"\n4. Loading older data BEFORE {first_timestamp}...")
    older_df = data_loader.get_data(
        symbol=symbol,
        timeframe=timeframe,
        end_time=first_timestamp,
        limit=100,
        direction='before'
    )

    if older_df is None or older_df.empty:
        print("❌ FAILED: No older data loaded")
        print(" This might mean:")
        print(" - No data exists before this timestamp in DuckDB")
        print(" - The query is not working correctly")
        return

    print(f"✅ Loaded {len(older_df)} older candles")
    print(f" First timestamp: {older_df.index[0]}")
    print(f" Last timestamp: {older_df.index[-1]}")

    # The newest of the "older" candles must still predate the initial window.
    if older_df.index[-1] < first_timestamp:
        print(f"✅ Data is correctly older (last older candle: {older_df.index[-1]} < first initial: {first_timestamp})")
    else:
        print(f"❌ WARNING: Data is NOT older! Last older: {older_df.index[-1]} >= first initial: {first_timestamp}")


def _check_newer_data(data_loader, symbol, timeframe, last_timestamp):
    """Request candles after *last_timestamp* and verify they really are newer."""
    print(f"\n5. Loading newer data AFTER {last_timestamp}...")
    newer_df = data_loader.get_data(
        symbol=symbol,
        timeframe=timeframe,
        start_time=last_timestamp,
        limit=100,
        direction='after'
    )

    if newer_df is None or newer_df.empty:
        # An empty result is not necessarily a failure here: the initial
        # window may already end at the most recent stored candle.
        print("❌ No newer data loaded (this is expected if we're at the latest data)")
        return

    print(f"✅ Loaded {len(newer_df)} newer candles")
    print(f" First timestamp: {newer_df.index[0]}")
    print(f" Last timestamp: {newer_df.index[-1]}")

    # The oldest of the "newer" candles must come after the initial window.
    if newer_df.index[0] > last_timestamp:
        print(f"✅ Data is correctly newer (first newer candle: {newer_df.index[0]} > last initial: {last_timestamp})")
    else:
        print(f"❌ WARNING: Data is NOT newer! First newer: {newer_df.index[0]} <= last initial: {last_timestamp}")


def _check_duckdb_storage(data_provider, symbol, timeframe):
    """Inspect DuckDB directly: report total candle count and covered time range."""
    print(f"\n6. Checking DuckDB storage directly...")
    if not data_provider.duckdb_storage:
        print(" ❌ DuckDB storage not available")
        return

    # Get total count
    query = "SELECT COUNT(*) as count FROM ohlcv_data WHERE symbol = ? AND timeframe = ?"
    result = data_provider.duckdb_storage.conn.execute(query, [symbol, timeframe]).fetchone()
    total_count = result[0] if result else 0

    print(f" Total candles in DuckDB: {total_count}")

    if total_count == 0:
        print(" ❌ No data in DuckDB! Need to fetch from API first.")
        return

    # Get time range
    query = """
        SELECT
            MIN(timestamp) as min_ts,
            MAX(timestamp) as max_ts
        FROM ohlcv_data
        WHERE symbol = ? AND timeframe = ?
    """
    result = data_provider.duckdb_storage.conn.execute(query, [symbol, timeframe]).fetchone()

    if result:
        import pandas as pd
        # Timestamps are stored as epoch milliseconds; render as UTC datetimes.
        min_ts = pd.to_datetime(result[0], unit='ms', utc=True)
        max_ts = pd.to_datetime(result[1], unit='ms', utc=True)
        print(f" Time range: {min_ts} to {max_ts}")
        print(f" Duration: {max_ts - min_ts}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
test_backend_data_loading()
|
||||
Reference in New Issue
Block a user