commit c28ee2c432 (parent 86a579bea9)
Author: Dobromir Popov
Date:   2025-10-24 23:35:45 +03:00

4 changed files with 86 additions and 72 deletions

View File

@@ -11,6 +11,7 @@ from datetime import datetime, timedelta
 import pandas as pd
 from pathlib import Path
 import pickle
+import time
 
 logger = logging.getLogger(__name__)
@@ -44,7 +45,7 @@ class HistoricalDataLoader:
     def get_data(self, symbol: str, timeframe: str,
                  start_time: Optional[datetime] = None,
                  end_time: Optional[datetime] = None,
-                 limit: int = 500,
+                 limit: int = 2500,
                  direction: str = 'latest') -> Optional[pd.DataFrame]:
         """
         Get historical data for symbol and timeframe
@@ -60,12 +61,15 @@ class HistoricalDataLoader:
         Returns:
             DataFrame with OHLCV data or None if unavailable
         """
-        # Check memory cache first
-        cache_key = f"{symbol}_{timeframe}_{start_time}_{end_time}_{limit}_{direction}"
-        if cache_key in self.memory_cache:
+        start_time_ms = time.time()
+
+        # Check memory cache first (exclude direction from cache key for infinite scroll)
+        cache_key = f"{symbol}_{timeframe}_{start_time}_{end_time}_{limit}"
+        if cache_key in self.memory_cache and direction == 'latest':
             cached_data, cached_time = self.memory_cache[cache_key]
             if datetime.now() - cached_time < self.cache_ttl:
-                logger.debug(f"Returning cached data for {symbol} {timeframe}")
+                elapsed_ms = (time.time() - start_time_ms) * 1000
+                logger.debug(f"⚡ Memory cache hit for {symbol} {timeframe} ({elapsed_ms:.1f}ms)")
                 return cached_data
 
         try:
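Note: the subtle part of this hunk is that direction is dropped from the cache key while the lookup gains a direction == 'latest' guard. Without that guard, a 'before'/'after' infinite-scroll request could be answered from a cached 'latest' page. A minimal standalone sketch of the same TTL-cache pattern (the class and names are illustrative, not this repo's API):

    from datetime import datetime, timedelta

    class TTLCache:
        """Tiny in-memory TTL cache mirroring the memory_cache pattern above."""

        def __init__(self, ttl_seconds: int = 60):
            self._store = {}                      # key -> (value, inserted_at)
            self._ttl = timedelta(seconds=ttl_seconds)

        def get(self, key):
            entry = self._store.get(key)
            if entry is None:
                return None
            value, inserted_at = entry
            if datetime.now() - inserted_at >= self._ttl:
                del self._store[key]              # expired: evict and report a miss
                return None
            return value

        def put(self, key, value):
            self._store[key] = (value, datetime.now())

    # Pagination requests must bypass this cache: the key below no longer
    # distinguishes 'before'/'after' pages, so only direction == 'latest'
    # lookups are safe to serve from it.
    def cache_key(symbol, timeframe, start, end, limit):
        return f"{symbol}_{timeframe}_{start}_{end}_{limit}"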
@@ -77,7 +81,8 @@ class HistoricalDataLoader:
             if cached_df is not None and not cached_df.empty:
                 # Use cached data if we have enough candles
                 if len(cached_df) >= min(limit, 100):  # Use cached if we have at least 100 candles
-                    logger.debug(f"Using DataProvider cached data for {symbol} {timeframe} ({len(cached_df)} candles)")
+                    elapsed_ms = (time.time() - start_time_ms) * 1000
+                    logger.debug(f"🚀 DataProvider cache hit for {symbol} {timeframe} ({len(cached_df)} candles, {elapsed_ms:.1f}ms)")
 
                     # Filter by time range with direction support
                     filtered_df = self._filter_by_time_range(
@@ -158,20 +163,29 @@ class HistoricalDataLoader:
             logger.info(f"Loaded {len(df)} candles for {symbol} {timeframe}")
             return df
 
-            # Fallback: Try DuckDB first, then fetch from API if needed
-            if self.startup_mode:
-                logger.info(f"Loading data for {symbol} {timeframe} (startup mode: allow stale cache)")
-                df = self.data_provider.get_historical_data(
-                    symbol=symbol,
-                    timeframe=timeframe,
-                    limit=limit,
-                    allow_stale_cache=True
-                )
-            else:
-                # Check DuckDB first for historical data
-                if self.data_provider.duckdb_storage and (start_time or end_time):
-                    logger.info(f"Checking DuckDB for {symbol} {timeframe} historical data (direction={direction})")
-                    df = self.data_provider.duckdb_storage.get_ohlcv_data(
+            # Check DuckDB first for historical data (always check for infinite scroll)
+            if self.data_provider.duckdb_storage and (start_time or end_time):
+                logger.info(f"Checking DuckDB for {symbol} {timeframe} historical data (direction={direction})")
+                df = self.data_provider.duckdb_storage.get_ohlcv_data(
+                    symbol=symbol,
+                    timeframe=timeframe,
+                    start_time=start_time,
+                    end_time=end_time,
+                    limit=limit,
+                    direction=direction
+                )
+
+                if df is not None and not df.empty:
+                    elapsed_ms = (time.time() - start_time_ms) * 1000
+                    logger.info(f"✅ DuckDB hit for {symbol} {timeframe} ({len(df)} candles, {elapsed_ms:.1f}ms)")
+                    # Cache in memory
+                    self.memory_cache[cache_key] = (df.copy(), datetime.now())
+                    return df
+                else:
+                    logger.info(f"📡 No data in DuckDB, fetching from exchange API for {symbol} {timeframe}")
+
+                    # Fetch from exchange API with time range
+                    df = self._fetch_from_exchange_api(
                         symbol=symbol,
                         timeframe=timeframe,
                         start_time=start_time,
@@ -181,40 +195,35 @@ class HistoricalDataLoader:
                     )
 
                     if df is not None and not df.empty:
-                        logger.info(f"✅ Loaded {len(df)} candles from DuckDB for {symbol} {timeframe}")
+                        elapsed_ms = (time.time() - start_time_ms) * 1000
+                        logger.info(f"🌐 Exchange API hit for {symbol} {timeframe} ({len(df)} candles, {elapsed_ms:.1f}ms)")
+
+                        # Store in DuckDB for future use
+                        if self.data_provider.duckdb_storage:
+                            stored_count = self.data_provider.duckdb_storage.store_ohlcv_data(
+                                symbol=symbol,
+                                timeframe=timeframe,
+                                df=df
+                            )
+                            logger.info(f"💾 Stored {stored_count} new candles in DuckDB")
 
                         # Cache in memory
                         self.memory_cache[cache_key] = (df.copy(), datetime.now())
                         return df
                     else:
-                        logger.info(f"📡 No data in DuckDB, fetching from exchange API for {symbol} {timeframe}")
-
-                        # Fetch from exchange API with time range
-                        df = self._fetch_from_exchange_api(
-                            symbol=symbol,
-                            timeframe=timeframe,
-                            start_time=start_time,
-                            end_time=end_time,
-                            limit=limit,
-                            direction=direction
-                        )
-
-                        if df is not None and not df.empty:
-                            # Store in DuckDB for future use
-                            if self.data_provider.duckdb_storage:
-                                stored_count = self.data_provider.duckdb_storage.store_ohlcv_data(
-                                    symbol=symbol,
-                                    timeframe=timeframe,
-                                    df=df
-                                )
-                                logger.info(f"💾 Stored {stored_count} new candles in DuckDB")
-
-                            # Cache in memory
-                            self.memory_cache[cache_key] = (df.copy(), datetime.now())
-                            return df
-                        else:
-                            logger.warning(f"No data available from exchange API for {symbol} {timeframe}")
-                            return None
+                        logger.warning(f"No data available from exchange API for {symbol} {timeframe}")
+                        return None
+
+            # Fallback: Use DataProvider for latest data (startup mode or no time range)
+            if self.startup_mode and not (start_time or end_time):
+                logger.info(f"Loading data for {symbol} {timeframe} (startup mode: allow stale cache)")
+                df = self.data_provider.get_historical_data(
+                    symbol=symbol,
+                    timeframe=timeframe,
+                    limit=limit,
+                    allow_stale_cache=True
+                )
+            else:
 
             # Fetch from API and store in DuckDB (no time range specified)
             logger.info(f"Fetching latest data from API for {symbol} {timeframe}")
             df = self.data_provider.get_historical_data(
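Note: after this rewrite, get_data resolves data in a fixed order: memory cache (latest-only), DuckDB whenever a time range is given, exchange API with a write-back into DuckDB, and finally the DataProvider path for startup/latest requests. A condensed sketch of that chain, with the storage and API objects passed in as hypothetical stand-ins:

    from typing import Optional
    import pandas as pd

    def load_ohlcv(symbol: str, timeframe: str, *, start=None, end=None,
                   limit: int = 2500, direction: str = 'latest',
                   memory_cache: dict, duckdb, exchange, provider) -> Optional[pd.DataFrame]:
        """Condensed fallback chain; duckdb/exchange/provider are stand-ins."""
        key = f"{symbol}_{timeframe}_{start}_{end}_{limit}"

        # 1. Memory cache, only for plain 'latest' requests (TTL check elided).
        if direction == 'latest' and key in memory_cache:
            return memory_cache[key]

        # 2. DuckDB, whenever a time range is given (infinite scroll).
        if duckdb is not None and (start or end):
            df = duckdb.get_ohlcv_data(symbol=symbol, timeframe=timeframe,
                                       start_time=start, end_time=end,
                                       limit=limit, direction=direction)
            if df is not None and not df.empty:
                memory_cache[key] = df.copy()
                return df

            # 3. Exchange API, with write-back into DuckDB for next time.
            df = exchange.fetch(symbol, timeframe, start, end, limit, direction)
            if df is not None and not df.empty:
                duckdb.store_ohlcv_data(symbol=symbol, timeframe=timeframe, df=df)
                memory_cache[key] = df.copy()
            return df

        # 4. DataProvider fallback for latest data / startup mode.
        return provider.get_historical_data(symbol=symbol, timeframe=timeframe, limit=limit)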
@@ -403,7 +412,7 @@ class HistoricalDataLoader:
                                 timeframes: List[str],
                                 start_time: Optional[datetime] = None,
                                 end_time: Optional[datetime] = None,
-                                limit: int = 500) -> Dict[str, pd.DataFrame]:
+                                limit: int = 2500) -> Dict[str, pd.DataFrame]:
         """
         Get data for multiple timeframes at once
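Note: callers of this multi-timeframe helper now pull up to 2500 candles per timeframe by default. The method name is cut off in the hunk, so the name below is an assumption; only the signature and return type are taken from the diff:

    # Hypothetical call site; 'loader' is an initialized HistoricalDataLoader.
    frames = loader.get_data_for_timeframes(   # method name assumed, not shown in the hunk
        symbol='ETH/USDT',
        timeframes=['1m', '1h', '1d'],
        limit=2500,                            # new default, raised from 500
    )
    for tf, df in frames.items():
        print(tf, 0 if df is None else len(df), 'candles')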

View File

@@ -22,10 +22,33 @@
         "entry_state": {},
         "exit_state": {}
       }
+    },
+    {
+      "annotation_id": "5d5c4354-12dd-4e0c-92a8-eff631a5dfab",
+      "symbol": "ETH/USDT",
+      "timeframe": "1h",
+      "entry": {
+        "timestamp": "2025-10-23 20:00",
+        "price": 3818.72,
+        "index": 5
+      },
+      "exit": {
+        "timestamp": "2025-10-24 05:00",
+        "price": 3989.2,
+        "index": 6
+      },
+      "direction": "LONG",
+      "profit_loss_pct": 4.4643231239787164,
+      "notes": "",
+      "created_at": "2025-10-24T23:35:14.215744",
+      "market_context": {
+        "entry_state": {},
+        "exit_state": {}
+      }
     }
   ],
   "metadata": {
-    "total_annotations": 1,
-    "last_updated": "2025-10-24T22:33:26.194492"
+    "total_annotations": 2,
+    "last_updated": "2025-10-24T23:35:14.216759"
   }
 }
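Note: the stored profit_loss_pct is consistent with the annotation's entry and exit prices; for a LONG it is (exit - entry) / entry * 100:

    entry_price, exit_price = 3818.72, 3989.2
    pnl_pct = (exit_price - entry_price) / entry_price * 100
    print(pnl_pct)  # ≈ 4.4643231239787164, matching the stored value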

View File

@@ -534,7 +534,7 @@ class AnnotationDashboard:
         timeframes = data.get('timeframes', ['1s', '1m', '1h', '1d'])
         start_time_str = data.get('start_time')
         end_time_str = data.get('end_time')
-        limit = data.get('limit', 2000)  # Default 2000 candles for training
+        limit = data.get('limit', 2500)  # Default 2500 candles for training
         direction = data.get('direction', 'latest')  # 'latest', 'before', or 'after'
 
         logger.info(f"📊 Chart data request: {symbol} {timeframes} direction={direction} limit={limit}")

View File

@@ -59,10 +59,11 @@
     {% block extra_js %}
     <script>
         // Initialize application state
+        // IMPORTANT!!! DO NOT CHANGE {{ x }} to { { x } }
         window.appState = {
             currentSymbol: '{{ current_symbol }}',
             currentTimeframes: {{ timeframes | tojson }},
+            // IMPORTANT!!! DO NOT CHANGE {{ annotations | tojson }} to { { annotations | tojson } }
             annotations: {{ annotations | tojson }},
             pendingAnnotation: null,
             chartManager: null,
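Note: the shouting comments guard against a real failure mode: {{ x }} is a Jinja2 expression, while { { x } } (which some auto-formatters produce) is plain text to Jinja2 and reaches the browser verbatim as invalid JavaScript. A standalone demonstration:

    from jinja2 import Template

    print(Template("var n = {{ n }};").render(n=42))    # var n = 42;
    print(Template("var n = { { n } };").render(n=42))  # var n = { { n } };  (broken JS)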
@@ -98,7 +99,7 @@
         function loadInitialData() {
             console.log('Loading initial chart data...');
 
-            // Fetch initial chart data with 2000 candles for training
+            // Fetch initial chart data with 2500 candles for training
             fetch('/api/chart-data', {
                 method: 'POST',
                 headers: { 'Content-Type': 'application/json' },
@@ -107,7 +108,7 @@
                     timeframes: appState.currentTimeframes,
                     start_time: null,
                     end_time: null,
-                    limit: 2000 // Load 2000 candles initially for training
+                    limit: 2500 // Load 2500 candles initially for training
                 })
             })
             .then(response => {
@@ -453,25 +454,6 @@
                 console.error('✗ window.deleteAnnotation is NOT a function!');
             }
 
-            // Add a test button to verify functionality (temporary)
-            console.log('Adding test delete function to window for debugging...');
-            window.testDeleteFunction = function () {
-                console.log('Test delete function called');
-                console.log('window.deleteAnnotation type:', typeof window.deleteAnnotation);
-                if (typeof window.deleteAnnotation === 'function') {
-                    console.log('window.deleteAnnotation is available');
-                    // Test with a fake ID to see if the function runs
-                    console.log('Testing delete function with fake ID...');
-                    try {
-                        window.deleteAnnotation('test-fake-id');
-                    } catch (error) {
-                        console.error('Error in test delete:', error);
-                    }
-                } else {
-                    console.error('window.deleteAnnotation is NOT available');
-                }
-            };
-
             // Add test button to page (temporary debugging)
             const testButton = document.createElement('button');
             testButton.textContent = 'Test Delete Function';