Merge annotate /ANNOTATE/core into /core.
Fix chart updates.
@@ -48,7 +48,7 @@ sys.path.insert(0, str(annotate_dir))
 try:
     from core.annotation_manager import AnnotationManager
     from core.real_training_adapter import RealTrainingAdapter
-    from core.data_loader import HistoricalDataLoader, TimeRangeManager
+    # Using main DataProvider directly instead of duplicate data_loader
 except ImportError:
     # Try alternative import path
     import importlib.util
@@ -71,15 +71,9 @@ except ImportError:
     train_spec.loader.exec_module(train_module)
     RealTrainingAdapter = train_module.RealTrainingAdapter
 
-    # Load data_loader
-    data_spec = importlib.util.spec_from_file_location(
-        "data_loader",
-        annotate_dir / "core" / "data_loader.py"
-    )
-    data_module = importlib.util.module_from_spec(data_spec)
-    data_spec.loader.exec_module(data_module)
-    HistoricalDataLoader = data_module.HistoricalDataLoader
-    TimeRangeManager = data_module.TimeRangeManager
+    # Using main DataProvider directly - no need for duplicate data_loader
+    HistoricalDataLoader = None
+    TimeRangeManager = None
 
 # Setup logging - configure before any logging occurs
 log_dir = Path(__file__).parent.parent / 'logs'
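Note: the except-ImportError branch keeps the spec-based loading only for the training adapter. As a minimal, self-contained sketch of that pattern (the helper name and its usage are illustrative, not part of this commit):

    import importlib.util

    def load_module_from_path(name, path):
        """Load a module directly from a file path, bypassing the normal import machinery."""
        spec = importlib.util.spec_from_file_location(name, path)
        if spec is None or spec.loader is None:
            raise ImportError(f"Cannot build an import spec for {path}")
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        return module

    # Hypothetical usage mirroring the surviving fallback above:
    # train_module = load_module_from_path("real_training_adapter",
    #                                      annotate_dir / "core" / "real_training_adapter.py")
    # RealTrainingAdapter = train_module.RealTrainingAdapter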
@@ -745,7 +739,17 @@ class AnnotationDashboard:
         ])
 
         # Initialize core components (skip initial load for fast startup)
-        self.data_provider = DataProvider(skip_initial_load=True) if DataProvider else None
+        try:
+            if DataProvider:
+                config = get_config()
+                self.data_provider = DataProvider(skip_initial_load=True)
+                logger.info("DataProvider initialized successfully")
+            else:
+                self.data_provider = None
+                logger.warning("DataProvider class not available")
+        except Exception as e:
+            logger.error(f"Failed to initialize DataProvider: {e}")
+            self.data_provider = None
 
         # Enable unified storage for real-time data access
         if self.data_provider:
@@ -780,15 +784,15 @@ class AnnotationDashboard:
         else:
             logger.info("Auto-load disabled. Models available for lazy loading: " + ", ".join(self.available_models))
 
-        # Initialize data loader with existing DataProvider
-        self.data_loader = HistoricalDataLoader(self.data_provider) if self.data_provider else None
-        self.time_range_manager = TimeRangeManager(self.data_loader) if self.data_loader else None
+        # Use main DataProvider directly instead of duplicate data_loader
+        self.data_loader = None  # Deprecated - using data_provider directly
+        self.time_range_manager = None  # Deprecated
 
         # Setup routes
         self._setup_routes()
 
         # Start background data refresh after startup
-        if self.data_loader:
+        if self.data_provider:
             self._start_background_data_refresh()
 
         logger.info("Annotation Dashboard initialized")
@@ -1105,7 +1109,8 @@ class AnnotationDashboard:
             logger.info(" Starting one-time background data refresh (fetching only recent missing data)")
 
             # Disable startup mode to fetch fresh data
-            self.data_loader.disable_startup_mode()
+            if self.data_provider:
+                self.data_provider.disable_startup_mode()
 
             # Use the new on-demand refresh method
             logger.info("Using on-demand refresh for recent data")
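The dashboard now depends on the main DataProvider exposing a skip_initial_load constructor flag and a disable_startup_mode() method. Those two names come from this diff; everything else in the sketch below is an assumption about what such an interface could look like, not the real implementation:

    class DataProvider:  # sketch of the assumed interface only
        def __init__(self, skip_initial_load=False):
            # In startup mode the provider serves cached/DB data and defers exchange calls.
            self.startup_mode = skip_initial_load
            if not skip_initial_load:
                self._load_initial_data()  # hypothetical eager load

        def disable_startup_mode(self):
            # Called once the UI is up so later requests may hit the exchange API.
            self.startup_mode = False

        def _load_initial_data(self):
            pass  # placeholder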
@@ -1374,15 +1379,14 @@ class AnnotationDashboard:
 
             pivot_logger.info(f"Recalculating pivots for {symbol} {timeframe} using backend data")
 
-            if not self.data_loader:
+            if not self.data_provider:
                 return jsonify({
                     'success': False,
-                    'error': {'code': 'DATA_LOADER_UNAVAILABLE', 'message': 'Data loader not available'}
+                    'error': {'code': 'DATA_PROVIDER_UNAVAILABLE', 'message': 'Data provider not available'}
                 })
 
-            # Fetch latest data from data_loader (which should have the updated cache/DB from previous calls)
-            # We get enough history for proper pivot calculation
-            df = self.data_loader.get_data(
+            # Fetch latest data from data_provider for pivot calculation
+            df = self.data_provider.get_data_for_annotation(
                 symbol=symbol,
                 timeframe=timeframe,
                 limit=2500,  # Enough for context
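The endpoint pulls up to 2500 candles because swing-point detection needs bars on both sides of each candidate pivot. The pivot algorithm itself is not shown in this hunk; purely as illustration, a generic swing-high detector over a pandas OHLCV frame (the 'high' column name and window size are assumptions) could look like:

    # Generic sketch: a bar is a pivot high when its high is the maximum of the surrounding window.
    def find_pivot_highs(df, lookback=5):
        highs = df['high'].values  # assumes a pandas DataFrame with a 'high' column
        pivot_indices = []
        for i in range(lookback, len(highs) - lookback):
            window = highs[i - lookback:i + lookback + 1]
            if highs[i] == window.max():
                pivot_indices.append(df.index[i])
        return pivot_indices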
@@ -1423,14 +1427,14 @@ class AnnotationDashboard:
 
             webui_logger.info(f"Chart data GET request: {symbol} {timeframe} limit={limit}")
 
-            if not self.data_loader:
+            if not self.data_provider:
                 return jsonify({
                     'success': False,
-                    'error': {'code': 'DATA_LOADER_UNAVAILABLE', 'message': 'Data loader not available'}
+                    'error': {'code': 'DATA_PROVIDER_UNAVAILABLE', 'message': 'Data provider not available'}
                 })
 
-            # Fetch data using data loader
-            df = self.data_loader.get_data(
+            # Fetch data using main data provider
+            df = self.data_provider.get_data_for_annotation(
                 symbol=symbol,
                 timeframe=timeframe,
                 limit=limit,
@@ -1486,12 +1490,12 @@ class AnnotationDashboard:
             if end_time_str:
                 webui_logger.info(f" end_time: {end_time_str}")
 
-            if not self.data_loader:
+            if not self.data_provider:
                 return jsonify({
                     'success': False,
                     'error': {
-                        'code': 'DATA_LOADER_UNAVAILABLE',
-                        'message': 'Data loader not available'
+                        'code': 'DATA_PROVIDER_UNAVAILABLE',
+                        'message': 'Data provider not available'
                     }
                 })
 
@@ -1499,14 +1503,14 @@ class AnnotationDashboard:
             start_time = datetime.fromisoformat(start_time_str.replace('Z', '+00:00')) if start_time_str else None
             end_time = datetime.fromisoformat(end_time_str.replace('Z', '+00:00')) if end_time_str else None
 
-            # Fetch data for each timeframe using data loader
+            # Fetch data for each timeframe using data provider
             # This will automatically:
             # 1. Check DuckDB first
             # 2. Fetch from API if not in cache
             # 3. Store in DuckDB for future use
             chart_data = {}
             for timeframe in timeframes:
-                df = self.data_loader.get_data(
+                df = self.data_provider.get_data_for_annotation(
                     symbol=symbol,
                     timeframe=timeframe,
                     start_time=start_time,
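The comment block above describes get_data_for_annotation as a read-through cache: DuckDB first, exchange API on a miss, persist what was fetched. Only the method name and its parameters appear in this diff; a minimal sketch of such a flow, with the helper calls invented purely for illustration, might be:

    def get_data_for_annotation(self, symbol, timeframe, start_time=None, end_time=None,
                                limit=500, direction='latest'):
        """Sketch of a read-through cache: serve from DuckDB when possible, else fetch and persist."""
        # 1. Check DuckDB first (assumed helper)
        df = self._query_duckdb(symbol, timeframe, start_time, end_time, limit, direction)
        if df is not None and len(df) >= limit:
            return df
        # 2. Fetch from API if not in cache (assumed helper)
        fetched = self._fetch_from_exchange(symbol, timeframe, start_time, end_time, limit)
        if fetched is not None and not fetched.empty:
            # 3. Store in DuckDB for future use (assumed helper)
            self._store_in_duckdb(symbol, timeframe, fetched)
            return fetched
        return df  # whatever partial data the cache had, possibly None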
@@ -1625,7 +1629,7 @@ class AnnotationDashboard:
 
             # Collect market snapshots for SQLite storage
             market_snapshots = {}
-            if self.data_loader:
+            if self.data_provider:
                 try:
                     # Get OHLCV data for all timeframes around the annotation time
                     entry_time = datetime.fromisoformat(data['entry']['timestamp'].replace('Z', '+00:00'))
@@ -1636,7 +1640,7 @@ class AnnotationDashboard:
                     end_time = exit_time + timedelta(minutes=5)
 
                     for timeframe in ['1s', '1m', '1h', '1d']:
-                        df = self.data_loader.get_data(
+                        df = self.data_provider.get_data_for_annotation(
                             symbol=data['symbol'],
                             timeframe=timeframe,
                             start_time=start_time,
@@ -2530,11 +2534,11 @@ class AnnotationDashboard:
                 'prediction': None
             }
 
-            # Get latest candle for the requested timeframe using data_loader
-            if self.data_loader:
+            # Get latest candle for the requested timeframe using data_provider
+            if self.data_provider:
                 try:
-                    # Get latest candle from data_loader
-                    df = self.data_loader.get_data(symbol, timeframe, limit=2, direction='latest')
+                    # Get latest candle from data_provider (includes real-time data)
+                    df = self.data_provider.get_data_for_annotation(symbol, timeframe, limit=2, direction='latest')
                     if df is not None and not df.empty:
                         latest_candle = df.iloc[-1]
 
@@ -2567,9 +2571,9 @@ class AnnotationDashboard:
                             'is_confirmed': is_confirmed
                         }
                 except Exception as e:
-                    logger.debug(f"Error getting latest candle from data_loader: {e}", exc_info=True)
+                    logger.debug(f"Error getting latest candle from data_provider: {e}", exc_info=True)
             else:
-                logger.debug("Data loader not available for live updates")
+                logger.debug("Data provider not available for live updates")
 
             # Get latest model predictions
             if self.orchestrator:
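The live-update paths request the last two candles (limit=2, direction='latest') and report an is_confirmed flag for the newest one. How the flag is computed is outside this hunk; one common rule, shown here only as an assumption, is that a candle is confirmed once its full timeframe window has elapsed:

    from datetime import datetime, timedelta, timezone

    TIMEFRAME_SECONDS = {'1s': 1, '1m': 60, '1h': 3600, '1d': 86400}

    def is_candle_confirmed(candle_open_time, timeframe, now=None):
        """A candle is confirmed once its timeframe window has fully elapsed.

        Assumes candle_open_time is a timezone-aware UTC datetime.
        """
        now = now or datetime.now(timezone.utc)
        return now >= candle_open_time + timedelta(seconds=TIMEFRAME_SECONDS[timeframe])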
@@ -2641,10 +2645,10 @@ class AnnotationDashboard:
             }
 
             # Get latest candle for each requested timeframe
-            if self.data_loader:
+            if self.data_provider:
                 for timeframe in timeframes:
                     try:
-                        df = self.data_loader.get_data(symbol, timeframe, limit=2, direction='latest')
+                        df = self.data_provider.get_data_for_annotation(symbol, timeframe, limit=2, direction='latest')
                         if df is not None and not df.empty:
                             latest_candle = df.iloc[-1]
 
@@ -3301,15 +3305,17 @@ class AnnotationDashboard:
         for tf in required_tfs + optional_tfs:
             try:
                 # Fetch enough candles (600 for training, but accept less)
-                df = self.data_loader.get_data(
-                    symbol=symbol,
-                    timeframe=tf,
-                    end_time=dt,
-                    limit=600,
-                    direction='before'
-                ) if self.data_loader else None
+                df = None
+                if self.data_provider:
+                    df = self.data_provider.get_data_for_annotation(
+                        symbol=symbol,
+                        timeframe=tf,
+                        end_time=dt,
+                        limit=600,
+                        direction='before'
+                    )
 
-                # Fallback to data provider if data_loader not available
+                # Fallback to regular historical data if annotation method fails
                 if df is None or df.empty:
                     if self.data_provider:
                         df = self.data_provider.get_historical_data(symbol, tf, limit=600, refresh=False)
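This last hunk repeats a pattern used throughout the file: try the annotation-oriented fetch anchored at a timestamp, then fall back to get_historical_data. If the pattern keeps recurring, it could be wrapped in a small helper; a hypothetical sketch (the helper itself is not part of this commit):

    def fetch_training_candles(data_provider, symbol, timeframe, end_time, limit=600):
        """Prefer annotation data ending at end_time; fall back to plain historical data."""
        if data_provider is None:
            return None
        df = data_provider.get_data_for_annotation(
            symbol=symbol, timeframe=timeframe, end_time=end_time,
            limit=limit, direction='before'
        )
        if df is None or df.empty:
            # Fallback: recent history without the end_time anchor
            df = data_provider.get_historical_data(symbol, timeframe, limit=limit, refresh=False)
        return df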