unified cache. LLM report

Dobromir Popov
2025-10-20 11:16:27 +03:00
parent f464a412dc
commit ba8813f04f
6 changed files with 1943 additions and 1 deletion


@@ -55,6 +55,7 @@ from .williams_market_structure import WilliamsMarketStructure, PivotPoint, Tren
from .enhanced_cob_websocket import EnhancedCOBWebSocket, get_enhanced_cob_websocket
from .huobi_cob_websocket import get_huobi_cob_websocket
from .cob_integration import COBIntegration
from .report_data_crawler import ReportDataCrawler, ReportData
logger = logging.getLogger(__name__)
@@ -169,6 +170,9 @@ class DataProvider:
        self.enhanced_cob_websocket: Optional[EnhancedCOBWebSocket] = None
        self.websocket_tasks = {}

        # Report data crawler for comprehensive trading reports
        self.report_crawler: Optional[ReportDataCrawler] = None

        # COB collection state guard to prevent duplicate starts
        self._cob_started: bool = False
@@ -3516,3 +3520,71 @@ class DataProvider:
        except Exception as e:
            logger.error(f"Error creating transformer sequences for inference: {e}")
            return []
    # === REPORT DATA CRAWLER METHODS ===

    def _get_report_crawler(self) -> ReportDataCrawler:
        """Get or initialize the report data crawler"""
        if self.report_crawler is None:
            self.report_crawler = ReportDataCrawler(data_provider=self)
        return self.report_crawler

    def crawl_comprehensive_report(self, symbol: str) -> Optional[ReportData]:
        """Crawl comprehensive report data for a trading pair

        Args:
            symbol: Trading pair symbol (e.g., 'BTC/USDT')

        Returns:
            ReportData object with all required information, or None if failed
        """
        try:
            crawler = self._get_report_crawler()
            return crawler.crawl_report_data(symbol)
        except Exception as e:
            logger.error(f"Error crawling comprehensive report for {symbol}: {e}")
            return None

    def generate_trading_report(self, symbol: str) -> Optional[str]:
        """Generate formatted trading report for a symbol

        Args:
            symbol: Trading pair symbol (e.g., 'BTC/USDT')

        Returns:
            Formatted report string, or None if failed
        """
        try:
            crawler = self._get_report_crawler()
            return crawler.crawl_and_generate_report(symbol)
        except Exception as e:
            logger.error(f"Error generating trading report for {symbol}: {e}")
            return None

    def get_report_data_for_multiple_pairs(self, symbols: List[str]) -> Dict[str, Optional[ReportData]]:
        """Get report data for multiple trading pairs

        Args:
            symbols: List of trading pair symbols

        Returns:
            Dictionary mapping symbols to their ReportData objects
        """
        try:
            results = {}
            crawler = self._get_report_crawler()
            for symbol in symbols:
                try:
                    report_data = crawler.crawl_report_data(symbol)
                    results[symbol] = report_data
                    logger.info(f"Crawled report data for {symbol}: {'Success' if report_data else 'Failed'}")
                except Exception as e:
                    logger.error(f"Error crawling report data for {symbol}: {e}")
                    results[symbol] = None
            return results
        except Exception as e:
            logger.error(f"Error getting report data for multiple pairs: {e}")
            return {}
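
For reference, a minimal usage sketch of the new report methods follows. It is not part of the commit: the import path core.data_provider and the default DataProvider() construction are assumptions made for illustration; only the three method names and their signatures come from the diff above.

# Illustrative usage only; import path and default construction are assumptions.
from core.data_provider import DataProvider  # assumed module path

provider = DataProvider()

# Single pair: structured data vs. formatted text
report = provider.crawl_comprehensive_report("BTC/USDT")
print("structured report:", "ok" if report else "failed")

text = provider.generate_trading_report("BTC/USDT")
if text:
    print(text)

# Batch: one dict entry per symbol, None where crawling failed
results = provider.get_report_data_for_multiple_pairs(["BTC/USDT", "ETH/USDT"])
for symbol, data in results.items():
    print(f"{symbol}: {'ok' if data else 'failed'}")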