start txt export from dash

Dobromir Popov
2025-08-26 21:31:18 +03:00
parent b404191ffa
commit 300cf3eb2c
8 changed files with 239 additions and 59 deletions


@@ -7042,6 +7042,7 @@ class TradingOrchestrator:
             'main_symbol': self.symbol,
             'ref1_symbol': self.ref_symbols[0] if self.ref_symbols else 'BTC/USDT',
             'ref2_symbol': 'SPX',  # Default to SPX for now
+            'ref3_symbol': 'SOL/USDT',
             'export_dir': 'NN/training/samples/txt'
         }
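For orientation: this dict feeds the exporter configuration consumed by TextExportManager further down. A minimal wiring sketch through the setup_text_export helper defined at the end of this commit (the provider and orchestrator instances are hypothetical placeholders):

    # hypothetical wiring; mirrors the config keys above
    config = {
        'main_symbol': 'ETH/USDT',
        'ref1_symbol': 'BTC/USDT',
        'ref2_symbol': 'SPX',
        'ref3_symbol': 'SOL/USDT',
        'export_dir': 'NN/training/samples/txt',
    }
    manager = setup_text_export(data_provider=provider, orchestrator=orchestrator, config=config)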


@@ -44,7 +44,8 @@ class TextDataExporter:
                  export_dir: str = "NN/training/samples/txt",
                  main_symbol: str = "ETH/USDT",
                  ref1_symbol: str = "BTC/USDT",
-                 ref2_symbol: str = "SPX"):
+                 ref2_symbol: str = "SPX",
+                 ref3_symbol: str = "SOL/USDT"):
         """
         Initialize text data exporter
@@ -60,6 +61,7 @@ class TextDataExporter:
         self.main_symbol = main_symbol
         self.ref1_symbol = ref1_symbol
         self.ref2_symbol = ref2_symbol
+        self.ref3_symbol = ref3_symbol

         # Timeframes to export
         self.timeframes = ['1s', '1m', '1h', '1d']
@@ -77,7 +79,7 @@ class TextDataExporter:
         os.makedirs(self.export_dir, exist_ok=True)

         logger.info(f"Text Data Exporter initialized - Export dir: {self.export_dir}")
-        logger.info(f"Symbols: MAIN={main_symbol}, REF1={ref1_symbol}, REF2={ref2_symbol}")
+        logger.info(f"Symbols: MAIN={main_symbol}, REF1={ref1_symbol}, REF2={ref2_symbol}, REF3={ref3_symbol}")

     def start(self):
         """Start the data export process"""
@@ -140,7 +142,8 @@ class TextDataExporter:
         symbols = [
             ("MAIN", self.main_symbol),
             ("REF1", self.ref1_symbol),
-            ("REF2", self.ref2_symbol)
+            ("REF2", self.ref2_symbol),
+            ("REF3", self.ref3_symbol)
         ]

         for symbol_type, symbol in symbols:
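With the constructor and collection loop updated, the exporter now tracks four streams. A minimal instantiation sketch, assuming the constructor's first parameter is the data provider (implied by the self.data_provider usage, though not visible in this hunk):

    # hypothetical usage; 'provider' must expose get_latest_candle and/or get_historical_data
    exporter = TextDataExporter(
        data_provider=provider,
        export_dir="NN/training/samples/txt",
        main_symbol="ETH/USDT",
        ref1_symbol="BTC/USDT",
        ref2_symbol="SPX",
        ref3_symbol="SOL/USDT")
    exporter.start()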
@@ -168,11 +171,27 @@ class TextDataExporter:
     def _get_latest_data(self, symbol: str, timeframe: str) -> Optional[MarketDataPoint]:
         """Get latest market data for symbol/timeframe"""
         try:
-            if not hasattr(self.data_provider, 'get_latest_candle'):
-                return None
-
-            # Try to get latest candle data
-            candle = self.data_provider.get_latest_candle(symbol, timeframe)
+            candle = None
+            # Try direct method
+            if hasattr(self.data_provider, 'get_latest_candle'):
+                candle = self.data_provider.get_latest_candle(symbol, timeframe)
+            # Fallback to historical last row
+            if (not candle) and hasattr(self.data_provider, 'get_historical_data'):
+                try:
+                    df = self.data_provider.get_historical_data(symbol, timeframe, limit=1)
+                    if df is not None and not df.empty:
+                        latest = df.iloc[-1]
+                        ts = df.index[-1]
+                        candle = {
+                            'open': latest.get('open', 0),
+                            'high': latest.get('high', 0),
+                            'low': latest.get('low', 0),
+                            'close': latest.get('close', 0),
+                            'volume': latest.get('volume', 0),
+                            'timestamp': ts.to_pydatetime() if hasattr(ts, 'to_pydatetime') else ts
+                        }
+                except Exception as _ex:
+                    logger.debug(f"hist fallback failed for {symbol} {timeframe}: {_ex}")

             if not candle:
                 return None
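The fallback keeps the export loop alive when a provider implements only bulk history. A stand-in provider that exercises exactly that path (hypothetical, for illustration; it assumes pandas DataFrames with OHLCV columns and a DatetimeIndex, which the iloc/index access above implies):

    import pandas as pd
    from datetime import datetime, timezone

    class StubProvider:
        # deliberately omits get_latest_candle so _get_latest_data
        # falls through to get_historical_data
        def get_historical_data(self, symbol, timeframe, limit=1):
            idx = pd.DatetimeIndex([datetime.now(timezone.utc)])
            return pd.DataFrame({'open': [100.0], 'high': [101.0],
                                 'low': [99.5], 'close': [100.5],
                                 'volume': [12.3]}, index=idx)

Note that (not candle) also triggers the fallback when get_latest_candle returns an empty dict, which appears intended.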
@@ -287,49 +306,75 @@ class TextDataExporter:
             txtfile.write("timeframe\t1s\t\t\t\t\t\t1m\t\t\t\t\t\t1h\t\t\t\t\t\t1d\t\t\t\t\t\t1s\t\t\t\t\t\t1s\t\t\t\t\t\t1s\n")
             txtfile.write("datapoint\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\n")

-            # Write data row
-            row_parts = []
-            current_time = datetime.now()
-
-            # Timestamp first
-            row_parts.append(current_time.strftime("%Y-%m-%dT%H:%M:%SZ"))
-
-            # ETH data for all timeframes (1s, 1m, 1h, 1d)
+            # Build up to 300 rows using historical 1s data when available
+            # Collect timestamps from MAIN 1s historical data
+            timestamps: List[Any] = []
+            hist_df = None
+            try:
+                if hasattr(self.data_provider, 'get_historical_data'):
+                    hist_df = self.data_provider.get_historical_data(self.main_symbol, '1s', limit=300)
+                    if hist_df is not None and not hist_df.empty:
+                        timestamps = list(hist_df.index[-300:])
+            except Exception as _e:
+                logger.debug(f"hist 1s not available: {_e}")
+            if not timestamps:
+                timestamps = [datetime.utcnow()]
+
+            # Fetch snapshots for non-1s
             main_data = grouped_data.get('MAIN', {})
-            for timeframe in ['1s', '1m', '1h', '1d']:
-                data_point = main_data.get(timeframe)
-                if data_point:
+            main_1m = main_data.get('1m')
+            main_1h = main_data.get('1h')
+            main_1d = main_data.get('1d')
+            ref1_1s = grouped_data.get('REF1', {}).get('1s')
+            ref2_1s = grouped_data.get('REF2', {}).get('1s')
+            ref3_1s = grouped_data.get('REF3', {}).get('1s')
+
+            for ts in timestamps:
+                try:
+                    ts_str = ts.strftime("%Y-%m-%dT%H:%M:%SZ") if hasattr(ts, 'strftime') else str(ts)
+                except Exception:
+                    ts_str = ""
+                row_parts = [ts_str]
+                # MAIN 1s from hist row if possible
+                if hist_df is not None and ts in hist_df.index:
+                    r = hist_df.loc[ts]
                     row_parts.extend([
-                        f"{data_point['open']:.2f}",
-                        f"{data_point['high']:.2f}",
-                        f"{data_point['low']:.2f}",
-                        f"{data_point['close']:.2f}",
-                        f"{data_point['volume']:.1f}",
-                        data_point['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
+                        f"{float(r.get('open', 0) or 0):.2f}",
+                        f"{float(r.get('high', 0) or 0):.2f}",
+                        f"{float(r.get('low', 0) or 0):.2f}",
+                        f"{float(r.get('close', 0) or 0):.2f}",
+                        f"{float(r.get('volume', 0) or 0):.1f}",
+                        ts_str
                     ])
                 else:
-                    row_parts.extend(["0", "0", "0", "0", "0", current_time.strftime("%Y-%m-%dT%H:%M:%SZ")])
-
-            # REF1 (BTC), REF2 (SPX), REF3 (SOL) - 1s timeframe only
-            for ref_type in ['REF1', 'REF2']:  # REF3 will be added by LLM proxy
-                ref_data = grouped_data.get(ref_type, {})
-                data_point = ref_data.get('1s')
-                if data_point:
-                    row_parts.extend([
-                        f"{data_point['open']:.2f}",
-                        f"{data_point['high']:.2f}",
-                        f"{data_point['low']:.2f}",
-                        f"{data_point['close']:.2f}",
-                        f"{data_point['volume']:.1f}",
-                        data_point['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
-                    ])
-                else:
-                    row_parts.extend(["0", "0", "0", "0", "0", current_time.strftime("%Y-%m-%dT%H:%M:%SZ")])
-
-            # Add placeholder for REF3 (SOL) - will be filled by LLM proxy
-            row_parts.extend(["0", "0", "0", "0", "0", current_time.strftime("%Y-%m-%dT%H:%M:%SZ")])
-
-            txtfile.write("\t".join(row_parts) + "\n")
+                    snap = main_data.get('1s')
+                    if snap:
+                        row_parts.extend([
+                            f"{snap['open']:.2f}", f"{snap['high']:.2f}", f"{snap['low']:.2f}", f"{snap['close']:.2f}", f"{snap['volume']:.1f}", ts_str
+                        ])
+                    else:
+                        row_parts.extend(["0", "0", "0", "0", "0", ts_str])
+
+                # MAIN 1m/1h/1d snapshots
+                for snap in [main_1m, main_1h, main_1d]:
+                    if snap:
+                        row_parts.extend([
+                            f"{snap['open']:.2f}", f"{snap['high']:.2f}", f"{snap['low']:.2f}", f"{snap['close']:.2f}", f"{snap['volume']:.1f}", snap['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
+                        ])
+                    else:
+                        row_parts.extend(["0", "0", "0", "0", "0", ts_str])
+
+                # REF1/REF2/REF3 1s snapshots
+                for snap in [ref1_1s, ref2_1s, ref3_1s]:
+                    if snap:
+                        row_parts.extend([
+                            f"{snap['open']:.2f}", f"{snap['high']:.2f}", f"{snap['low']:.2f}", f"{snap['close']:.2f}", f"{snap['volume']:.1f}", snap['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
+                        ])
+                    else:
+                        row_parts.extend(["0", "0", "0", "0", "0", ts_str])
+
+                txtfile.write("\t".join(row_parts) + "\n")

     def get_current_filename(self) -> Optional[str]:
         """Get current export filename"""

@@ -33,6 +33,7 @@ class TextExportManager:
             'main_symbol': 'ETH/USDT',
             'ref1_symbol': 'BTC/USDT',
             'ref2_symbol': 'SPX',  # Will need to be mapped to available data
+            'ref3_symbol': 'SOL/USDT',
             'export_dir': 'NN/training/samples/txt'
         }
@@ -54,7 +55,8 @@ class TextExportManager:
                 export_dir=self.export_config['export_dir'],
                 main_symbol=self.export_config['main_symbol'],
                 ref1_symbol=self.export_config['ref1_symbol'],
-                ref2_symbol=self.export_config['ref2_symbol']
+                ref2_symbol=self.export_config['ref2_symbol'],
+                ref3_symbol=self.export_config.get('ref3_symbol', 'SOL/USDT')
             )

             logger.info("Text data exporter initialized successfully")
@@ -198,16 +200,8 @@ class EnhancedDataProviderWrapper:
     def _get_spx_data(self) -> Optional[Dict[str, Any]]:
         """Get SPX data - placeholder for now"""
-        # For now, return mock SPX data
-        # In production, this would connect to a stock data provider
-        return {
-            'open': 5500.0,
-            'high': 5520.0,
-            'low': 5495.0,
-            'close': 5510.0,
-            'volume': 1000000,
-            'timestamp': datetime.now()
-        }
+        # No synthetic data allowed; return None if not available
+        return None

 # Integration helper functions

 def setup_text_export(data_provider=None, orchestrator=None, config: Optional[Dict[str, Any]] = None) -> TextExportManager:
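With the mock removed, REF2 produces no snapshot and its columns take the zero-filled branch until a real stock-data source is wired in. A hedged sketch of such a source behind the same contract (yfinance is an assumption here, not a dependency visible in this commit):

    # hypothetical replacement body for _get_spx_data; the yfinance API is an assumption
    def _get_spx_data(self):
        try:
            import yfinance as yf
            bar = yf.Ticker("^GSPC").history(period="1d", interval="1m").iloc[-1]
            return {'open': float(bar['Open']), 'high': float(bar['High']),
                    'low': float(bar['Low']), 'close': float(bar['Close']),
                    'volume': float(bar['Volume']),
                    'timestamp': bar.name.to_pydatetime()}
        except Exception:
            return None  # preserve the no-synthetic-data contract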