3 Commits

Author SHA1 Message Date
Dobromir Popov
d49a473ed6 configurable txt separator 2025-08-27 01:48:29 +03:00
Dobromir Popov
fc1ac2061f edit prediction penalties 2025-08-26 22:26:42 +03:00
Dobromir Popov
300cf3eb2c start txt export from dash 2025-08-26 21:31:18 +03:00
9 changed files with 284 additions and 76 deletions

View File

@@ -0,0 +1,24 @@
Example request using curl:
curl http://localhost:1234/v1/chat/completions \
-H "Content-Type: application/json" \
-d '{
"model": "deepseek-r1-0528-qwen3-8b",
"messages": [
{ "role": "system", "content": "what will be the next row in this sequence:
symbol MAIN SYMBOL (ETH) REF1 (BTC) REF2 (SPX) REF3 (SOL)
timeframe 1s 1m 1h 1d 1s 1s 1s
datapoint O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp
2025-01-15T10:00:00Z 3421.5 3421.75 3421.25 3421.6 125.4 2025-01-15T10:00:00Z 3422.1 3424.8 3420.5 3423.25 1245.7 2025-01-15T10:00:00Z 3420 3428.5 3418.75 3425.1 12847.2 2025-01-15T10:00:00Z 3415.25 3435.6 3410.8 3430.4 145238.6 2025-01-15T10:00:00Z 97850.2 97852.4 97848.1 97851.3 8.7 2025-01-15T10:00:00Z 5925.4 5926.1 5924.8 5925.7 0 2025-01-15T10:00:00Z 191.22 191.45 191.08 191.35 1247.3
2025-01-15T10:00:01Z 3421.6 3421.85 3421.45 3421.75 98.2 2025-01-15T10:01:00Z 3423.25 3425.9 3421.8 3424.6 1189.3 2025-01-15T11:00:00Z 3425.1 3432.2 3422.4 3429.8 11960.5 2025-01-16T10:00:00Z 3430.4 3445.2 3425.15 3440.85 138947.1 2025-01-15T10:00:01Z 97851.3 97853.8 97849.5 97852.9 9.1 2025-01-15T10:00:01Z 5925.7 5926.3 5925.2 5925.9 0 2025-01-15T10:00:01Z 191.35 191.58 191.15 191.48 1156.7
2025-01-15T10:00:02Z 3421.75 3421.95 3421.55 3421.8 110.6 2025-01-15T10:02:00Z 3424.6 3427.15 3423.4 3425.9 1356.8 2025-01-15T12:00:00Z 3429.8 3436.7 3427.2 3434.5 13205.9 2025-01-17T10:00:00Z 3440.85 3455.3 3438.9 3450.75 142568.3 2025-01-15T10:00:02Z 97852.9 97855.2 97850.7 97854.6 7.9 2025-01-15T10:00:02Z 5925.9 5926.5 5925.4 5926.1 0 2025-01-15T10:00:02Z 191.48 191.72 191.28 191.61 1298.4
2025-01-15T10:00:03Z 3421.8 3422.05 3421.65 3421.9 87.3 2025-01-15T10:03:00Z 3425.9 3428.4 3424.2 3427.1 1423.5 2025-01-15T13:00:00Z 3434.5 3441.8 3432.1 3438.2 14087.6 2025-01-18T10:00:00Z 3450.75 3465.4 3448.6 3460.2 149825.7 2025-01-15T10:00:03Z 97854.6 97857.1 97852.3 97856.8 8.4 2025-01-15T10:00:03Z 5926.1 5926.7 5925.6 5926.3 0 2025-01-15T10:00:03Z 191.61 191.85 191.42 191.74 1187.9
2025-01-15T10:00:04Z 3421.9 3422.15 3421.75 3422.0 134.7 2025-01-15T10:04:00Z 3427.1 3429.6 3425.8 3428.3 1298.2 2025-01-15T14:00:00Z 3438.2 3445.6 3436.4 3442.1 12734.8 2025-01-19T10:00:00Z 3460.2 3475.8 3457.4 3470.6 156742.4 2025-01-15T10:00:04Z 97856.8 97859.4 97854.9 97858.2 9.2 2025-01-15T10:00:04Z 5926.3 5926.9 5925.8 5926.5 0 2025-01-15T10:00:04Z 191.74 191.98 191.55 191.87 1342.6
2025-01-15T10:00:05Z 3422.0 3422.25 3421.85 3422.1 156.8 2025-01-15T10:05:00Z 3428.3 3430.8 3426.9 3429.5 1467.9 2025-01-15T15:00:00Z 3442.1 3449.3 3440.7 3446.8 11823.4 2025-01-20T10:00:00Z 3470.6 3485.2 3467.9 3480.1 163456.8 2025-01-15T10:00:05Z 97858.2 97860.7 97856.4 97859.8 8.8 2025-01-15T10:00:05Z 5926.5 5927.1 5926.0 5926.7 0 2025-01-15T10:00:05Z 191.87 192.11 191.68 192.0 1278.5
" },
{ "role": "user", "content": "2025-01-15T10:00:06Z" }
],
"temperature": 0.7,
"max_tokens": -1,
"stream": false
}'

View File

@@ -9,3 +9,8 @@ datapoint O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V
2025-01-15T10:00:03Z 3421.8 3422.05 3421.65 3421.9 87.3 2025-01-15T10:03:00Z 3425.9 3428.4 3424.2 3427.1 1423.5 2025-01-15T13:00:00Z 3434.5 3441.8 3432.1 3438.2 14087.6 2025-01-18T10:00:00Z 3450.75 3465.4 3448.6 3460.2 149825.7 2025-01-15T10:00:03Z 97854.6 97857.1 97852.3 97856.8 8.4 2025-01-15T10:00:03Z 5926.1 5926.7 5925.6 5926.3 0 2025-01-15T10:00:03Z 191.61 191.85 191.42 191.74 1187.9
2025-01-15T10:00:04Z 3421.9 3422.15 3421.75 3422.0 134.7 2025-01-15T10:04:00Z 3427.1 3429.6 3425.8 3428.3 1298.2 2025-01-15T14:00:00Z 3438.2 3445.6 3436.4 3442.1 12734.8 2025-01-19T10:00:00Z 3460.2 3475.8 3457.4 3470.6 156742.4 2025-01-15T10:00:04Z 97856.8 97859.4 97854.9 97858.2 9.2 2025-01-15T10:00:04Z 5926.3 5926.9 5925.8 5926.5 0 2025-01-15T10:00:04Z 191.74 191.98 191.55 191.87 1342.6
2025-01-15T10:00:05Z 3422.0 3422.25 3421.85 3422.1 156.8 2025-01-15T10:05:00Z 3428.3 3430.8 3426.9 3429.5 1467.9 2025-01-15T15:00:00Z 3442.1 3449.3 3440.7 3446.8 11823.4 2025-01-20T10:00:00Z 3470.6 3485.2 3467.9 3480.1 163456.8 2025-01-15T10:00:05Z 97858.2 97860.7 97856.4 97859.8 8.8 2025-01-15T10:00:05Z 5926.5 5927.1 5926.0 5926.7 0 2025-01-15T10:00:05Z 191.87 192.11 191.68 192.0 1278.5

View File

@@ -0,0 +1,4 @@
symbol MAIN SYMBOL (ETH) REF1 (BTC) REF2 (SPX) REF3 (SOL)
timeframe 1s 1m 1h 1d 1s 1s 1s
datapoint O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp O H L C V Timestamp
2025-08-26T21:29:44Z 0 0 0 0 0 2025-08-26T21:29:44Z 0 0 0 0 0 2025-08-26T21:29:44Z 0 0 0 0 0 2025-08-26T21:29:44Z 0 0 0 0 0 2025-08-26T21:29:44Z 0 0 0 0 0 2025-08-26T21:29:44Z 5500.00 5520.00 5495.00 5510.00 1000000.0 2025-08-26T21:29:44Z 0 0 0 0 0 2025-08-26T21:29:44Z

View File

@@ -302,12 +302,13 @@ class EnhancedRewardCalculator:
normalized_mse = min(mse / max_mse, 1.0)
mse_reward = np.exp(-5 * normalized_mse) # Exponential decay, range [exp(-5), 1]
# Direction accuracy bonus/penalty
# Direction accuracy bonus/penalty (stronger punishment for wrong direction)
direction_correct = (prediction.predicted_direction == actual_direction)
direction_bonus = 0.5 if direction_correct else -0.5
# Increase wrong-direction penalty; reduce correct-direction bonus slightly
direction_bonus = 0.25 if direction_correct else -1.0
# Confidence scaling
confidence_weight = prediction.confidence
# Confidence scaling (apply floor to avoid near-zero scaling)
confidence_weight = max(prediction.confidence, 0.2)
# Final reward calculation
base_reward = mse_reward + direction_bonus

View File

@@ -7042,7 +7042,9 @@ class TradingOrchestrator:
'main_symbol': self.symbol,
'ref1_symbol': self.ref_symbols[0] if self.ref_symbols else 'BTC/USDT',
'ref2_symbol': 'SPX', # Default to SPX for now
'export_dir': 'NN/training/samples/txt'
'ref3_symbol': 'SOL/USDT',
'export_dir': 'NN/training/samples/txt',
'export_format': 'PIPE'
}
self.text_export_manager.export_config.update(export_config)

View File

@@ -44,7 +44,9 @@ class TextDataExporter:
export_dir: str = "NN/training/samples/txt",
main_symbol: str = "ETH/USDT",
ref1_symbol: str = "BTC/USDT",
ref2_symbol: str = "SPX"):
ref2_symbol: str = "SPX",
ref3_symbol: str = "SOL/USDT",
export_format: str = "PIPE"):
"""
Initialize text data exporter
@@ -60,6 +62,8 @@ class TextDataExporter:
self.main_symbol = main_symbol
self.ref1_symbol = ref1_symbol
self.ref2_symbol = ref2_symbol
self.ref3_symbol = ref3_symbol
self.export_format = export_format.upper() if isinstance(export_format, str) else "PIPE"
# Timeframes to export
self.timeframes = ['1s', '1m', '1h', '1d']
@@ -77,7 +81,7 @@ class TextDataExporter:
os.makedirs(self.export_dir, exist_ok=True)
logger.info(f"Text Data Exporter initialized - Export dir: {self.export_dir}")
logger.info(f"Symbols: MAIN={main_symbol}, REF1={ref1_symbol}, REF2={ref2_symbol}")
logger.info(f"Symbols: MAIN={main_symbol}, REF1={ref1_symbol}, REF2={ref2_symbol}, REF3={ref3_symbol}")
def start(self):
"""Start the data export process"""
@@ -140,7 +144,8 @@ class TextDataExporter:
symbols = [
("MAIN", self.main_symbol),
("REF1", self.ref1_symbol),
("REF2", self.ref2_symbol)
("REF2", self.ref2_symbol),
("REF3", self.ref3_symbol)
]
for symbol_type, symbol in symbols:
@@ -168,11 +173,27 @@ class TextDataExporter:
def _get_latest_data(self, symbol: str, timeframe: str) -> Optional[MarketDataPoint]:
"""Get latest market data for symbol/timeframe"""
try:
if not hasattr(self.data_provider, 'get_latest_candle'):
return None
# Try to get latest candle data
candle = self.data_provider.get_latest_candle(symbol, timeframe)
candle = None
# Try direct method
if hasattr(self.data_provider, 'get_latest_candle'):
candle = self.data_provider.get_latest_candle(symbol, timeframe)
# Fallback to historical last row
if (not candle) and hasattr(self.data_provider, 'get_historical_data'):
try:
df = self.data_provider.get_historical_data(symbol, timeframe, limit=1)
if df is not None and not df.empty:
latest = df.iloc[-1]
ts = df.index[-1]
candle = {
'open': latest.get('open', 0),
'high': latest.get('high', 0),
'low': latest.get('low', 0),
'close': latest.get('close', 0),
'volume': latest.get('volume', 0),
'timestamp': ts.to_pydatetime() if hasattr(ts, 'to_pydatetime') else ts
}
except Exception as _ex:
logger.debug(f"hist fallback failed for {symbol} {timeframe}: {_ex}")
if not candle:
return None
@@ -205,8 +226,10 @@ class TextDataExporter:
grouped_data = self._group_data_by_symbol(export_data)
with open(filepath, 'w', encoding='utf-8') as txtfile:
# Write in the format specified in readme.md sample
self._write_tab_format(txtfile, grouped_data)
if self.export_format == 'TAB':
self._write_tab_format(txtfile, grouped_data)
else:
self._write_pipe_format(txtfile, grouped_data)
logger.debug(f"Exported {len(export_data)} data points to {filepath}")
@@ -280,56 +303,105 @@ class TextDataExporter:
rows.append(row)
return rows
def _write_tab_format(self, txtfile, grouped_data: Dict[str, Dict[str, Dict[str, Any]]]):
def _write_tab_format(self, txtfile, grouped_data: Dict[str, Dict[str, Dict[str, Any]]] ):
"""Write data in tab-separated format like readme.md sample"""
# Write header structure
txtfile.write("symbol\tMAIN SYMBOL (ETH)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tREF1 (BTC)\t\t\t\t\t\tREF2 (SPX)\t\t\t\t\t\tREF3 (SOL)\n")
txtfile.write("symbol\tMAIN SYMBOL (ETH)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tREF1 (BTC)\t\t\t\t\t\t\tREF2 (SPX)\t\t\t\t\t\t\tREF3 (SOL)\n")
txtfile.write("timeframe\t1s\t\t\t\t\t\t1m\t\t\t\t\t\t1h\t\t\t\t\t\t1d\t\t\t\t\t\t1s\t\t\t\t\t\t1s\t\t\t\t\t\t1s\n")
txtfile.write("datapoint\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\tO\tH\tL\tC\tV\tTimestamp\n")
# Write data row
row_parts = []
current_time = datetime.now()
# Build up to 300 rows using historical 1s data when available
# Collect timestamps from MAIN 1s historical data
timestamps: List[Any] = []
hist_df = None
try:
if hasattr(self.data_provider, 'get_historical_data'):
hist_df = self.data_provider.get_historical_data(self.main_symbol, '1s', limit=300)
if hist_df is not None and not hist_df.empty:
timestamps = list(hist_df.index[-300:])
except Exception as _e:
logger.debug(f"hist 1s not available: {_e}")
if not timestamps:
timestamps = [datetime.utcnow()]
# Timestamp first
row_parts.append(current_time.strftime("%Y-%m-%dT%H:%M:%SZ"))
# ETH data for all timeframes (1s, 1m, 1h, 1d)
# Fetch snapshots for non-1s
main_data = grouped_data.get('MAIN', {})
for timeframe in ['1s', '1m', '1h', '1d']:
data_point = main_data.get(timeframe)
if data_point:
main_1m = main_data.get('1m')
main_1h = main_data.get('1h')
main_1d = main_data.get('1d')
ref1_1s = grouped_data.get('REF1', {}).get('1s')
ref2_1s = grouped_data.get('REF2', {}).get('1s')
ref3_1s = grouped_data.get('REF3', {}).get('1s')
for ts in timestamps:
try:
ts_str = ts.strftime("%Y-%m-%dT%H:%M:%SZ") if hasattr(ts, 'strftime') else str(ts)
except Exception:
ts_str = ""
row_parts = [ts_str]
# MAIN 1s from hist row if possible
if hist_df is not None and ts in hist_df.index:
r = hist_df.loc[ts]
row_parts.extend([
f"{data_point['open']:.2f}",
f"{data_point['high']:.2f}",
f"{data_point['low']:.2f}",
f"{data_point['close']:.2f}",
f"{data_point['volume']:.1f}",
data_point['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
f"{float(r.get('open', 0) or 0):.2f}",
f"{float(r.get('high', 0) or 0):.2f}",
f"{float(r.get('low', 0) or 0):.2f}",
f"{float(r.get('close', 0) or 0):.2f}",
f"{float(r.get('volume', 0) or 0):.1f}",
ts_str
])
else:
row_parts.extend(["0", "0", "0", "0", "0", current_time.strftime("%Y-%m-%dT%H:%M:%SZ")])
snap = main_data.get('1s')
if snap:
row_parts.extend([
f"{snap['open']:.2f}", f"{snap['high']:.2f}", f"{snap['low']:.2f}", f"{snap['close']:.2f}", f"{snap['volume']:.1f}", ts_str
])
else:
row_parts.extend(["0", "0", "0", "0", "0", ts_str])
# REF1 (BTC), REF2 (SPX), REF3 (SOL) - 1s timeframe only
for ref_type in ['REF1', 'REF2']: # REF3 will be added by LLM proxy
ref_data = grouped_data.get(ref_type, {})
data_point = ref_data.get('1s')
if data_point:
row_parts.extend([
f"{data_point['open']:.2f}",
f"{data_point['high']:.2f}",
f"{data_point['low']:.2f}",
f"{data_point['close']:.2f}",
f"{data_point['volume']:.1f}",
data_point['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
])
else:
row_parts.extend(["0", "0", "0", "0", "0", current_time.strftime("%Y-%m-%dT%H:%M:%SZ")])
# MAIN 1m/1h/1d snapshots
for snap in [main_1m, main_1h, main_1d]:
if snap:
row_parts.extend([
f"{snap['open']:.2f}", f"{snap['high']:.2f}", f"{snap['low']:.2f}", f"{snap['close']:.2f}", f"{snap['volume']:.1f}", snap['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
])
else:
row_parts.extend(["0", "0", "0", "0", "0", ts_str])
# Add placeholder for REF3 (SOL) - will be filled by LLM proxy
row_parts.extend(["0", "0", "0", "0", "0", current_time.strftime("%Y-%m-%dT%H:%M:%SZ")])
# REF1/REF2/REF3 1s snapshots
for snap in [ref1_1s, ref2_1s, ref3_1s]:
if snap:
row_parts.extend([
f"{snap['open']:.2f}", f"{snap['high']:.2f}", f"{snap['low']:.2f}", f"{snap['close']:.2f}", f"{snap['volume']:.1f}", snap['timestamp'].strftime("%Y-%m-%dT%H:%M:%SZ")
])
else:
row_parts.extend(["0", "0", "0", "0", "0", ts_str])
txtfile.write("\t".join(row_parts) + "\n")
txtfile.write("\t".join(row_parts) + "\n")
def _write_pipe_format(self, txtfile, grouped_data: Dict[str, Dict[str, Dict[str, Any]]] ):
"""Write data in pipe-delimited console-friendly grid."""
from io import StringIO
buffer = StringIO()
self._write_tab_format(buffer, grouped_data)
content = buffer.getvalue().splitlines()
if not content:
return
headers = [line.split('\t') for line in content[:3]]
data_rows = [line.split('\t') for line in content[3:]]
def to_pipe(row: List[str]) -> str:
return "|" + "|".join(str(col) for col in row) + "|"
# Render header rows with separators
for hdr in headers:
txtfile.write(to_pipe(hdr) + "\n")
txtfile.write("|" + "|".join(["-" * max(3, len(str(c))) for c in hdr]) + "|\n")
# Render data
for row in data_rows:
txtfile.write(to_pipe(row) + "\n")
def get_current_filename(self) -> Optional[str]:
"""Get current export filename"""

View File

@@ -33,7 +33,9 @@ class TextExportManager:
'main_symbol': 'ETH/USDT',
'ref1_symbol': 'BTC/USDT',
'ref2_symbol': 'SPX', # Will need to be mapped to available data
'export_dir': 'NN/training/samples/txt'
'ref3_symbol': 'SOL/USDT',
'export_dir': 'NN/training/samples/txt',
'export_format': 'PIPE' # PIPE (default) or TAB
}
def initialize_exporter(self, config: Optional[Dict[str, Any]] = None):
@@ -54,7 +56,9 @@ class TextExportManager:
export_dir=self.export_config['export_dir'],
main_symbol=self.export_config['main_symbol'],
ref1_symbol=self.export_config['ref1_symbol'],
ref2_symbol=self.export_config['ref2_symbol']
ref2_symbol=self.export_config['ref2_symbol'],
ref3_symbol=self.export_config.get('ref3_symbol', 'SOL/USDT'),
export_format=self.export_config.get('export_format', 'PIPE')
)
logger.info("Text data exporter initialized successfully")
@@ -198,16 +202,8 @@ class EnhancedDataProviderWrapper:
def _get_spx_data(self) -> Optional[Dict[str, Any]]:
"""Get SPX data - placeholder for now"""
# For now, return mock SPX data
# In production, this would connect to a stock data provider
return {
'open': 5500.0,
'high': 5520.0,
'low': 5495.0,
'close': 5510.0,
'volume': 1000000,
'timestamp': datetime.now()
}
# No synthetic data allowed; return None if not available
return None
# Integration helper functions
def setup_text_export(data_provider=None, orchestrator=None, config: Optional[Dict[str, Any]] = None) -> TextExportManager:

View File

@@ -1921,6 +1921,86 @@ class CleanTradingDashboard:
logger.error(f"Error closing position manually: {e}")
return [html.I(className="fas fa-times me-1"), "CLOSE"]
# Text Export Controls
@self.app.callback(
Output('text-export-status', 'children'),
[Input('start-text-export-btn', 'n_clicks'),
Input('stop-text-export-btn', 'n_clicks')],
prevent_initial_call=True
)
def handle_text_export_controls(start_clicks, stop_clicks):
    """Handle text export start/stop buttons."""
    ctx = dash.callback_context
    if not ctx.triggered:
        raise PreventUpdate
    # The triggering prop id looks like "<component-id>.n_clicks".
    button_id = ctx.triggered[0]['prop_id'].split('.')[0]
    try:
        if button_id == 'start-text-export-btn' and start_clicks:
            if self.orchestrator.start_text_export():
                logger.info("Text export started from dashboard")
                return "Export: Running"
            logger.error("Failed to start text export")
            return "Export: Failed to start"
        if button_id == 'stop-text-export-btn' and stop_clicks:
            if self.orchestrator.stop_text_export():
                logger.info("Text export stopped from dashboard")
                return "Export: Stopped"
            logger.error("Failed to stop text export")
            return "Export: Failed to stop"
    except Exception as e:
        logger.error(f"Error in text export controls: {e}")
        return f"Export: Error - {str(e)}"
    # Unrecognized trigger or zero click count: leave the status untouched.
    raise PreventUpdate
# LLM Proxy Controls
@self.app.callback(
Output('llm-status', 'children'),
[Input('start-llm-btn', 'n_clicks'),
Input('stop-llm-btn', 'n_clicks')],
prevent_initial_call=True
)
def handle_llm_controls(start_clicks, stop_clicks):
    """Handle LLM proxy start/stop buttons."""
    ctx = dash.callback_context
    if not ctx.triggered:
        raise PreventUpdate
    # The triggering prop id looks like "<component-id>.n_clicks".
    button_id = ctx.triggered[0]['prop_id'].split('.')[0]
    try:
        if button_id == 'start-llm-btn' and start_clicks:
            if self.orchestrator.start_llm_proxy():
                logger.info("LLM proxy started from dashboard")
                return "LLM: Running"
            logger.error("Failed to start LLM proxy")
            return "LLM: Failed to start"
        if button_id == 'stop-llm-btn' and stop_clicks:
            if self.orchestrator.stop_llm_proxy():
                logger.info("LLM proxy stopped from dashboard")
                return "LLM: Stopped"
            logger.error("Failed to stop LLM proxy")
            return "LLM: Failed to stop"
    except Exception as e:
        logger.error(f"Error in LLM controls: {e}")
        return f"LLM: Error - {str(e)}"
    # Unrecognized trigger or zero click count: leave the status untouched.
    raise PreventUpdate
# Leverage slider callback
@self.app.callback(
Output('leverage-display', 'children'),
@@ -2899,7 +2979,7 @@ class CleanTradingDashboard:
hold_predictions = []
for pred in dqn_predictions[-30:]: # Last 30 DQN predictions
action = pred.get('action', 2) # 0=BUY, 1=SELL, 2=HOLD
action = pred.get('action', 2) # 0=SELL, 1=HOLD, 2=BUY
confidence = pred.get('confidence', 0)
timestamp = pred.get('timestamp', datetime.now())
price = pred.get('price', 0)
@@ -2916,9 +2996,10 @@ class CleanTradingDashboard:
'q_values': pred.get('q_values', [0, 0, 0])
}
if action == 0: # BUY
# Correct mapping: 2 -> BUY, 0 -> SELL, 1 -> HOLD
if action == 2: # BUY
buy_predictions.append(pred_data)
elif action == 1: # SELL
elif action == 0: # SELL
sell_predictions.append(pred_data)
else: # HOLD
hold_predictions.append(pred_data)
@@ -3329,10 +3410,6 @@ class CleanTradingDashboard:
try:
predictions = []
# Generate sample predictions if needed (for display purposes)
if hasattr(self.orchestrator, 'generate_sample_predictions_for_display'):
self.orchestrator.generate_sample_predictions_for_display(symbol)
# Get REAL predictions from orchestrator
if hasattr(self.orchestrator, 'recent_dqn_predictions'):
predictions.extend(list(self.orchestrator.recent_dqn_predictions.get(symbol, [])))

View File

@@ -320,6 +320,33 @@ class DashboardLayoutManager:
html.I(className="fas fa-arrows-rotate me-1"),
"Sync Positions/Orders"
], id="manual-sync-btn", className="btn btn-primary btn-sm w-100 mt-2"),
# Text Export Controls
html.Hr(className="my-2"),
html.Small("Text Export & LLM", className="text-muted d-block mb-1"),
html.Div([
html.Button([
html.I(className="fas fa-file-export me-1"),
"Start Text Export"
], id="start-text-export-btn", className="btn btn-success btn-sm me-1", style={"fontSize": "10px"}),
html.Button([
html.I(className="fas fa-stop me-1"),
"Stop"
], id="stop-text-export-btn", className="btn btn-danger btn-sm", style={"fontSize": "10px"})
], className="d-flex mb-2"),
html.Div([
html.Button([
html.I(className="fas fa-robot me-1"),
"Start LLM"
], id="start-llm-btn", className="btn btn-info btn-sm me-1", style={"fontSize": "10px"}),
html.Button([
html.I(className="fas fa-stop me-1"),
"Stop"
], id="stop-llm-btn", className="btn btn-warning btn-sm", style={"fontSize": "10px"})
], className="d-flex mb-2"),
html.Small(id="text-export-status", children="Export: Stopped", className="text-muted d-block"),
html.Small(id="llm-status", children="LLM: Stopped", className="text-muted d-block"),
html.Hr(className="my-2"),
html.Small("System Status", className="text-muted d-block mb-1"),
html.Div([