checkpoint info in dropdowns

Author: Dobromir Popov
Date: 2025-11-11 11:20:23 +02:00
parent 27039c70a3
commit 6c1ca8baf4
3 changed files with 121 additions and 9 deletions


@@ -785,15 +785,27 @@ class TradingOrchestrator:
                 checkpoint = torch.load(checkpoint_path, map_location=self.device)
                 self.primary_transformer.load_state_dict(checkpoint.get('model_state_dict', checkpoint))
-                # Update checkpoint info
+                # Extract checkpoint metrics for display
+                epoch = checkpoint.get('epoch', 0)
+                loss = checkpoint.get('loss', 0.0)
+                accuracy = checkpoint.get('accuracy', 0.0)
+                learning_rate = checkpoint.get('learning_rate', 0.0)
+                # Update checkpoint info with detailed metrics
                 self.transformer_checkpoint_info = {
                     'path': checkpoint_path,
                     'filename': os.path.basename(checkpoint_path),
                     'metadata': checkpoint_metadata,
-                    'loaded_at': datetime.now().isoformat()
+                    'loaded_at': datetime.now().isoformat(),
+                    'epoch': epoch,
+                    'loss': loss,
+                    'accuracy': accuracy,
+                    'learning_rate': learning_rate,
+                    'status': 'loaded'
                 }
-                logger.info(f"Transformer checkpoint loaded from: {checkpoint_path}")
-                logger.info(f"Checkpoint metrics: {checkpoint_metadata.get('performance_metrics', {})}")
+                logger.info(f"✅ Loaded transformer checkpoint: {os.path.basename(checkpoint_path)}")
+                logger.info(f"   Epoch: {epoch}, Loss: {loss:.6f}, Accuracy: {accuracy:.2%}, LR: {learning_rate:.6f}")
                 checkpoint_loaded = True
             else:
                 logger.info("No transformer checkpoint found - using fresh model")