deduplicate model storage
@@ -346,11 +346,58 @@ class TradingOrchestrator:
             logger.warning("Extrema trainer not available")
             self.extrema_trainer = None
 
-        # COB RL Model REMOVED - See COB_MODEL_ARCHITECTURE_DOCUMENTATION.md
-        # Reason: Need quality COB data first before evaluating massive parameter benefit
-        # Will recreate improved version when COB data pipeline is fixed
-        logger.info("COB RL model removed - focusing on COB data quality first")
-        self.cob_rl_agent = None
+        # Initialize COB RL Model - UNIFIED with ModelManager
+        try:
+            from NN.models.cob_rl_model import COBRLModelInterface
+
+            # Initialize COB RL model using unified approach
+            self.cob_rl_agent = COBRLModelInterface(
+                model_checkpoint_dir="@checkpoints/cob_rl",
+                device='cuda' if torch.cuda.is_available() else 'cpu'
+            )
+
+            # Add COB RL to model states tracking
+            self.model_states['cob_rl'] = {
+                'initial_loss': None,
+                'current_loss': None,
+                'best_loss': None,
+                'checkpoint_loaded': False
+            }
+
+            # Load best checkpoint using unified ModelManager
+            checkpoint_loaded = False
+            try:
+                from NN.training.model_manager import load_best_checkpoint
+                result = load_best_checkpoint("cob_rl_agent")
+                if result:
+                    file_path, metadata = result
+                    self.model_states['cob_rl']['initial_loss'] = metadata.loss
+                    self.model_states['cob_rl']['current_loss'] = metadata.loss
+                    self.model_states['cob_rl']['best_loss'] = metadata.loss
+                    self.model_states['cob_rl']['checkpoint_loaded'] = True
+                    self.model_states['cob_rl']['checkpoint_filename'] = metadata.checkpoint_id
+                    checkpoint_loaded = True
+                    loss_str = f"{metadata.loss:.4f}" if metadata.loss is not None else "N/A"
+                    logger.info(f"COB RL checkpoint loaded: {metadata.checkpoint_id} (loss={loss_str})")
+            except Exception as e:
+                logger.warning(f"Error loading COB RL checkpoint: {e}")
+
+            if not checkpoint_loaded:
+                # New model - no synthetic data, start fresh
+                self.model_states['cob_rl']['initial_loss'] = None
+                self.model_states['cob_rl']['current_loss'] = None
+                self.model_states['cob_rl']['best_loss'] = None
+                self.model_states['cob_rl']['checkpoint_filename'] = 'none (fresh start)'
+                logger.info("COB RL starting fresh - no checkpoint found")
+
+            logger.info("COB RL Agent initialized and integrated with unified ModelManager")
+            logger.info(" - Uses @checkpoints/ directory structure")
+            logger.info(" - Follows same load/save/checkpoint flow as other models")
+            logger.info(" - Integrated with enhanced real-time training system")
+
+        except ImportError as e:
+            logger.warning(f"COB RL Model not available: {e}")
+            self.cob_rl_agent = None
 
         # Initialize TRANSFORMER Model
         try:
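Note on the checkpoint contract: the loading path in this commit assumes NN.training.model_manager.load_best_checkpoint returns either None or a (file_path, metadata) tuple whose metadata exposes loss and checkpoint_id. A minimal sketch of that assumed interface, for orientation only (the dataclass shape is hypothetical, inferred from the attribute accesses in the diff above):

    # Hypothetical sketch of the contract the orchestrator consumes;
    # not the actual model_manager implementation.
    from dataclasses import dataclass
    from typing import Optional, Tuple

    @dataclass
    class CheckpointMetadata:
        checkpoint_id: str          # id/filename recorded in model_states
        loss: Optional[float]       # may be None if no loss was recorded

    def load_best_checkpoint(model_name: str) -> Optional[Tuple[str, CheckpointMetadata]]:
        """Return (file_path, metadata) for the best saved checkpoint, or None if none exists."""
        ...

The inner try/except around this call means a missing or corrupt checkpoint degrades to the fresh-start branch instead of aborting agent initialization.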