better testcase management, script fix
@@ -36,7 +36,8 @@ class TradeDataManager:
     def _setup_directory_structure(self):
         """Setup the testcases directory structure"""
         try:
-            for case_type in ['positive', 'negative']:
+            # Create base directories including new 'base' directory for temporary trades
+            for case_type in ['positive', 'negative', 'base']:
                 for subdir in ['cases', 'sessions', 'models']:
                     dir_path = os.path.join(self.base_dir, case_type, subdir)
                     os.makedirs(dir_path, exist_ok=True)
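With this change, _setup_directory_structure provisions a third top-level tree alongside positive and negative, so the layout under self.base_dir (whose concrete value depends on how the TradeDataManager instance is configured) becomes roughly:

    positive/{cases,sessions,models}
    negative/{cases,sessions,models}
    base/{cases,sessions,models}

The base tree is a temporary holding area for opening trades whose outcome is not yet known; the methods added in the next hunk promote those cases to positive or negative once the position closes.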
@@ -543,4 +544,139 @@ class TradeDataManager:
             return []
         except Exception as e:
             logger.debug(f"Error getting price history: {e}")
             return []
+        return []
+
+    def store_base_trade_for_later_classification(self, trade_record: Dict[str, Any]) -> Optional[str]:
+        """Store opening trade as BASE case until position is closed and P&L is known"""
+        try:
+            # Store in base directory (temporary)
+            case_dir = os.path.join(self.base_dir, "base")
+            cases_dir = os.path.join(case_dir, "cases")
+
+            # Create unique case ID for base case
+            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+            symbol_clean = trade_record['symbol'].replace('/', '')
+            base_case_id = f"base_{timestamp}_{symbol_clean}_{trade_record['side']}"
+
+            # Store comprehensive case data as pickle
+            case_filepath = os.path.join(cases_dir, f"{base_case_id}.pkl")
+            with open(case_filepath, 'wb') as f:
+                pickle.dump(trade_record, f)
+
+            # Store JSON summary
+            json_filepath = os.path.join(cases_dir, f"{base_case_id}.json")
+            json_summary = {
+                'case_id': base_case_id,
+                'timestamp': trade_record.get('timestamp_entry', datetime.now()).isoformat() if hasattr(trade_record.get('timestamp_entry'), 'isoformat') else str(trade_record.get('timestamp_entry')),
+                'symbol': trade_record['symbol'],
+                'side': trade_record['side'],
+                'entry_price': trade_record['entry_price'],
+                'leverage': trade_record.get('leverage', 1),
+                'quantity': trade_record.get('quantity', 0),
+                'trade_status': 'OPENING',
+                'confidence': trade_record.get('confidence', 0),
+                'trade_type': trade_record.get('trade_type', 'manual'),
+                'training_ready': False,  # Not ready until closed
+                'feature_counts': {
+                    'market_state': len(trade_record.get('model_inputs_at_entry', {})),
+                    'cob_features': len(trade_record.get('cob_snapshot_at_entry', {}))
+                }
+            }
+
+            with open(json_filepath, 'w') as f:
+                json.dump(json_summary, f, indent=2, default=str)
+
+            logger.info(f"Stored base case for later classification: {base_case_id}")
+            return base_case_id
+
+        except Exception as e:
+            logger.error(f"Error storing base trade: {e}")
+            return None
+
+    def move_base_trade_to_outcome(self, base_case_id: str, closing_trade_record: Dict[str, Any], is_positive: bool) -> Optional[str]:
+        """Move base case to positive/negative based on trade outcome"""
+        try:
+            # Load the original base case
+            base_case_path = os.path.join(self.base_dir, "base", "cases", f"{base_case_id}.pkl")
+            base_json_path = os.path.join(self.base_dir, "base", "cases", f"{base_case_id}.json")
+
+            if not os.path.exists(base_case_path):
+                logger.warning(f"Base case not found: {base_case_id}")
+                return None
+
+            # Load opening trade data
+            with open(base_case_path, 'rb') as f:
+                opening_trade_data = pickle.load(f)
+
+            # Combine opening and closing data
+            combined_trade_record = {
+                **opening_trade_data,    # Opening snapshot
+                **closing_trade_record,  # Closing snapshot
+                'opening_data': opening_trade_data,
+                'closing_data': closing_trade_record,
+                'trade_complete': True
+            }
+
+            # Determine target directory
+            case_type = "positive" if is_positive else "negative"
+            case_dir = os.path.join(self.base_dir, case_type)
+            cases_dir = os.path.join(case_dir, "cases")
+
+            # Create new case ID for final outcome
+            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+            symbol_clean = closing_trade_record['symbol'].replace('/', '')
+            pnl_leveraged = closing_trade_record.get('pnl_leveraged', 0)
+            final_case_id = f"{case_type}_{timestamp}_{symbol_clean}_pnl_{pnl_leveraged:.4f}".replace('.', 'p').replace('-', 'neg')
+
+            # Store final case data
+            final_case_filepath = os.path.join(cases_dir, f"{final_case_id}.pkl")
+            with open(final_case_filepath, 'wb') as f:
+                pickle.dump(combined_trade_record, f)
+
+            # Store JSON summary
+            final_json_filepath = os.path.join(cases_dir, f"{final_case_id}.json")
+            json_summary = {
+                'case_id': final_case_id,
+                'original_base_case_id': base_case_id,
+                'timestamp_opened': str(opening_trade_data.get('timestamp_entry', '')),
+                'timestamp_closed': str(closing_trade_record.get('timestamp_exit', '')),
+                'symbol': closing_trade_record['symbol'],
+                'side_opened': opening_trade_data['side'],
+                'side_closed': closing_trade_record['side'],
+                'entry_price': opening_trade_data['entry_price'],
+                'exit_price': closing_trade_record['exit_price'],
+                'leverage': closing_trade_record.get('leverage', 1),
+                'quantity': closing_trade_record.get('quantity', 0),
+                'pnl_raw': closing_trade_record.get('pnl_raw', 0),
+                'pnl_leveraged': pnl_leveraged,
+                'trade_type': closing_trade_record.get('trade_type', 'manual'),
+                'training_ready': True,
+                'complete_trade_pair': True,
+                'feature_counts': {
+                    'opening_market_state': len(opening_trade_data.get('model_inputs_at_entry', {})),
+                    'opening_cob_features': len(opening_trade_data.get('cob_snapshot_at_entry', {})),
+                    'closing_market_state': len(closing_trade_record.get('model_inputs_at_exit', {})),
+                    'closing_cob_features': len(closing_trade_record.get('cob_snapshot_at_exit', {}))
+                }
+            }
+
+            with open(final_json_filepath, 'w') as f:
+                json.dump(json_summary, f, indent=2, default=str)
+
+            # Update case index
+            self._update_case_index(case_dir, final_case_id, json_summary, case_type)
+
+            # Clean up base case files
+            try:
+                os.remove(base_case_path)
+                os.remove(base_json_path)
+                logger.debug(f"Cleaned up base case files: {base_case_id}")
+            except Exception as e:
+                logger.warning(f"Error cleaning up base case files: {e}")
+
+            logger.info(f"Moved base case to {case_type}: {final_case_id}")
+            return final_case_id
+
+        except Exception as e:
+            logger.error(f"Error moving base trade to outcome: {e}")
+            return None
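Taken together, the two new methods give trades a two-phase lifecycle: the entry snapshot is parked as a base case when a position opens, and it is promoted to a positive or negative case once the position closes and the P&L is known. A minimal usage sketch follows; only the dictionary keys read by the two methods ('symbol', 'side', 'entry_price', 'exit_price' and the optional .get() fields) come from this commit, while the import path, constructor arguments, and all record values are illustrative assumptions.

from datetime import datetime

from core.trade_data_manager import TradeDataManager  # module path is an assumption; adjust to this repo

manager = TradeDataManager()  # assumes a default constructor; adjust to your configuration

# Phase 1: position opened -> park the entry snapshot as a temporary "base" case
opening_record = {
    'symbol': 'ETH/USDT',          # illustrative symbol
    'side': 'BUY',
    'entry_price': 2450.0,
    'leverage': 10,
    'quantity': 0.1,
    'timestamp_entry': datetime.now(),
    'model_inputs_at_entry': {},   # market-state features captured at entry
    'cob_snapshot_at_entry': {},   # order-book features captured at entry
}
base_case_id = manager.store_base_trade_for_later_classification(opening_record)

# Phase 2: position closed -> promote the base case to positive/negative by outcome
closing_record = {
    'symbol': 'ETH/USDT',
    'side': 'SELL',
    'exit_price': 2475.0,
    'pnl_raw': 2.5,
    'pnl_leveraged': 25.0,
    'timestamp_exit': datetime.now(),
    'model_inputs_at_exit': {},
    'cob_snapshot_at_exit': {},
}
if base_case_id:
    final_case_id = manager.move_base_trade_to_outcome(
        base_case_id,
        closing_record,
        is_positive=closing_record['pnl_leveraged'] > 0,
    )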