inference works

This commit is contained in:
Dobromir Popov
2025-07-27 00:24:32 +03:00
parent 36a8e256a8
commit 13155197f8
6 changed files with 238 additions and 45 deletions

View File

@ -123,6 +123,29 @@ class DatabaseManager:
conn.execute("CREATE INDEX IF NOT EXISTS idx_checkpoint_active ON checkpoint_metadata(is_active)")
logger.info(f"Database initialized at {self.db_path}")
# Run migrations to handle schema updates
self._run_migrations()
def _run_migrations(self):
    """Apply in-place schema migrations for the inference_records table.

    Currently ensures the ``input_features_blob`` column exists, issuing an
    ``ALTER TABLE`` when it is missing. Any failure is logged and swallowed
    on purpose: the system can still operate without the blob column.
    """
    try:
        with self._get_connection() as conn:
            # PRAGMA table_info rows are (cid, name, type, ...); collect the names.
            existing = {row[1] for row in conn.execute("PRAGMA table_info(inference_records)").fetchall()}
            if 'input_features_blob' in existing:
                logger.debug("input_features_blob column already exists")
            else:
                logger.info("Adding input_features_blob column to inference_records table")
                conn.execute("ALTER TABLE inference_records ADD COLUMN input_features_blob BLOB")
                conn.commit()
                logger.info("Successfully added input_features_blob column")
    except Exception as e:
        # Best-effort migration: a failure must not prevent startup.
        logger.error(f"Error running database migrations: {e}")
@contextmanager
def _get_connection(self):
@ -145,31 +168,61 @@ class DatabaseManager:
"""Log an inference record"""
try:
with self._get_connection() as conn:
# Serialize input features if provided
# Check if input_features_blob column exists
cursor = conn.execute("PRAGMA table_info(inference_records)")
columns = [row[1] for row in cursor.fetchall()]
has_blob_column = 'input_features_blob' in columns
# Serialize input features if provided and column exists
input_features_blob = None
if record.input_features is not None:
if record.input_features is not None and has_blob_column:
input_features_blob = record.input_features.tobytes()
conn.execute("""
INSERT INTO inference_records (
model_name, timestamp, symbol, action, confidence,
probabilities, input_features_hash, input_features_blob,
processing_time_ms, memory_usage_mb, checkpoint_id, metadata
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
record.model_name,
record.timestamp.isoformat(),
record.symbol,
record.action,
record.confidence,
json.dumps(record.probabilities),
record.input_features_hash,
input_features_blob,
record.processing_time_ms,
record.memory_usage_mb,
record.checkpoint_id,
json.dumps(record.metadata) if record.metadata else None
))
if has_blob_column:
# Use full query with blob column
conn.execute("""
INSERT INTO inference_records (
model_name, timestamp, symbol, action, confidence,
probabilities, input_features_hash, input_features_blob,
processing_time_ms, memory_usage_mb, checkpoint_id, metadata
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
record.model_name,
record.timestamp.isoformat(),
record.symbol,
record.action,
record.confidence,
json.dumps(record.probabilities),
record.input_features_hash,
input_features_blob,
record.processing_time_ms,
record.memory_usage_mb,
record.checkpoint_id,
json.dumps(record.metadata) if record.metadata else None
))
else:
# Fallback query without blob column
logger.warning("input_features_blob column missing, storing without full features")
conn.execute("""
INSERT INTO inference_records (
model_name, timestamp, symbol, action, confidence,
probabilities, input_features_hash,
processing_time_ms, memory_usage_mb, checkpoint_id, metadata
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", (
record.model_name,
record.timestamp.isoformat(),
record.symbol,
record.action,
record.confidence,
json.dumps(record.probabilities),
record.input_features_hash,
record.processing_time_ms,
record.memory_usage_mb,
record.checkpoint_id,
json.dumps(record.metadata) if record.metadata else None
))
conn.commit()
return True
except Exception as e:
@ -343,7 +396,8 @@ class DatabaseManager:
for row in cursor.fetchall():
# Deserialize input features if available
input_features = None
if row['input_features_blob']:
# Check if the column exists in the row (handles missing column gracefully)
if 'input_features_blob' in row.keys() and row['input_features_blob']:
try:
# Reconstruct numpy array from bytes
input_features = np.frombuffer(row['input_features_blob'], dtype=np.float32)
@ -412,6 +466,15 @@ class DatabaseManager:
cutoff_time = datetime.now() - timedelta(hours=hours_back)
with self._get_connection() as conn:
# Check if input_features_blob column exists before querying
cursor = conn.execute("PRAGMA table_info(inference_records)")
columns = [row[1] for row in cursor.fetchall()]
has_blob_column = 'input_features_blob' in columns
if not has_blob_column:
logger.warning("input_features_blob column not found, returning empty list")
return []
if symbol:
cursor = conn.execute("""
SELECT * FROM inference_records
@ -493,4 +556,10 @@ def get_database_manager(db_path: str = "data/trading_system.db") -> DatabaseMan
if _db_manager_instance is None:
_db_manager_instance = DatabaseManager(db_path)
return _db_manager_instance
return _db_manager_instance
def reset_database_manager():
    """Drop the cached singleton so the next get_database_manager() call rebuilds it.

    Useful after a schema change or configuration update: clearing the
    module-level cache forces a fresh DatabaseManager on next access.
    """
    global _db_manager_instance
    # Discard the cached instance; lazily re-created by the accessor.
    _db_manager_instance = None
    logger.info("Database manager instance reset - will re-initialize on next access")