Inference predictions fix

This commit is contained in:
Dobromir Popov
2025-07-26 23:34:36 +03:00
parent 7c61c12b70
commit 3eb6335169
9 changed files with 1125 additions and 305 deletions

View File

@ -61,6 +61,13 @@ class InferenceLogger:
# Get current memory usage
memory_usage_mb = self._get_memory_usage()
# Convert input features to numpy array if needed
features_array = None
if isinstance(input_features, np.ndarray):
features_array = input_features.astype(np.float32)
elif isinstance(input_features, (list, tuple)):
features_array = np.array(input_features, dtype=np.float32)
# Create inference record
record = InferenceRecord(
model_name=model_name,
@ -72,6 +79,7 @@ class InferenceLogger:
input_features_hash=feature_hash,
processing_time_ms=processing_time_ms,
memory_usage_mb=memory_usage_mb,
input_features=features_array,
checkpoint_id=checkpoint_id,
metadata=metadata
)