inference predictions fix
This commit is contained in:
@ -61,6 +61,13 @@ class InferenceLogger:
|
||||
# Get current memory usage
|
||||
memory_usage_mb = self._get_memory_usage()
|
||||
|
||||
# Convert input features to numpy array if needed
|
||||
features_array = None
|
||||
if isinstance(input_features, np.ndarray):
|
||||
features_array = input_features.astype(np.float32)
|
||||
elif isinstance(input_features, (list, tuple)):
|
||||
features_array = np.array(input_features, dtype=np.float32)
|
||||
|
||||
# Create inference record
|
||||
record = InferenceRecord(
|
||||
model_name=model_name,
|
||||
@ -72,6 +79,7 @@ class InferenceLogger:
|
||||
input_features_hash=feature_hash,
|
||||
processing_time_ms=processing_time_ms,
|
||||
memory_usage_mb=memory_usage_mb,
|
||||
input_features=features_array,
|
||||
checkpoint_id=checkpoint_id,
|
||||
metadata=metadata
|
||||
)
|
||||
|
Reference in New Issue
Block a user