Reduce logging; actual training

Dobromir Popov
2025-10-31 03:52:41 +02:00
parent 6ac324289c
commit 1bf41e06a8
9 changed files with 1700 additions and 74 deletions


@@ -615,7 +615,7 @@ class RealTrainingAdapter:
                 # Show breakdown of before/after
                 before_count = sum(1 for s in negative_samples if 'before' in str(s.get('timestamp', '')))
                 after_count = len(negative_samples) - before_count
-                logger.info(f" -> {before_count} beforesignal, {after_count} after signal")
+                logger.info(f" -> {before_count} before signal, {after_count} after signal")
             except Exception as e:
                 logger.error(f" Error preparing test case {i+1}: {e}")
@@ -1413,12 +1413,17 @@ class RealTrainingAdapter:
                     result = trainer.train_step(batch)
                     if result is not None:
-                        epoch_loss += result.get('total_loss', 0.0)
-                        epoch_accuracy += result.get('accuracy', 0.0)
+                        batch_loss = result.get('total_loss', 0.0)
+                        batch_accuracy = result.get('accuracy', 0.0)
+                        epoch_loss += batch_loss
+                        epoch_accuracy += batch_accuracy
                         num_batches += 1
-                        if (i + 1) % 100 == 0:
-                            logger.info(f" Batch {i + 1}/{len(converted_batches)}, Loss: {result.get('total_loss', 0.0):.6f}, Accuracy: {result.get('accuracy', 0.0):.2%}")
+                        # Log first batch and every 100th batch for debugging
+                        if (i + 1) == 1 or (i + 1) % 100 == 0:
+                            logger.info(f" Batch {i + 1}/{len(converted_batches)}, Loss: {batch_loss:.6f}, Accuracy: {batch_accuracy:.4f}")
                     else:
                         logger.warning(f" Batch {i + 1} returned None result - skipping")
                 except Exception as e:
                     logger.error(f" Error in batch {i + 1}: {e}")