diff --git a/model_training_nnn/rnn_trainer.py b/model_training_nnn/rnn_trainer.py
index a862607..fbafc29 100644
--- a/model_training_nnn/rnn_trainer.py
+++ b/model_training_nnn/rnn_trainer.py
@@ -46,10 +46,9 @@ class BrainToTextDecoder_Trainer:
             gradient_accumulation_steps=args.get('gradient_accumulation_steps', 1),
             log_with=None,  # We'll use our own logging
             project_dir=args.get('output_dir', './output'),
+            even_batches=False,  # Required for batch_size=None DataLoaders
         )
 
-        # Set even_batches to False after initialization - required for batch_size=None DataLoaders
-        # Note: This may not be settable in all Accelerate versions, but we handle it in DataLoader config
 
         # Trainer fields
         self.args = args