Fix TPU support
@@ -193,7 +193,6 @@ class BrainToTextDecoder_Trainer:
if use_tpu:
    # For TPU, create a custom DataLoader that properly handles our batch-returning Dataset
    # TPU requires specific DataLoader configuration to avoid batch_sampler issues
    from torch.utils.data import DataLoader
    self.train_loader = DataLoader(
        self.train_dataset,
        batch_size=None,  # None because our Dataset returns batches
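For context, a minimal sketch of the pattern this diff relies on: when a Dataset's __getitem__ already returns a full batch, passing batch_size=None to DataLoader disables automatic batching, so no default batch_sampler is constructed and each pre-assembled batch is yielded as-is. The BatchReturningDataset class, its shapes, and its parameters below are illustrative stand-ins, not code from this repository.

    import torch
    from torch.utils.data import Dataset, DataLoader

    class BatchReturningDataset(Dataset):
        """Hypothetical stand-in for train_dataset: each index maps to one
        pre-assembled batch of (features, labels) rather than one sample."""

        def __init__(self, n_batches=10, batch_size=32, n_features=512):
            self.n_batches = n_batches
            self.batch_size = batch_size
            self.n_features = n_features

        def __len__(self):
            return self.n_batches  # length counts batches, not samples

        def __getitem__(self, idx):
            # Return an already-batched pair: (B, F) features, (B,) labels
            x = torch.randn(self.batch_size, self.n_features)
            y = torch.randint(0, 40, (self.batch_size,))
            return x, y

    # batch_size=None turns off automatic batching: the DataLoader builds no
    # batch_sampler and yields each pre-batched item unchanged, which is the
    # configuration the diff applies for the TPU path.
    loader = DataLoader(BatchReturningDataset(), batch_size=None, shuffle=True)

    for x, y in loader:
        assert x.shape == (32, 512)
        break

On an actual TPU run, such a loader would then typically be wrapped with torch_xla's parallel loader (e.g. MpDeviceLoader) to stream batches onto the XLA device; that wrapping is an assumption about the surrounding training setup and is not shown in this diff.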