
train.py: shuffling at every epoch

Branch: master
Author: rafaelvalle, 5 years ago
Commit: f37998c59d
1 changed file with 1 addition and 1 deletion:
  train.py (+1, -1)

train.py

@@ -59,7 +59,7 @@ def prepare_dataloaders(hparams):
     train_sampler = DistributedSampler(trainset) \
         if hparams.distributed_run else None
-    train_loader = DataLoader(trainset, num_workers=1, shuffle=False,
+    train_loader = DataLoader(trainset, num_workers=1, shuffle=True,
                               sampler=train_sampler,
                               batch_size=hparams.batch_size, pin_memory=False,
                               drop_last=True, collate_fn=collate_fn)
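
For context, here is a minimal sketch of what the flag changes, using a hypothetical toy dataset rather than the real trainset from this repo: with shuffle=True, DataLoader draws a fresh random permutation of the dataset indices at the start of every epoch, so batch composition differs from epoch to epoch.

import torch
from torch.utils.data import DataLoader, TensorDataset

# Toy stand-in dataset (hypothetical; the real code builds trainset from hparams).
data = TensorDataset(torch.arange(8))

# shuffle=True: the loader re-permutes the indices at the start of each epoch.
loader = DataLoader(data, batch_size=4, shuffle=True, drop_last=True)

for epoch in range(2):
    order = [batch[0].tolist() for batch in loader]
    print(f"epoch {epoch}: {order}")
# Example output (random, varies per run):
#   epoch 0: [[5, 2, 7, 0], [3, 6, 1, 4]]
#   epoch 1: [[1, 4, 0, 6], [7, 3, 5, 2]]

One caveat worth verifying against the PyTorch version in use: DataLoader treats sampler and shuffle=True as mutually exclusive and raises a ValueError if both are set, so this change only takes effect when hparams.distributed_run is False and train_sampler is None; in distributed runs, DistributedSampler handles per-epoch shuffling itself (via its set_epoch method).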
