Skip to content

Commit

Permalink
Handle when no batch sampler (coqui-ai#1882)
Browse files Browse the repository at this point in the history
  • Loading branch information
erogol authored Aug 18, 2022
1 parent 7442bce commit fcb0bb5
Showing 1 changed file with 18 additions and 8 deletions.
26 changes: 18 additions & 8 deletions TTS/tts/models/vits.py
Original file line number Diff line number Diff line change
Expand Up @@ -1613,14 +1613,24 @@ def get_data_loader(

# get samplers
sampler = self.get_sampler(config, dataset, num_gpus)

loader = DataLoader(
dataset,
batch_sampler=sampler,
collate_fn=dataset.collate_fn,
num_workers=config.num_eval_loader_workers if is_eval else config.num_loader_workers,
pin_memory=False,
)
# Arguments common to both loader variants; the branches below differ only
# in how batching is driven (fixed batch_size vs. an external batch sampler).
common_loader_kwargs = {
    "collate_fn": dataset.collate_fn,
    "num_workers": config.num_eval_loader_workers if is_eval else config.num_loader_workers,
    "pin_memory": False,
}
if sampler is None:
    # No batch sampler available: fall back to a plain fixed-size loader.
    loader = DataLoader(
        dataset,
        batch_size=config.eval_batch_size if is_eval else config.batch_size,
        shuffle=False,  # shuffle is done in the dataset.
        drop_last=False,  # setting this False might cause issues in AMP training.
        **common_loader_kwargs,
    )
else:
    # Batch sampler present: it fully controls batch composition, so
    # batch_size/shuffle/drop_last must not be passed alongside it.
    loader = DataLoader(
        dataset,
        batch_sampler=sampler,
        **common_loader_kwargs,
    )
return loader

def get_optimizer(self) -> List:
Expand Down

0 comments on commit fcb0bb5

Please sign in to comment.