Skip to content

Commit 5c0372a

Browse files
authored Apr 17, 2024
Fix shuffle param for data_loader.py
1 parent 08a5b5a commit 5c0372a

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed
 

‎code/DeepDA/data_loader.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,9 @@ def load_data(data_folder, batch_size, train, num_workers=0, **kwargs):
2626

2727
def get_data_loader(dataset, batch_size, shuffle=True, drop_last=False, num_workers=0, infinite_data_loader=False, **kwargs):
    """Construct a data loader over *dataset*.

    Args:
        dataset: Any dataset accepted by ``torch.utils.data.DataLoader``.
        batch_size: Number of samples per batch.
        shuffle: Whether to reshuffle the data every epoch.
        drop_last: Whether to drop the final incomplete batch.
        num_workers: Number of worker subprocesses for data loading.
        infinite_data_loader: When True, wrap the dataset in an
            ``InfiniteDataLoader`` that yields batches without end instead of
            a standard epoch-bounded loader.
        **kwargs: Forwarded verbatim to the chosen loader class.

    Returns:
        A ``torch.utils.data.DataLoader`` or an ``InfiniteDataLoader``.
    """
    # Select the loader class lazily; InfiniteDataLoader is only resolved
    # when the infinite variant is actually requested.
    loader_cls = InfiniteDataLoader if infinite_data_loader else torch.utils.data.DataLoader
    return loader_cls(
        dataset,
        batch_size=batch_size,
        shuffle=shuffle,
        drop_last=drop_last,
        num_workers=num_workers,
        **kwargs,
    )
3232

3333
class _InfiniteSampler(torch.utils.data.Sampler):
3434
"""Wraps another Sampler to yield an infinite stream."""
@@ -66,4 +66,4 @@ def __iter__(self):
6666
yield next(self._infinite_iterator)
6767

6868
def __len__(self):
69-
return 0 # Always return 0
69+
return 0 # Always return 0

0 commit comments

Comments
 (0)
Please sign in to comment.