
mnist_hogwild manual breaking of gradient sharing removed (pytorch#138)
pfrendl authored and apaszke committed May 19, 2017
1 parent 89facbe commit 5c41070
Showing 2 changed files with 1 addition and 5 deletions.
2 changes: 1 addition & 1 deletion mnist_hogwild/main.py
@@ -50,7 +50,7 @@ def forward(self, x):
 torch.manual_seed(args.seed)

 model = Net()
-model.share_memory()
+model.share_memory() # gradients are allocated lazily, so they are not shared here

 processes = []
 for rank in range(args.num_processes):
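Why this change works: gradients in PyTorch are allocated lazily, so at the moment model.share_memory() runs there are no .grad tensors to move into shared memory. A minimal sketch of that behavior (using a stand-in nn.Linear rather than the example's Net):

import torch
import torch.nn as nn

# Stand-in for the example's Net; any nn.Module behaves the same way.
model = nn.Linear(4, 2)
model.share_memory()  # moves parameter storage into shared memory

# Gradients are allocated lazily: before the first backward() there is
# nothing to share, so every .grad is still None.
print(all(p.grad is None for p in model.parameters()))      # True

# Only a backward pass allocates .grad, locally in whichever process runs it.
model(torch.randn(3, 4)).sum().backward()
print(all(p.grad is not None for p in model.parameters()))  # True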
4 changes: 0 additions & 4 deletions mnist_hogwild/train.py
@@ -7,10 +7,6 @@

 def train(rank, args, model):
     torch.manual_seed(args.seed + rank)
-    for param in model.parameters():
-        # Break gradient sharing
-        if param.grad is not None:
-            param.grad.data = param.grad.data.clone()

     train_loader = torch.utils.data.DataLoader(
         datasets.MNIST('../data', train=True, download=True,
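With the manual cloning removed, each Hogwild worker simply allocates its own gradients on its first backward() call and applies updates to the shared parameters. A rough, self-contained sketch of that pattern (the worker function, the toy nn.Linear, and the random data are illustrative, not the example's actual code):

import torch
import torch.nn as nn
import torch.multiprocessing as mp

def worker(rank, model):
    torch.manual_seed(rank)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    for _ in range(10):
        optimizer.zero_grad()
        loss = model(torch.randn(8, 4)).sum()
        loss.backward()   # allocates process-local .grad tensors
        optimizer.step()  # writes updates into the shared parameters

if __name__ == '__main__':
    model = nn.Linear(4, 2)
    model.share_memory()  # parameters are shared across workers, gradients are not
    processes = [mp.Process(target=worker, args=(rank, model)) for rank in range(2)]
    for p in processes:
        p.start()
    for p in processes:
        p.join()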
