
Commit

reshape norm output to 0-D tensor manually
HydrogenSulfate committed May 9, 2023
1 parent aa85dec commit d91ea1b
Showing 1 changed file with 2 additions and 2 deletions.
paddlescience/network/grad_norm.py: 2 additions & 2 deletions
@@ -84,8 +84,8 @@ def get_grad_norm_loss(self, losses):
         norms = []
         for i in range(losses.shape[0]):
             grad = paddle.autograd.grad(losses[i], W, retain_graph=True)
-            norms.append(paddle.norm(self.loss_weights[i] * grad[0], p=2))
-        norms = paddle.concat(norms)
+            norms.append(paddle.norm(self.loss_weights[i] * grad[0], p=2).reshape([]))
+        norms = paddle.stack(norms)
 
         # calculate the inverse train rate
         loss_ratio = losses.numpy() / self.initial_losses
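For context (not part of the commit): the .reshape([]) call forces each norm to be an explicit 0-D (scalar) tensor, and paddle.stack joins its inputs along a new leading axis, so N scalars become a 1-D tensor of shape [N]. paddle.concat, by contrast, joins along an existing axis and so expects inputs of rank at least 1, which is why the commit switches to paddle.stack. A minimal sketch of the pattern, assuming a Paddle version with 0-D tensor support; the tensor shapes and variable names here are illustrative, not taken from grad_norm.py:

    import paddle

    # Sketch: collect per-term scalar norms, then combine into one 1-D tensor.
    scalar_norms = []
    for _ in range(3):
        x = paddle.randn([4, 4])
        # .reshape([]) makes the norm an explicit 0-D (scalar) tensor,
        # independent of whether paddle.norm returns shape [1] or shape []
        scalar_norms.append(paddle.norm(x, p=2).reshape([]))

    # paddle.stack adds a new leading axis: three 0-D tensors -> shape [3].
    norms = paddle.stack(scalar_norms)
    print(norms.shape)  # [3]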
