Skip to content

Commit

Permalink
fix wrong norm in nest
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Aug 5, 2021
1 parent 22da26f commit e532424
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 3 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'vit-pytorch',
packages = find_packages(exclude=['examples']),
version = '0.20.1',
version = '0.20.2',
license='MIT',
description = 'Vision Transformer (ViT) - Pytorch',
author = 'Phil Wang',
Expand Down
14 changes: 12 additions & 2 deletions vit_pytorch/nest.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,20 @@
def cast_tuple(val, depth):
    """Return *val* unchanged when it is already a tuple; otherwise repeat it *depth* times as a tuple."""
    if isinstance(val, tuple):
        return val
    return (val,) * depth

LayerNorm = partial(nn.InstanceNorm2d, affine = True)

# classes

class LayerNorm(nn.Module):
def __init__(self, dim, eps = 1e-5):
super().__init__()
self.eps = eps
self.g = nn.Parameter(torch.ones(1, dim, 1, 1))
self.b = nn.Parameter(torch.zeros(1, dim, 1, 1))

def forward(self, x):
std = torch.var(x, dim = 1, unbiased = False, keepdim = True).sqrt()
mean = torch.mean(x, dim = 1, keepdim = True)
return (x - mean) / (std + self.eps) * self.g + self.b

class PreNorm(nn.Module):
def __init__(self, dim, fn):
super().__init__()
Expand Down

0 comments on commit e532424

Please sign in to comment.