
Commit

Update model.py
shiming-chen authored Jul 30, 2021
1 parent ed1fb6a commit 5be3656
Showing 1 changed file with 0 additions and 29 deletions.
model.py: 29 changes (0 additions & 29 deletions)
@@ -150,32 +150,3 @@ def getLayersOutDet(self):

def reparameter(mu, sigma):
    return (torch.randn_like(mu) * sigma) + mu
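
For context, the deleted `reparameter` helper above is the standard reparameterization trick: it samples z = mu + sigma * eps with eps ~ N(0, I), so the draw stays differentiable with respect to mu and sigma. A minimal calling sketch (the batch and attribute sizes below are illustrative, not taken from the repository):

```python
import torch

def reparameter(mu, sigma):
    # The deleted helper, reproduced here so the sketch runs standalone.
    return (torch.randn_like(mu) * sigma) + mu

# Illustrative encoder outputs: per-dimension mean and standard deviation.
mu = torch.zeros(4, 312)
sigma = torch.ones(4, 312)

z = reparameter(mu, sigma)   # z ~ N(mu, sigma^2), differentiable w.r.t. mu and sigma
```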

class AttDec(nn.Module):
    def __init__(self, opt, attSize):
        super(AttDec, self).__init__()
        self.embedSz = 0
        self.fc1 = nn.Linear(opt.resSize + self.embedSz, opt.ngh)
        self.fc3 = nn.Linear(opt.ngh, attSize)
        self.lrelu = nn.LeakyReLU(0.2, True)
        self.hidden = None
        self.sigmoid = None
        self.apply(weights_init)

    def forward(self, feat, att=None):
        h = feat
        if self.embedSz > 0:
            assert att is not None, 'Conditional Decoder requires attribute input'
            h = torch.cat((feat, att), 1)
        self.hidden = self.lrelu(self.fc1(h))
        h = self.fc3(self.hidden)
        if self.sigmoid is not None:
            h = self.sigmoid(h)
        else:
            h = h / h.pow(2).sum(1).sqrt().unsqueeze(1).expand(h.size(0), h.size(1))
        self.out = h
        return h

    def getLayersOutDet(self):
        # used at synthesis time and feature transformation
        return self.hidden.detach()
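
For readers comparing versions, the deleted `AttDec` module above is a two-layer attribute decoder: it maps a visual feature back to the semantic-attribute space and, when no sigmoid is configured, L2-normalizes each output row. A minimal instantiation sketch, assuming `AttDec` and the `weights_init` helper from the pre-commit model.py are in scope, and using illustrative values for `opt.resSize`, `opt.ngh`, and `attSize`:

```python
import torch
from argparse import Namespace

# Illustrative hyper-parameters; the real values come from the training script's opt.
opt = Namespace(resSize=2048, ngh=4096)
attSize = 312

dec = AttDec(opt, attSize)            # the (now removed) decoder class above
feat = torch.randn(8, opt.resSize)    # a batch of visual features
att_hat = dec(feat)                   # reconstructed attributes, rows L2-normalized
hidden = dec.getLayersOutDet()        # detached hidden activations, used at synthesis time
```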
