From 5be365699e752a5f7ae9af7cd163fa83660ae812 Mon Sep 17 00:00:00 2001
From: Shiming Chen
Date: Fri, 30 Jul 2021 15:58:47 +0800
Subject: [PATCH] Update model.py

---
 model.py | 29 -----------------------------
 1 file changed, 29 deletions(-)

diff --git a/model.py b/model.py
index 26f8978..8be701d 100644
--- a/model.py
+++ b/model.py
@@ -150,32 +150,3 @@ def getLayersOutDet(self):
 
 def reparameter(mu,sigma):
     return (torch.randn_like(mu) *sigma) + mu
-
-class AttDec(nn.Module):
-    def __init__(self, opt, attSize):
-        super(AttDec, self).__init__()
-        self.embedSz = 0
-        self.fc1 = nn.Linear(opt.resSize + self.embedSz, opt.ngh)
-        self.fc3 = nn.Linear(opt.ngh, attSize)
-        self.lrelu = nn.LeakyReLU(0.2, True)
-        self.hidden = None
-        self.sigmoid = None
-        self.apply(weights_init)
-
-    def forward(self, feat, att=None):
-        h = feat
-        if self.embedSz > 0:
-            assert att is not None, 'Conditional Decoder requires attribute input'
-            h = torch.cat((feat,att),1)
-        self.hidden = self.lrelu(self.fc1(h))
-        h = self.fc3(self.hidden)
-        if self.sigmoid is not None:
-            h = self.sigmoid(h)
-        else:
-            h = h/h.pow(2).sum(1).sqrt().unsqueeze(1).expand(h.size(0),h.size(1))
-        self.out = h
-        return h
-
-    def getLayersOutDet(self):
-        #used at synthesis time and feature transformation
-        return self.hidden.detach()
\ No newline at end of file
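
Note on the retained code: the `reparameter` helper kept at the top of the hunk implements the standard VAE reparameterization trick. Below is a minimal, self-contained sketch of how such a helper is typically called; the batch size and latent dimension are illustrative assumptions, not values from this repository.

import torch

def reparameter(mu, sigma):
    # Sample z = mu + sigma * eps with eps ~ N(0, I); sampling stays
    # differentiable with respect to mu and sigma.
    return (torch.randn_like(mu) * sigma) + mu

# Illustrative usage (shapes are assumptions, not taken from the patch):
mu = torch.zeros(4, 312)     # a batch of 4 latent means
sigma = torch.ones(4, 312)   # matching standard deviations
z = reparameter(mu, sigma)   # one stochastic latent sample per row
print(z.shape)               # torch.Size([4, 312])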