Skip to content

Commit

Permalink
Ignore .pth files
Browse files Browse the repository at this point in the history
  • Loading branch information
RickyHFR committed Nov 1, 2024
1 parent 8ffab58 commit 8b73f7d
Show file tree
Hide file tree
Showing 6 changed files with 41 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
*.pth
40 changes: 40 additions & 0 deletions Generator_Discriminator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import torch.nn as nn

class Generator(nn.Module):
    """MLP generator mapping a noise vector to an 80x80 image.

    Architecture: three Linear+ReLU+BatchNorm1d stages (256 -> 512 -> 1024)
    followed by a Linear projection to ``img_channels * 80 * 80`` and Tanh,
    so output pixel values lie in [-1, 1].

    Args:
        noise_dim: Dimensionality of the input noise vector.
        img_channels: Number of channels in the generated image.
    """

    def __init__(self, noise_dim, img_channels):
        super(Generator, self).__init__()
        # Remember the channel count so forward() can reshape correctly.
        # BUG FIX: the original forward() hard-coded 3 channels in view(),
        # which crashed with a size mismatch for any img_channels != 3.
        self.img_channels = img_channels
        self.model = nn.Sequential(
            nn.Linear(noise_dim, 256),
            nn.ReLU(True),
            nn.BatchNorm1d(256),
            nn.Linear(256, 512),
            nn.ReLU(True),
            nn.BatchNorm1d(512),
            nn.Linear(512, 1024),
            nn.ReLU(True),
            nn.BatchNorm1d(1024),
            nn.Linear(1024, img_channels * 80 * 80),
            nn.Tanh()
        )

    def forward(self, x):
        """Generate images from noise.

        Args:
            x: Noise tensor of shape ``(batch, noise_dim)``.

        Returns:
            Tensor of shape ``(batch, img_channels, 80, 80)`` in [-1, 1].
        """
        x = self.model(x)
        # Reshape the flat output using the configured channel count,
        # not a hard-coded 3.
        return x.view(-1, self.img_channels, 80, 80)

class Discriminator(nn.Module):
    """Convolutional discriminator scoring 80x80 images as real/fake.

    Two strided Conv2d blocks (each halving spatial size: 80 -> 40 -> 20)
    with LeakyReLU and Dropout, then a flatten and a single sigmoid-
    activated Linear unit producing a probability in (0, 1).

    Args:
        img_channels: Number of channels in the input image.
    """

    def __init__(self, img_channels):
        super(Discriminator, self).__init__()
        # k=5, s=2, p=2 keeps (H+2p-k)//s + 1 = H/2, so 80 -> 40 -> 20
        # and the flatten size below is 128 * 20 * 20.
        stages = [
            nn.Conv2d(img_channels, 80, kernel_size=5, stride=2, padding=2),
            nn.LeakyReLU(0.2),
            nn.Dropout(0.3),
            nn.Conv2d(80, 128, kernel_size=5, stride=2, padding=2),
            nn.LeakyReLU(0.2),
            nn.Dropout(0.3),
            nn.Flatten(),
            nn.Linear(128 * 20 * 20, 1),
            nn.Sigmoid(),
        ]
        self.model = nn.Sequential(*stages)

    def forward(self, x):
        """Score a batch of images.

        Args:
            x: Tensor of shape ``(batch, img_channels, 80, 80)``.

        Returns:
            Tensor of shape ``(batch, 1)`` with values in (0, 1).
        """
        score = self.model(x)
        return score
Empty file removed model/discriminator.pth
Empty file.
Empty file removed model/generator.pth
Empty file.
Empty file removed model/optimizer_d.pth
Empty file.
Empty file removed model/optimizer_g.pth
Empty file.

0 comments on commit 8b73f7d

Please sign in to comment.