Commit: clean models

ljk628 committed Sep 18, 2018
1 parent c2087ce  commit 510fa3a
Showing 3 changed files with 20 additions and 26 deletions.
2 changes: 1 addition & 1 deletion cifar10/main.py
@@ -105,7 +105,7 @@ def test(testloader, net, criterion, use_cuda=True):
         one_hot_targets = one_hot_targets.float()
         if use_cuda:
             inputs, one_hot_targets = inputs.cuda(), one_hot_targets.cuda()
-        inputs, one_hot_targets = Variable(inputs, volatile=True), Variable(one_hot_targets)
+        inputs, one_hot_targets = Variable(inputs), Variable(one_hot_targets)
         outputs = F.softmax(net(inputs))
         loss = criterion(outputs, one_hot_targets)
         test_loss += loss.item()*batch_size
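For context: volatile=True was removed in PyTorch 0.4, when Variable and Tensor were merged, which is presumably why this commit drops the flag. The post-0.4 idiom disables autograd for evaluation with the torch.no_grad() context instead. Below is a minimal sketch of that idiom, not part of the commit; evaluate_batch is a hypothetical helper name, and net/criterion stand in for the objects the test loop already has.

    import torch
    import torch.nn.functional as F

    def evaluate_batch(net, criterion, inputs, one_hot_targets):
        # Hypothetical helper: post-0.4 style, no Variable wrapper needed.
        # torch.no_grad() replaces volatile=True for inference.
        with torch.no_grad():
            outputs = F.softmax(net(inputs), dim=1)  # explicit dim avoids the later deprecation warning
            loss = criterion(outputs, one_hot_targets)
        return outputs, loss.item()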
43 changes: 19 additions & 24 deletions cifar10/models/resnet.py
@@ -1,21 +1,16 @@
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
-from torch.autograd import Variable
-
-
-def conv3x3(in_planes, out_planes, stride=1):
-    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)
 
 class BasicBlock(nn.Module):
     expansion = 1
 
     def __init__(self, in_planes, planes, stride=1):
         super(BasicBlock, self).__init__()
-        self.conv1 = conv3x3(in_planes, planes, stride)
-        self.bn1 = nn.BatchNorm2d(planes)
-        self.conv2 = conv3x3(planes, planes)
-        self.bn2 = nn.BatchNorm2d(planes)
+        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
+        self.bn2 = nn.BatchNorm2d(planes)
 
         self.shortcut = nn.Sequential()
         if stride != 1 or in_planes != self.expansion*planes:
@@ -36,10 +31,10 @@ class BasicBlock_noshortcut(nn.Module):
 
     def __init__(self, in_planes, planes, stride=1):
         super(BasicBlock_noshortcut, self).__init__()
-        self.conv1 = conv3x3(in_planes, planes, stride)
-        self.bn1 = nn.BatchNorm2d(planes)
-        self.conv2 = conv3x3(planes, planes)
-        self.bn2 = nn.BatchNorm2d(planes)
+        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(planes)
+        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
+        self.bn2 = nn.BatchNorm2d(planes)
 
     def forward(self, x):
         out = F.relu(self.bn1(self.conv1(x)))
@@ -54,11 +49,11 @@ class Bottleneck(nn.Module):
     def __init__(self, in_planes, planes, stride=1):
         super(Bottleneck, self).__init__()
         self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
-        self.bn1 = nn.BatchNorm2d(planes)
+        self.bn1 = nn.BatchNorm2d(planes)
         self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
-        self.bn2 = nn.BatchNorm2d(planes)
+        self.bn2 = nn.BatchNorm2d(planes)
         self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
-        self.bn3 = nn.BatchNorm2d(self.expansion*planes)
+        self.bn3 = nn.BatchNorm2d(self.expansion*planes)
 
         self.shortcut = nn.Sequential()
         if stride != 1 or in_planes != self.expansion*planes:
@@ -82,11 +77,11 @@ class Bottleneck_noshortcut(nn.Module):
     def __init__(self, in_planes, planes, stride=1):
         super(Bottleneck_noshortcut, self).__init__()
         self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
-        self.bn1 = nn.BatchNorm2d(planes)
+        self.bn1 = nn.BatchNorm2d(planes)
         self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
-        self.bn2 = nn.BatchNorm2d(planes)
+        self.bn2 = nn.BatchNorm2d(planes)
         self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
-        self.bn3 = nn.BatchNorm2d(self.expansion*planes)
+        self.bn3 = nn.BatchNorm2d(self.expansion*planes)
 
     def forward(self, x):
         out = F.relu(self.bn1(self.conv1(x)))
@@ -100,8 +95,8 @@ def __init__(self, block, num_blocks, num_classes=10):
         super(ResNet, self).__init__()
         self.in_planes = 64
 
-        self.conv1 = conv3x3(3,64)
-        self.bn1 = nn.BatchNorm2d(64)
+        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(64)
         self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
         self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
         self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
@@ -133,8 +128,8 @@ def __init__(self, block, num_blocks, num_classes=10):
         super(ResNet_cifar, self).__init__()
         self.in_planes = 16
 
-        self.conv1 = conv3x3(3,16)
-        self.bn1 = nn.BatchNorm2d(16)
+        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1, bias=False)
+        self.bn1 = nn.BatchNorm2d(16)
         self.layer1 = self._make_layer(block, 16, num_blocks[0], stride=1)
         self.layer2 = self._make_layer(block, 32, num_blocks[1], stride=2)
         self.layer3 = self._make_layer(block, 64, num_blocks[2], stride=2)
@@ -164,7 +159,7 @@ def __init__(self, block, num_blocks, k, num_classes=10):
         super(WResNet_cifar, self).__init__()
         self.in_planes = 16*k
 
-        self.conv1 = conv3x3(3,16*k)
+        self.conv1 = nn.Conv2d(3, 16*k, kernel_size=3, stride=1, padding=1, bias=False)
         self.bn1 = nn.BatchNorm2d(16*k)
         self.layer1 = self._make_layer(block, 16*k, num_blocks[0], stride=1)
         self.layer2 = self._make_layer(block, 32*k, num_blocks[1], stride=2)
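Taken together, the resnet.py changes inline the conv3x3 helper into explicit nn.Conv2d calls and drop the unused torch.autograd.Variable import. The removed/added pairs that render identically above (the BatchNorm2d lines) presumably differ only in whitespace, which GitHub's rendering does not show. The sketch below is illustrative and not part of the commit; it checks that the deleted helper and the inlined call configure the same module.

    import torch.nn as nn

    # The helper removed by this commit, reproduced for comparison.
    def conv3x3(in_planes, out_planes, stride=1):
        return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                         padding=1, bias=False)

    old_style = conv3x3(16, 32, stride=2)
    new_style = nn.Conv2d(16, 32, kernel_size=3, stride=2, padding=1, bias=False)
    # Same layer configuration either way; only the call site changed.
    assert repr(old_style) == repr(new_style)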
1 change: 0 additions & 1 deletion cifar10/models/vgg.py
@@ -1,6 +1,5 @@
 import torch
 import torch.nn as nn
-from torch.autograd import Variable
 
 
 cfg = {
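The vgg.py change is the same import cleanup: torch.autograd.Variable was imported but evidently never used, so the deletion has no behavioral effect. A quick way to catch such dead imports (an illustrative command, assuming pyflakes is installed):

    python -m pyflakes cifar10/models/vgg.py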
