
Commit

Add new directory "models"
liliangqi committed Oct 25, 2018
1 parent 84e601a commit 8363b7c
Showing 5 changed files with 92 additions and 110 deletions.
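This commit moves the backbone and model definitions into a new models package and renames the lowercase classes densenet and resnet to DenseNet and MyResNet. A sketch of how downstream code would import them after the move, assuming the repository root stays on sys.path (only the three files shown below are certain to be in the package):

    # Before this commit the modules sat at the repository root:
    #     from resnet import resnet
    #     from densenet import densenet
    # After it they are imported from the package under the new class names:
    from models.resnet import MyResNet
    from models.densenet import DenseNet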
10 changes: 5 additions & 5 deletions densenet.py → models/densenet.py
@@ -3,14 +3,14 @@
 #
 # Author: Liangqi Li
 # Creating Date: Apr 28, 2018
-# Latest rectifying: Apr 29, 2018
+# Latest rectifying: Oct 25, 2018
 # -----------------------------------------------------
 import torch.nn as nn
 from torchvision import models
 import yaml


-class densenet:
+class DenseNet:

     def __init__(self, num_layers=121, pre_model=None, training=True):
         self.training = training
@@ -70,8 +70,8 @@ def initialize(self, fixed_blocks):
         def set_bn_fix(m):
             class_name = m.__class__.__name__
             if class_name.find('BatchNorm') != -1:
-                for p in m.parameters():
-                    p.requires_grad = False
+                for param in m.parameters():
+                    param.requires_grad = False

         self.model.apply(set_bn_fix)
@@ -106,4 +106,4 @@ def set_bn_eval(m):
             if class_name.find('BatchNorm') != -1:
                 m.eval()

-        self.model.apply(set_bn_eval)
+        self.model.apply(set_bn_eval)
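The set_bn_fix helper above is the usual trick for freezing BatchNorm layers when fine-tuning a backbone. A minimal standalone sketch of the same pattern (the backbone choice here is illustrative, not taken from the commit):

    from torchvision import models

    def set_bn_fix(m):
        # Freeze the affine parameters of every BatchNorm layer.
        if m.__class__.__name__.find('BatchNorm') != -1:
            for param in m.parameters():
                param.requires_grad = False

    backbone = models.densenet121()   # illustrative backbone
    backbone.apply(set_bn_fix)        # apply() visits every submodule recursively

The companion set_bn_eval helper keeps those layers in eval mode during training, so their running statistics stay frozen as well.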
47 changes: 21 additions & 26 deletions model.py → models/model.py
@@ -3,18 +3,18 @@
 #
 # Author: Liangqi Li and Xinlei Chen
 # Creating Date: Apr 1, 2018
-# Latest rectified: Aug 7, 2018
+# Latest rectified: Oct 25, 2018
 # -----------------------------------------------------
 import torch
 import torch.nn as nn
-import torch.nn.functional as F
+import torch.nn.functional as func
 import yaml

-from vgg16 import Vgg16
-from resnet import resnet
-from densenet import densenet
-from strpn import STRPN
-from losses import oim_loss, smooth_l1_loss
+from .vgg16 import Vgg16
+from .resnet import MyResNet
+from .densenet import DenseNet
+from .strpn import STRPN
+from utils.losses import oim_loss, smooth_l1_loss


 class SIPN(nn.Module):
@@ -43,13 +43,13 @@ def __init__(self, net_name, dataset_name, pre_model=None, is_train=True):
         if self.net_name == 'vgg16':
             self.net = Vgg16(pre_model, self.is_train)
         elif self.net_name == 'res34':
-            self.net = resnet(34, pre_model, self.is_train)
+            self.net = MyResNet(34, pre_model, self.is_train)
         elif self.net_name == 'res50':
-            self.net = resnet(50, pre_model, self.is_train)
+            self.net = MyResNet(50, pre_model, self.is_train)
         elif self.net_name == 'dense121':
-            self.net = densenet(121, pre_model, self.is_train)
+            self.net = DenseNet(121, pre_model, self.is_train)
         elif self.net_name == 'dense161':
-            self.net = densenet(161, pre_model, self.is_train)
+            self.net = DenseNet(161, pre_model, self.is_train)
         else:
             raise KeyError(self.net_name)

@@ -80,16 +80,13 @@ def forward(self, im_data, gt_boxes, im_info, mode='gallery'):
             cls_score = self.cls_score_net(fc7)
             bbox_pred = self.bbox_pred_net(fc7)

-            reid_fc7 = self.tail(trans_feat).mean(3).mean(2)
-            reid_feat = F.normalize(self.reid_feat_net(reid_fc7))
-            # reid_feat = F.normalize(self.reid_feat_net(fc7))
+            # reid_fc7 = self.tail(trans_feat).mean(3).mean(2)
+            # reid_feat = F.normalize(self.reid_feat_net(reid_fc7))
+            reid_feat = func.normalize(self.reid_feat_net(fc7))

-            cls_pred = torch.max(cls_score, 1)[1]
-            cls_prob = F.softmax(cls_score, 1)
             det_label, pid_label = label
-
             det_label = det_label.view(-1)
-            cls_loss = F.cross_entropy(cls_score.view(-1, 2), det_label)
+            cls_loss = func.cross_entropy(cls_score.view(-1, 2), det_label)
             bbox_loss = smooth_l1_loss(bbox_pred, bbox_info)
             reid_loss = oim_loss(reid_feat, pid_label, self.lut, self.queue,
                                  gt_boxes.size(0), self.lut_momentum)
@@ -110,12 +107,11 @@ def forward(self, im_data, gt_boxes, im_info, mode='gallery'):
             cls_score = self.cls_score_net(fc7)
             bbox_pred = self.bbox_pred_net(fc7)

-            reid_fc7 = self.tail(trans_feat).mean(3).mean(2)
-            reid_feat = F.normalize(self.reid_feat_net(reid_fc7))
-            # reid_feat = F.normalize(self.reid_feat_net(fc7))
+            # reid_fc7 = self.tail(trans_feat).mean(3).mean(2)
+            # reid_feat = F.normalize(self.reid_feat_net(reid_fc7))
+            reid_feat = func.normalize(self.reid_feat_net(fc7))

-            cls_pred = torch.max(cls_score, 1)[1]
-            cls_prob = F.softmax(cls_score, 1)
+            cls_prob = func.softmax(cls_score, 1)

             with open('config.yml', 'r') as f:
                 config = yaml.load(f)
@@ -143,7 +139,7 @@ def forward(self, im_data, gt_boxes, im_info, mode='gallery'):
                 fc7 = self.tail(pooled_feat)
             else:
                 fc7 = self.tail(pooled_feat).mean(3).mean(2)
-            reid_feat = F.normalize(self.reid_feat_net(fc7))
+            reid_feat = func.normalize(self.reid_feat_net(fc7))

             return reid_feat.data.cpu().numpy()
@@ -157,7 +153,7 @@ def train(self, mode=True):
     def init_linear_weight(self, trun):
         def normal_init(m, mean, stddev, truncated=False):
             """
-            weight initalizer: truncated normal and random normal.
+            weight initializer: truncated normal and random normal.
             """
             # x is a parameter
             if truncated:
@@ -172,7 +168,6 @@ def normal_init(m, mean, stddev, truncated=False):
         # TODO: change 0.01 for reid_feat_net
         normal_init(self.reid_feat_net, 0, 0.01, trun)

-
     def load_trained_model(self, state_dict):
         nn.Module.load_state_dict(
             self, {k: state_dict[k] for k in list(self.state_dict())})
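Besides the package move, the substantive change in model.py is that the re-ID branch now embeds the shared fc7 features and L2-normalizes them with torch.nn.functional (imported as func). A minimal sketch of that normalization step, with hypothetical feature and embedding sizes (not taken from the repository):

    import torch
    import torch.nn as nn
    import torch.nn.functional as func

    fc7 = torch.randn(4, 2048)                       # pooled per-RoI features (sizes assumed)
    reid_feat_net = nn.Linear(2048, 256)             # projection to the re-ID embedding
    reid_feat = func.normalize(reid_feat_net(fc7))   # L2-normalize each embedding (dim=1)
    print(reid_feat.norm(dim=1))                     # every row has unit length

Unit-length embeddings are what the OIM loss and the cosine-similarity ranking at test time expect.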
15 changes: 7 additions & 8 deletions resnet.py → models/resnet.py
@@ -3,7 +3,7 @@
 #
 # Author: Liangqi Li and Xinlei Chen
 # Creating Date: Apr 1, 2018
-# Latest rectifying: May 6, 2018
+# Latest rectifying: Oct 25, 2018
 # -----------------------------------------------------
 import torch
 import torch.nn as nn
@@ -13,8 +13,8 @@
 import yaml


-__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
-           'resnet152']
+__all__ = ['ResNet', 'MyResNet', 'resnet18', 'resnet34', 'resnet50',
+           'resnet101', 'resnet152']

 root_url = 'https://s3.amazonaws.com/pytorch/models/'
 model_urls = {
@@ -139,8 +139,8 @@ def _make_layer(self, block, planes, blocks, stride=1):
                 nn.BatchNorm2d(planes * block.expansion),
             )

-        layers = []
-        layers.append(block(self.inplanes, planes, stride, downsample))
+        layers = [block(self.inplanes, planes, stride, downsample)]
         self.inplanes = planes * block.expansion
         for i in range(1, blocks):
             layers.append(block(self.inplanes, planes))
@@ -203,7 +202,7 @@ def resnet152(pretrained=False):
     return model


-class resnet:
+class MyResNet:

     def __init__(self, num_layers=50, pre_model=None, training=True):
         self.training = training
@@ -259,8 +258,8 @@ def initialize(self, fixed_blocks):
         def set_bn_fix(m):
             class_name = m.__class__.__name__
             if class_name.find('BatchNorm') != -1:
-                for p in m.parameters():
-                    p.requires_grad = False
+                for param in m.parameters():
+                    param.requires_grad = False

         self.model.apply(set_bn_fix)

(Diffs for the remaining two changed files did not load in this view.)
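The small tweak to ResNet._make_layer above, seeding the layer list with the first, possibly strided, block instead of appending to an empty list, is the usual way a torchvision-style residual stage is assembled. A standalone sketch of the pattern, with the helper name and surrounding details assumed rather than taken from the diff:

    import torch.nn as nn
    from torchvision.models.resnet import BasicBlock

    def make_stage(inplanes, planes, num_blocks, stride=1):
        # Build one residual stage: a (possibly strided) first block with an
        # optional downsample branch, followed by num_blocks - 1 plain blocks.
        downsample = None
        if stride != 1 or inplanes != planes * BasicBlock.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(inplanes, planes * BasicBlock.expansion, 1, stride, bias=False),
                nn.BatchNorm2d(planes * BasicBlock.expansion),
            )
        layers = [BasicBlock(inplanes, planes, stride, downsample)]
        inplanes = planes * BasicBlock.expansion
        for _ in range(1, num_blocks):
            layers.append(BasicBlock(inplanes, planes))
        return nn.Sequential(*layers)

    stage = make_stage(64, 128, num_blocks=2, stride=2)   # e.g. layer2 of resnet18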
