
Commit: update
iscyy committed Aug 31, 2022
1 parent f0c5d69 commit 99b29ac
Showing 7 changed files with 286 additions and 14 deletions.
2 changes: 1 addition & 1 deletion configs/attention/yolov5s_acmix.yaml
@@ -24,7 +24,7 @@ backbone:
[-1, 9, C3, [512]],
[-1, 1, Conv, [1024, 3, 2]], # 7-P5/32
[-1, 3, C3, [1024]],
- [-1, 1, ACmix, [1024, 1024]], #9
+ [-1, 1, ACmix, [1024]], #9
[-1, 1, SPPF, [1024,5]], #10
]

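Note: the ACmix row now carries only the nominal output width; its input width is supplied by parse_model from the preceding layer (see the models/yolo.py change below). A rough worked example at yolov5s scale (width_multiple 0.50) — the final constructor call is an assumption based on that parsing change, not text from this diff:

# [-1, 1, ACmix, [1024]] at width_multiple = 0.50:
c1 = 512                   # channels of the preceding C3 (1024 scaled by 0.50)
c2 = int(1024 * 0.50)      # make_divisible(1024 * 0.50, 8) -> 512
# The layer is then presumably built as ACmix(512, 512), so the YAML no longer
# needs to repeat the channel count as it did with [1024, 1024].
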
49 changes: 49 additions & 0 deletions configs/backbone/yolov5_mobileone_block.yaml
@@ -0,0 +1,49 @@
# YOLOv5 improvement 🚀 license

# Parameters
nc: 80 # number of classes
depth_multiple: 0.33 # model depth multiple
width_multiple: 0.50 # layer channel multiple
anchors:
- [10,13, 16,30, 33,23] # P3/8
- [30,61, 62,45, 59,119] # P4/16
- [116,90, 156,198, 373,326] # P5/32

# YOLOv5 backbone by yoloair
backbone:
# [from, number, module, args]
[[-1, 1, Conv, [64, 6, 2, 2]], # 0-P1/2
[-1, 1, Conv, [128, 3, 2]], # 1-P2/4
[-1, 3, C3, [128]],
[-1, 1, Conv, [256, 3, 2]], # 3-P3/8
[-1, 3, C3, [256]],
[-1, 1, Conv, [512, 3, 2]], # 5-P4/16
[-1, 3, C3, [512]],
[-1, 1, Conv, [1024, 3, 2]], # 7-P5/32
[-1, 6, MobileOneBlock, [1024, 4, 1, False]],
[-1, 1, SPPF, [1024, 5]], # 9
]

# YOLOv5 head
head:
[[-1, 1, Conv, [512, 1, 1]],
[-1, 1, nn.Upsample, [None, 2, 'nearest']],
[[-1, 6], 1, Concat, [1]], # cat backbone P4
[-1, 3, C3, [512, False]], # 13

[-1, 1, Conv, [256, 1, 1]],
[-1, 1, nn.Upsample, [None, 2, 'nearest']],
[[-1, 4], 1, Concat, [1]], # cat backbone P3
[-1, 3, C3, [256, False]], # 17 (P3/8-small)

[-1, 1, Conv, [256, 3, 2]],
[[-1, 14], 1, Concat, [1]], # cat head P4
[-1, 3, C3, [512, False]], # 20 (P4/16-medium)

[-1, 1, Conv, [512, 3, 2]],
[[-1, 10], 1, Concat, [1]], # cat head P5
[-1, 3, C3, [1024, False]], # 23 (P5/32-large)

[[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5)
]
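
For reference, the MobileOneBlock row above is scaled like any other backbone entry once MobileOneBlock is registered in parse_model (see models/yolo.py below): the repeat count goes through depth_multiple and the channel count through width_multiple. A minimal sketch of that arithmetic, treating the remaining MobileOneBlock arguments (4, 1, False) as opaque:

import math

def make_divisible(x, divisor=8):
    # Round a scaled channel count up to a multiple of `divisor`, as YOLOv5 does.
    return math.ceil(x / divisor) * divisor

# Row above: [-1, 6, MobileOneBlock, [1024, 4, 1, False]]
gd, gw = 0.33, 0.50                  # depth_multiple, width_multiple from this file
n = max(round(6 * gd), 1)            # repeats: 6 -> 2
c2 = make_divisible(1024 * gw, 8)    # output channels: 1024 -> 512
print(n, c2)                         # 2 512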

48 changes: 48 additions & 0 deletions configs/yolov5_exp/yolov5_repvgg_v6backbone.yaml
@@ -0,0 +1,48 @@
# YOLOv5 🚀 by Ultralytics, GPL-3.0 license

# Parameters
nc: 80 # number of classes
depth_multiple: 0.33 # model depth multiple
width_multiple: 0.50 # layer channel multiple
anchors:
- [10,13, 16,30, 33,23] # P3/8
- [30,61, 62,45, 59,119] # P4/16
- [116,90, 156,198, 373,326] # P5/32

# YOLOv5 v6.0 backbone
backbone:
# [from, number, module, args]
[[-1, 1, RepVGGBlockv6,[64,3,2]], # 0-P1/2
[-1, 1, RepVGGBlockv6,[128,3,2]], # 1-P2/4
[-1, 3, RepBlock,[128]],
[-1, 1, RepVGGBlockv6,[256,3,2]], # 3-P3/8
[-1, 6, RepBlock,[256]],
[-1, 1, RepVGGBlockv6,[512,3,2]], # 5-P4/16
[-1, 9, RepBlock,[512]],
[-1, 1, RepVGGBlockv6,[1024,3,2]], # 7-P5/32
[-1, 3, RepBlock,[1024]],
[-1, 1, SimSPPF,[1024]], # 9
]

# YOLOv5 v6.0 head
head:
[[-1, 1, Conv, [512, 1, 1]],
[-1, 1, nn.Upsample, [None, 2, 'nearest']],
[[-1, 6], 1, Concat, [1]], # cat backbone P4
[-1, 3, C3, [512, False]], # 13

[-1, 1, Conv, [256, 1, 1]],
[-1, 1, nn.Upsample, [None, 2, 'nearest']],
[[-1, 4], 1, Concat, [1]], # cat backbone P3
[-1, 3, C3, [256, False]], # 17 (P3/8-small)

[-1, 1, Conv, [256, 3, 2]],
[[-1, 14], 1, Concat, [1]], # cat head P4
[-1, 3, C3, [512, False]],

[-1, 1, Conv, [512, 3, 2]],
[[-1, 10], 1, Concat, [1]], # cat head P5
[-1, 3, C3, [1024, False]], # 23 (P5/32-large)

[[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5)
]
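
The from-indices in this head are absolute layer numbers: the backbone occupies layers 0-9, the head continues at 10, and the Concat rows splice in backbone P4 (layer 6), backbone P3 (layer 4) and the head's own 1x1 Conv outputs (layers 14 and 10). A small sketch, using nothing beyond the rows written above, makes those cross-references explicit:

# Enumerate the rows of this config to resolve the head's cross-references.
backbone = ['RepVGGBlockv6', 'RepVGGBlockv6', 'RepBlock', 'RepVGGBlockv6', 'RepBlock',
            'RepVGGBlockv6', 'RepBlock', 'RepVGGBlockv6', 'RepBlock', 'SimSPPF']
head = ['Conv', 'nn.Upsample', 'Concat', 'C3',
        'Conv', 'nn.Upsample', 'Concat', 'C3',
        'Conv', 'Concat', 'C3',
        'Conv', 'Concat', 'C3',
        'Detect']
index = {i: name for i, name in enumerate(backbone + head)}
print(index[6], index[4])               # RepBlock RepBlock   (backbone P4 and P3 taps)
print(index[10], index[14])             # Conv Conv           (head 1x1 convs re-used at P5/P4)
print(index[17], index[20], index[23])  # C3 C3 C3            (P3/P4/P5 outputs fed to Detect)
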
15 changes: 8 additions & 7 deletions models/Models/yolov4.py
@@ -56,7 +56,7 @@ def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, k
super(Conv, self).__init__()
self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False)
self.bn = nn.BatchNorm2d(c2)
- self.act = Mish() if act else nn.Identity()
+ self.act = nn.Mish() if act else nn.Identity()

def forward(self, x):
return self.act(self.bn(self.conv(x)))
@@ -88,7 +88,7 @@ def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, nu
self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False)
self.cv4 = Conv(2 * c_, c2, 1, 1)
self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3)
- self.act = Mish()
+ self.act = nn.Mish()
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])

def forward(self, x):
@@ -107,7 +107,7 @@ def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, nu
#self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False)
self.cv4 = Conv(2 * c_, c2, 1, 1)
self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3)
- self.act = Mish()
+ self.act = nn.Mish()
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])

def forward(self, x):
@@ -126,7 +126,7 @@ def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, nu
self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False)
#self.cv4 = Conv(2 * c_, c2, 1, 1)
self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3)
- self.act = Mish()
+ self.act = nn.Mish()
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])

def forward(self, x):
@@ -144,7 +144,7 @@ def __init__(self, c1, c2, n=1, shortcut=False, g=1, e=0.5): # ch_in, ch_out, n
self.cv2 = nn.Conv2d(c_, c_, 1, 1, bias=False)
self.cv3 = Conv(2 * c_, c2, 1, 1)
self.bn = nn.BatchNorm2d(2 * c_)
- self.act = Mish()
+ self.act = nn.Mish()
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])

def forward(self, x):
@@ -197,7 +197,7 @@ def __init__(self, c1, c2, n=1, shortcut=False, g=1, e=0.5, k=(5, 9, 13)):
self.cv5 = Conv(4 * c_, c_, 1, 1)
self.cv6 = Conv(c_, c_, 3, 1)
self.bn = nn.BatchNorm2d(2 * c_)
- self.act = Mish()
+ self.act = nn.Mish()
self.cv7 = Conv(2 * c_, c2, 1, 1)

def forward(self, x):
@@ -430,8 +430,9 @@ def forward_fuse(self, x):

class SimSPPF(nn.Module):
'''Simplified SPPF with ReLU activation'''
- def __init__(self, in_channels, out_channels, kernel_size=5):
+ def __init__(self, in_channels, kernel_size=5):
super().__init__()
+ out_channels = in_channels
c_ = in_channels // 2 # hidden channels
self.cv1 = SimConv(in_channels, c_, 1, 1)
self.cv2 = SimConv(c_ * 4, out_channels, 1, 1)
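
Two changes land in this file: the hand-rolled Mish activation is swapped for torch's built-in nn.Mish (available since PyTorch 1.9), and SimSPPF loses its out_channels parameter, pinning the output width to the input width. The pooling layer and forward pass are collapsed in the view above, so the sketch below fills them in with the standard SimSPPF design — that part, and the SimConv helper, are assumptions, not text from this diff:

import torch
import torch.nn as nn

class SimConv(nn.Module):
    '''Conv + BN + ReLU helper, assumed to match the repo's SimConv (sketch only).'''
    def __init__(self, c1, c2, k=1, s=1):
        super().__init__()
        self.conv = nn.Conv2d(c1, c2, k, s, k // 2, bias=False)
        self.bn = nn.BatchNorm2d(c2)
        self.act = nn.ReLU(inplace=True)

    def forward(self, x):
        return self.act(self.bn(self.conv(x)))

class SimSPPF(nn.Module):
    '''Simplified SPPF with ReLU activation — sketch of the updated signature.'''
    def __init__(self, in_channels, kernel_size=5):
        super().__init__()
        out_channels = in_channels  # new in this commit: output width follows the input
        c_ = in_channels // 2       # hidden channels
        self.cv1 = SimConv(in_channels, c_, 1, 1)
        self.cv2 = SimConv(c_ * 4, out_channels, 1, 1)
        # Assumed (collapsed above): one shared max-pool applied three times.
        self.m = nn.MaxPool2d(kernel_size=kernel_size, stride=1, padding=kernel_size // 2)

    def forward(self, x):
        x = self.cv1(x)
        y1 = self.m(x)
        y2 = self.m(y1)
        return self.cv2(torch.cat([x, y1, y2, self.m(y2)], 1))
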
10 changes: 5 additions & 5 deletions models/yolo.py
@@ -347,7 +347,7 @@ def parse_model(d, ch): # model_dict, input_channels(3)

n = n_ = max(round(n * gd), 1) if n > 1 else n # depth gain
if m in [Conv, GhostConv, Bottleneck, GhostBottleneck, SPP, SPPF, DWConv, MixConv2d, Focus, CrossConv,
- BottleneckCSP, C3, C3TR, C3SPP, C3Ghost, C3HB, C3RFEM, MultiSEAM, SEAM, C3STR]:
+ BottleneckCSP, C3, C3TR, C3SPP, C3Ghost, C3HB, C3RFEM, MultiSEAM, SEAM, C3STR, MobileOneBlock]:
c1, c2 = ch[f], args[0]
if c2 != no: # if not output
c2 = make_divisible(c2 * gw, 8)
@@ -357,9 +357,9 @@ def parse_model(d, ch): # model_dict, input_channels(3)
args.insert(2, n) # number of repeats
n = 1
# add module research
- elif m in [CARAFE, SPPCSPC, SPPFCSPC, RepConv, BoT3, CA, CBAM, NAMAttention, GAMAttention, ACmix, Involution, Stem, ResCSPC, ResCSPB, \
+ elif m in [CARAFE, SPPCSPC, SPPFCSPC, RepConv, BoT3, CA, CBAM, NAMAttention, GAMAttention, Involution, Stem, ResCSPC, ResCSPB, \
  ResXCSPB, ResXCSPC, BottleneckCSPB, BottleneckCSPC,
- ASPP, BasicRFB, SPPCSPC_group, SimSPPF, HorBlock, CNeB,C3GC ,C3C2, nn.ConvTranspose2d]:
+ ASPP, BasicRFB, SPPCSPC_group, HorBlock, CNeB,C3GC ,C3C2, nn.ConvTranspose2d]:
c1, c2 = ch[f], args[0]
if c2 != no: # if not output
c2 = make_divisible(c2 * gw, 8)
@@ -374,7 +374,7 @@ def parse_model(d, ch): # model_dict, input_channels(3)
args[6] = make_divisible(args[6] * gw, 8)
elif m in [CBH, ES_Bottleneck, DWConvblock, RepVGGBlock, LC_Block, Dense, conv_bn_relu_maxpool, \
Shuffle_Block, stem, mobilev3_bneck, conv_bn_hswish, MobileNetV3_InvertedResidual, DepthSepConv, \
- ShuffleNetV2_Model, Conv_maxpool, CoT3, ConvNextBlock]:
+ ShuffleNetV2_Model, Conv_maxpool, CoT3, ConvNextBlock, RepBlock]:
c1, c2 = ch[f], args[0]
if c2 != no: # if not output
c2 = make_divisible(c2 * gw, 8)
@@ -395,7 +395,7 @@ def parse_model(d, ch): # model_dict, input_channels(3)
n = 1
elif m in [ReOrg, DWT]:
c2 = ch[f] * 4
- elif m in [S2Attention, CrissCrossAttention, SOCA, ShuffleAttention, SEAttention, SimAM, SKAttention]:
+ elif m in [S2Attention, SimSPPF, ACmix, CrissCrossAttention, SOCA, ShuffleAttention, SEAttention, SimAM, SKAttention]:
c1, c2 = ch[f], args[0]
if c2 != no: # if not output
c2 = make_divisible(c2 * gw, 8)
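
Net effect of these parse_model edits: MobileOneBlock is parsed like the Conv/C3 family (input and output channels plus its own arguments), RepBlock joins the CBH/RepVGGBlock family, and ACmix/SimSPPF move into the attention-style branch, which reads the input width from the previous layer. A hedged sketch of that branch's visible channel math — the collapsed lines that finally assemble args are not reproduced here:

import math

def make_divisible(x, divisor=8):
    # Same rounding parse_model applies to width-scaled channels.
    return math.ceil(x / divisor) * divisor

gw = 0.50                 # width_multiple for the yolov5s-scale configs above
no = 3 * (80 + 5)         # Detect output channels: 3 anchors x (nc + 5) = 255
ch = [512]                # output channels of the previous layer (already scaled)
f, args = -1, [1024]      # e.g. the SimSPPF or ACmix row

c1, c2 = ch[f], args[0]               # 512, 1024 — as in the visible branch body
if c2 != no:                          # not a Detect output
    c2 = make_divisible(c2 * gw, 8)   # 1024 -> 512
# The collapsed lines then pass c1/c2 (plus any remaining args) to the module,
# which is why the YAML rows for these modules now list a single channel count.
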
4 changes: 3 additions & 1 deletion train.py
@@ -53,7 +53,7 @@
from utils.loggers import Loggers
from utils.loggers.wandb.wandb_utils import check_wandb_resume
from utils.loss import ComputeLoss, ComputeNWDLoss, ComputeLossOTA_v7, ComputeLossAuxOTA, ComputeLossBinOTA
- # from utils.loss_ps import ComputeLoss_v4
+ from utils.loss_ps import ComputeLoss_v4
from utils.metrics import fitness
from utils.plots import plot_evolve, plot_labels
from utils.torch_utils import EarlyStopping, ModelEMA, de_parallel, is_parallel, select_device, torch_distributed_zero_first
@@ -290,6 +290,8 @@ def train(hyp, # path/to/hyp.yaml or hyp dictionary
compute_loss = ComputeLoss(model)
if loss_category is None:
compute_loss = ComputeLoss(model) # init loss class
+ elif opt.loss == 'v4':
+     compute_loss = ComputeLoss_v4(model)
else:
compute_loss = loss_category(model)# loss class
if opt.loss == 'nwd':
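
train.py enables the previously commented-out loss_ps import and adds a 'v4' branch to the loss selection, alongside the existing opt.loss handling (opt.loss presumably comes from a --loss command-line flag). A condensed restatement of that logic as a standalone helper — the function name is ours, and the surrounding loss_category setup is collapsed in the view above:

from utils.loss import ComputeLoss          # import shown unchanged above
from utils.loss_ps import ComputeLoss_v4    # import enabled by this commit

def pick_compute_loss(model, opt, loss_category=None):
    # Mirrors the branch order visible in the diff: default, then the new 'v4'
    # option, then any other registered loss class.
    if loss_category is None:
        return ComputeLoss(model)        # init default loss class
    elif opt.loss == 'v4':
        return ComputeLoss_v4(model)     # new: loss from utils.loss_ps
    else:
        return loss_category(model)      # previously selected loss class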
