Commit

update van
uyzhang committed Mar 5, 2022
1 parent 809c593 commit e8f45db
Showing 37 changed files with 50 additions and 1 deletion.
Empty file modified README.md
100644 → 100755
Empty file.
Empty file modified code/branch_attentions/dynamic_conv.py
100644 → 100755
Empty file.
Empty file modified code/branch_attentions/highway.py
100644 → 100755
Empty file.
Empty file modified code/branch_attentions/resnest_module.py
100644 → 100755
Empty file.
Empty file modified code/branch_attentions/sk_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_attentions/dia_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_attentions/eca_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_attentions/enc_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_attentions/fcanet.py
100644 → 100755
Empty file.
Empty file modified code/channel_attentions/gct_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_attentions/se_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_attentions/soca_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_spatial_attentions/bam.py
100644 → 100755
Empty file.
Empty file modified code/channel_spatial_attentions/cbam.py
100644 → 100755
Empty file.
Empty file modified code/channel_spatial_attentions/coordatt_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_spatial_attentions/danet.py
100644 → 100755
Empty file.
1 change: 0 additions & 1 deletion code/channel_spatial_attentions/scnet.py
100644 → 100755
@@ -1,7 +1,6 @@
 # Improving convolutional networks with self-calibrated convolutions (CVPR 2020)
 import jittor as jt
 from jittor import nn
-from matplotlib.pyplot import grid
 
 
 class SCConv(nn.Module):
Empty file modified code/channel_spatial_attentions/simam_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_spatial_attentions/strip_pooling_module.py
100644 → 100755
Empty file.
Empty file modified code/channel_spatial_attentions/triplet_attention.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/attention_augmented_module.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/doub_attention.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/gc_module.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/hamnet.py
100644 → 100755
Empty file.
50 changes: 50 additions & 0 deletions code/spatial_attentions/lka.py
@@ -0,0 +1,50 @@
# Visual Attention Network (Large Kernel Attention, LKA)
import jittor as jt
import jittor.nn as nn


class AttentionModule(nn.Module):
    def __init__(self, dim):
        super().__init__()
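        # LKA decomposes a large-kernel convolution into a 5x5 depth-wise conv,
        # a 7x7 depth-wise dilated conv (dilation 3) and a 1x1 point-wise conv.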
        self.conv0 = nn.Conv2d(dim, dim, 5, padding=2, groups=dim)
        self.conv_spatial = nn.Conv2d(
            dim, dim, 7, stride=1, padding=9, groups=dim, dilation=3)
        self.conv1 = nn.Conv2d(dim, dim, 1)

    def execute(self, x):
        u = x.clone()
        attn = self.conv0(x)
        attn = self.conv_spatial(attn)
        attn = self.conv1(attn)

        return u * attn


class SpatialAttention(nn.Module):
    def __init__(self, d_model):
        super().__init__()
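        # 1x1 projection -> GELU -> LKA spatial gating unit -> 1x1 projection;
        # execute() wraps this block with a residual connection.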

        self.proj_1 = nn.Conv2d(d_model, d_model, 1)
        self.activation = nn.GELU()
        self.spatial_gating_unit = AttentionModule(d_model)
        self.proj_2 = nn.Conv2d(d_model, d_model, 1)

    def execute(self, x):
        shortcut = x.clone()
        x = self.proj_1(x)
        x = self.activation(x)
        x = self.spatial_gating_unit(x)
        x = self.proj_2(x)
        x = x + shortcut
        return x


def main():
    attention_block = SpatialAttention(64)
    input = jt.rand([4, 64, 32, 32])
    output = attention_block(input)
    print(input.size(), output.size())


if __name__ == '__main__':
    main()
Empty file modified code/spatial_attentions/mhsa.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/ocr.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/offset_module.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/segformer_module.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/self_attention.py
100644 → 100755
Empty file.
Empty file modified code/spatial_attentions/stn.py
100644 → 100755
Empty file.
Empty file modified code/spatial_temporal_attentions/dstt_module.py
100644 → 100755
Empty file.
Empty file modified code/temporal_attentions/gltr.py
100644 → 100755
Empty file.
Empty file modified imgs/attention_category.png
100644 → 100755
Empty file modified imgs/fuse.png
100644 → 100755
Empty file modified imgs/fuse_fig.png
100644 → 100755
Empty file modified imgs/timeline.png
100644 → 100755

0 comments on commit e8f45db
