
update simam_module
uyzhang committed Dec 26, 2021
1 parent 9284f22 commit 859ec1e
Showing 2 changed files with 37 additions and 1 deletion.
1 change: 0 additions & 1 deletion code/channel_spatial_attentions/danet.py
@@ -1,6 +1,5 @@
import jittor as jt
import jittor.nn as nn
from jittor import init


class DANetHead(nn.Module):
37 changes: 37 additions & 0 deletions code/channel_spatial_attentions/simam_module.py
@@ -0,0 +1,37 @@
# SimAM: A Simple, Parameter-Free Attention Module for Convolutional Neural Networks (ICML 2021)
import jittor as jt
from jittor import nn


class simam_module(nn.Module):
    def __init__(self, e_lambda=1e-4):
        super(simam_module, self).__init__()

        self.activation = nn.Sigmoid()
        self.e_lambda = e_lambda  # regularization term lambda in the energy function

    def execute(self, x):
        b, c, h, w = x.size()

        # number of spatial positions other than the target neuron itself
        n = w * h - 1

        # squared deviation of each position from its per-channel spatial mean
        x_minus_mu_square = (
            x - x.mean(dim=2, keepdims=True).mean(dim=3, keepdims=True)).pow(2)

        # importance map: (x - mu)^2 / (4 * (sigma^2 + lambda)) + 0.5,
        # i.e. the inverse of the minimal energy of each neuron
        y = x_minus_mu_square / (
            4 * (x_minus_mu_square.sum(dim=2, keepdims=True).sum(dim=3, keepdims=True) / n
                 + self.e_lambda)) + 0.5

        # re-weight the input by the sigmoid of the importance map
        return x * self.activation(y)


def main():
    attention_block = simam_module()
    input = jt.ones([4, 64, 32, 32])
    output = attention_block(input)
    print(input.size(), output.size())


if __name__ == '__main__':
    main()
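
For reference, a minimal sketch of how the new module might be used inside a larger Jittor model, e.g. re-weighting the output of a convolution. The ConvWithSimAM wrapper, its channel sizes, and the import path (which assumes the repository root is on PYTHONPATH) are illustrative assumptions, not part of this commit.

# Hypothetical usage sketch: wrap a conv block with the parameter-free SimAM attention.
import jittor as jt
from jittor import nn
from code.channel_spatial_attentions.simam_module import simam_module  # path assumes repo root on PYTHONPATH


class ConvWithSimAM(nn.Module):
    def __init__(self, in_channels=3, out_channels=64):
        super(ConvWithSimAM, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1)
        self.bn = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU()
        self.attn = simam_module(e_lambda=1e-4)

    def execute(self, x):
        x = self.relu(self.bn(self.conv(x)))
        # re-weight the feature map; SimAM adds no learnable parameters
        return self.attn(x)


x = jt.randn(2, 3, 32, 32)
print(ConvWithSimAM()(x).shape)  # expected: [2, 64, 32, 32]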
