fixing attn 2
isaacmg committed Mar 1, 2024
1 parent 59576c1 commit 1220180
Showing 2 changed files with 6 additions and 5 deletions.
5 changes: 3 additions & 2 deletions flood_forecast/transformer_xl/informer.py
@@ -245,10 +245,11 @@ def forward(self, x, cross, x_mask=None, cross_mask=None) -> torch.Tensor:
         res = self.dropout(x)
         x = x + res
         x = self.norm1(x)
-        x = x + self.dropout(self.cross_attention(
+        x, attn = self.cross_attention(
             x, cross, cross,
             attn_mask=cross_mask
-        ))
+        )
+        x = x + self.dropout(x)

         y = x = self.norm2(x)
         y = self.dropout(self.activation(self.conv1(y.transpose(-1, 1))))
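The hunk above replaces the inline call with an explicit unpacking, since the cross-attention layer returns a tuple of (attended values, attention weights) rather than a bare tensor. A minimal sketch of the general residual pattern, using torch.nn.MultiheadAttention as a stand-in for the repository's cross-attention layer (the variable names and dimensions below are illustrative, not taken from informer.py):

import torch
import torch.nn as nn

# Minimal sketch, not the repository's implementation: nn.MultiheadAttention stands in
# for the Informer cross-attention layer, since both return an (output, attention_weights)
# tuple that has to be unpacked before the residual add.
d_model, n_heads = 32, 4
cross_attention = nn.MultiheadAttention(d_model, n_heads, batch_first=True)
dropout = nn.Dropout(0.1)
norm1 = nn.LayerNorm(d_model)

x = torch.rand(2, 10, d_model)      # decoder input  (batch, target_len, d_model)
cross = torch.rand(2, 20, d_model)  # encoder output (batch, source_len, d_model)

new_x, attn = cross_attention(x, cross, cross)  # unpack the (output, weights) tuple
x = norm1(x + dropout(new_x))                   # residual connection, then normalization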
6 changes: 3 additions & 3 deletions tests/test_attn.py
@@ -23,9 +23,9 @@ def test_full_attn(self):
         # Tests the full attention mechanism and
         t = torch.rand(2, 20, 8, 30)
         a = self.full_attention(torch.rand(2, 20, 8, 30), t, t, self.triangle)
-        self.assertIsInstance(a, torch.Tensor)
-        self.assertEqual(len(a.shape), 4)
-        self.assertEqual(a.shape[0], 2)
+        self.assertIsInstance(a[0], torch.Tensor)
+        self.assertEqual(len(a[0].shape), 4)
+        self.assertEqual(a[0].shape[0], 2)

     def test_single_local(self):
         Single_Local_SelfAttn_Module(10, 4, 10, 5, 1, 128, 128, 128, 32, 2, 8)
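The updated assertions index a[0] because the attention call now returns a tuple instead of a bare tensor. A self-contained sketch of the behavior the test checks, with a toy attention function standing in for the repository's full attention module (the function and tensor shapes below are illustrative, not the library's code):

import torch

# Hypothetical stand-in for a FullAttention-style module: it returns an
# (output, attention_weights) tuple, which is why the test indexes element 0.
def toy_full_attention(q, k, v, mask=None):
    scores = torch.einsum("blhd,bshd->bhls", q, k) / q.shape[-1] ** 0.5
    if mask is not None:
        scores = scores.masked_fill(mask, float("-inf"))
    attn = torch.softmax(scores, dim=-1)
    out = torch.einsum("bhls,bshd->blhd", attn, v)
    return out, attn

t = torch.rand(2, 20, 8, 30)   # (batch, length, heads, dim), mirroring the test tensors
a = toy_full_attention(torch.rand(2, 20, 8, 30), t, t)
assert isinstance(a[0], torch.Tensor)              # a[0] is the attended output
assert len(a[0].shape) == 4 and a[0].shape[0] == 2  # batch dimension preserved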
