fix bug with default empty memories for txl
lucidrains committed Jan 5, 2021
1 parent: fb7a5d4 · commit: 208d70d
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '0.7.1',
+  version = '0.7.2',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
4 changes: 2 additions & 2 deletions x_transformers/x_transformers.py
@@ -437,7 +437,7 @@ def __init__(
             layer_types = default_block * depth

         self.layer_types = layer_types
-        self.default_mems = ([None] * len(list(filter(equals('a'), layer_types))))
+        self.num_attn_layers = len(list(filter(equals('a'), layer_types)))

         for layer_type in self.layer_types:
             if layer_type == 'a':
@@ -472,7 +472,7 @@ def forward(
         prev_attn = None
         prev_cross_attn = None

-        mems = mems.copy() if exists(mems) else self.default_mems
+        mems = mems.copy() if exists(mems) else [None] * self.num_attn_layers

         for ind, (layer_type, (norm, block)) in enumerate(zip(self.layer_types, self.layers)):
             is_last = ind == (len(self.layers) - 1)
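Why the fix works: the old code built a single default_mems list in __init__ and, whenever forward was called without mems, used that same list object without copying it. Python lists are mutable, so any in-place update to mems during the forward pass persisted into the shared default and leaked into later calls. The new code stores only the count (num_attn_layers) and builds a fresh [None] * self.num_attn_layers list on every call. Below is a minimal sketch of the failure mode; the Buggy/Fixed classes are hypothetical stand-ins for AttentionLayers, and the library's exists(x) helper is written out as x is not None:

    # Minimal sketch of the shared-mutable-default bug (hypothetical classes;
    # only the default-memory pattern mirrors the diff above).

    class Buggy:
        def __init__(self, num_attn_layers):
            # one list object, created once and reused by every forward call
            self.default_mems = [None] * num_attn_layers

        def forward(self, mems = None):
            # no copy on the default path, so `mems` aliases self.default_mems
            mems = mems.copy() if mems is not None else self.default_mems
            mems[0] = 'new hidden state'  # in-place write mutates the shared default
            return mems

    class Fixed:
        def __init__(self, num_attn_layers):
            self.num_attn_layers = num_attn_layers

        def forward(self, mems = None):
            # a fresh list is built on every call, so nothing leaks between calls
            mems = mems.copy() if mems is not None else [None] * self.num_attn_layers
            mems[0] = 'new hidden state'
            return mems

    buggy = Buggy(2)
    buggy.forward()
    assert buggy.default_mems == ['new hidden state', None]  # stale state leaked into the default

    fixed = Fixed(2)
    fixed.forward()
    assert fixed.forward() == ['new hidden state', None]     # same output, no cross-call leakage

Recomputing the default costs one trivial list allocation per call and removes the aliasing hazard entirely, which is presumably why the commit drops self.default_mems altogether rather than copying it on the default branch.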
