Commit

fix bug with absolute pos emb
lucidrains committed Jan 1, 2021
1 parent d5975c2 commit 8e8200d
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '0.5.1',
+  version = '0.5.2',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
2 changes: 1 addition & 1 deletion x_transformers/x_transformers.py
@@ -498,7 +498,7 @@ def __init__(
         dim = attn_layers.dim
         self.max_seq_len = max_seq_len
         self.token_emb = nn.Embedding(num_tokens, dim)
-        self.pos_emb = AbsolutePositionalEmbedding(max_seq_len, dim) if (use_pos_emb and not attn_layers.has_pos_emb) else always(0)
+        self.pos_emb = AbsolutePositionalEmbedding(dim, max_seq_len) if (use_pos_emb and not attn_layers.has_pos_emb) else always(0)
         self.emb_dropout = nn.Dropout(emb_dropout)
 
         self.attn_layers = attn_layers
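
The one-line fix above swaps the constructor arguments so the call matches AbsolutePositionalEmbedding's (dim, max_seq_len) signature. As a rough illustration of why the old order was a bug, here is a minimal sketch of an absolute positional embedding module with that argument order (an assumption based on this commit, not the verbatim library code):

import torch
from torch import nn

class AbsolutePositionalEmbedding(nn.Module):
    # minimal sketch with the (dim, max_seq_len) argument order implied by the fix
    def __init__(self, dim, max_seq_len):
        super().__init__()
        # one learned vector of width `dim` per position, up to max_seq_len positions
        self.emb = nn.Embedding(max_seq_len, dim)

    def forward(self, x):
        # x: (batch, seq_len, dim) token embeddings
        positions = torch.arange(x.shape[1], device=x.device)
        return self.emb(positions)  # (seq_len, dim), broadcast over the batch

# With the arguments reversed, as in the removed line, the table would be built as
# nn.Embedding(dim, max_seq_len): wrong embedding width, and an out-of-range index
# as soon as the sequence length exceeds `dim`.
pos_emb = AbsolutePositionalEmbedding(dim=512, max_seq_len=1024)
tokens = torch.randn(2, 128, 512)
print(pos_emb(tokens).shape)  # torch.Size([128, 512])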

0 comments on commit 8e8200d
