Skip to content

Commit

Permalink
Add dropout layer for embedding vector
Browse files Browse the repository at this point in the history
Add dropout layer for embedding vector.
  • Loading branch information
DevKiHyun authored Jul 2, 2020
1 parent 2e966f0 commit 7c021c6
Showing 1 changed file with 2 additions and 0 deletions.
2 changes: 2 additions & 0 deletions las.pytorch/models/DecoderRNN.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ def __init__(self, vocab_size, max_len, hidden_size, encoder_size,
batch_first=True, dropout=dropout_p, bidirectional=self.bidirectional_decoder)

self.embedding = nn.Embedding(self.vocab_size, self.hidden_size)
        self.input_dropout = nn.Dropout(self.dropout_p)  # was `==`: comparison, not assignment — layer never created

self.attention = Attention(dec_dim=self.hidden_size, enc_dim=self.encoder_output_size, conv_dim=1, attn_dim=self.hidden_size)
self.fc = nn.Linear(self.hidden_size + self.encoder_output_size, self.output_size)

Expand Down

0 comments on commit 7c021c6

Please sign in to comment.