Skip to content

Commit

Permalink
fine tuning offset
Browse files — browse the repository at this point in the history
  • Loading branch information
ajfisch committed May 7, 2018
1 parent 36a6e3f commit 055570d
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 7 deletions.
8 changes: 4 additions & 4 deletions drqa/reader/layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ def forward(self, x, y, y_mask):
scores.data.masked_fill_(y_mask.data, -float('inf'))

# Normalize with softmax
alpha_flat = F.softmax(scores.view(-1, y.size(1)))
alpha_flat = F.softmax(scores.view(-1, y.size(1)), dim=-1)
alpha = alpha_flat.view(-1, x.size(1), y.size(1))

# Take weighted average
Expand Down Expand Up @@ -247,10 +247,10 @@ def forward(self, x, y, x_mask):
if self.normalize:
if self.training:
# In training we output log-softmax for NLL
alpha = F.log_softmax(xWy)
alpha = F.log_softmax(xWy, dim=-1)
else:
# ...Otherwise 0-1 probabilities
alpha = F.softmax(xWy)
alpha = F.softmax(xWy, dim=-1)
else:
alpha = xWy.exp()
return alpha
Expand All @@ -277,7 +277,7 @@ def forward(self, x, x_mask):
x_flat = x.view(-1, x.size(-1))
scores = self.linear(x_flat).view(x.size(0), x.size(1))
scores.data.masked_fill_(x_mask.data, -float('inf'))
alpha = F.softmax(scores)
alpha = F.softmax(scores, dim=-1)
return alpha


Expand Down
7 changes: 4 additions & 3 deletions drqa/reader/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,15 +239,16 @@ def reset_parameters(self):

# Reset fixed embeddings to original value
if self.args.tune_partial > 0:
# Embeddings to fix are indexed after the special + N tuned words
offset = self.args.tune_partial + self.word_dict.START
if self.parallel:
embedding = self.network.module.embedding.weight.data
fixed_embedding = self.network.module.fixed_embedding
else:
embedding = self.network.embedding.weight.data
fixed_embedding = self.network.fixed_embedding
if offset < embedding.size(0):

# Embeddings to fix are the last indices
offset = embedding.size(0) - fixed_embedding.size(0)
if offset >= 0:
embedding[offset:] = fixed_embedding

# --------------------------------------------------------------------------
Expand Down

0 comments on commit 055570d

Please sign in to comment.