able to return per token logit entropy
lucidrains committed Dec 27, 2024
1 parent e1be411 commit f944dd7
Showing 2 changed files with 18 additions and 1 deletion.
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.43.4',
+  version = '1.43.5',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
17 changes: 17 additions & 0 deletions x_transformers/x_transformers.py
@@ -38,6 +38,7 @@ class LayerIntermediates:
     attn_z_loss: Tensor | None = None
     mems: Tensor | None = None
     memory_tokens: Tensor | None = None
+    logit_entropies: Tensor | None = None

 LinearNoBias = partial(nn.Linear, bias = False)

@@ -136,6 +137,15 @@ def or_reduce(masks):
         head = head | rest
     return head

+# entropy
+
+def calc_entropy(
+    t: Tensor,
+    is_prob = False
+):
+    prob = t.softmax(dim = -1) if not is_prob else t
+    return -(prob * log(prob)).sum(dim = -1)
+
 # auxiliary loss helpers

 def calc_z_loss(
@@ -2592,6 +2602,7 @@ def forward(
         return_embeddings = False,
         return_logits_and_embeddings = False,
         return_intermediates = False,
+        return_logit_entropies = False,
         mask = None,
         return_mems = False,
         return_attn = False,
@@ -2809,6 +2820,12 @@ def forward(
         else:
             out = logits

+        # logit entropies
+
+        if return_logit_entropies:
+            intermediates.logit_entropies = calc_entropy(logits)
+            return_intermediates = True
+
         # aux loss

         if return_attn_z_loss:
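For reference, the new calc_entropy helper takes logits (or probabilities, with is_prob = True), applies a softmax over the last dimension, and returns the Shannon entropy -sum(p * log p) per token position. A minimal standalone sketch of the same computation; the clamp below stands in for the repository's own log helper, which is defined elsewhere in x_transformers.py:

import torch

logits = torch.randn(2, 8, 100)                                  # (batch, seq, vocab)
prob = logits.softmax(dim = -1)
entropy = -(prob * prob.clamp(min = 1e-20).log()).sum(dim = -1)  # clamp guards against log(0)
print(entropy.shape)                                             # torch.Size([2, 8]) - one value per token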

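A minimal usage sketch of the new flag, assuming the usual TransformerWrapper / Decoder setup from the project README (model sizes and inputs below are illustrative). Because requesting entropies forces return_intermediates = True, the call returns both the logits and the intermediates dataclass:

import torch
from x_transformers import TransformerWrapper, Decoder

model = TransformerWrapper(
    num_tokens = 256,
    max_seq_len = 1024,
    attn_layers = Decoder(dim = 512, depth = 6, heads = 8)
)

x = torch.randint(0, 256, (1, 1024))

# return_logit_entropies = True implies return_intermediates = True
logits, intermediates = model(x, return_logit_entropies = True)

print(logits.shape)                            # torch.Size([1, 1024, 256])
print(intermediates.logit_entropies.shape)     # torch.Size([1, 1024]) - per token entropy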