
Commit 34a4818: allow for attention bias to be passed in externally

lucidrains committed Oct 18, 2024
1 parent 7c56d23
Showing 2 changed files with 3 additions and 2 deletions.
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.40.1',
+  version = '1.40.2',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
3 changes: 2 additions & 1 deletion x_transformers/x_transformers.py
@@ -1107,6 +1107,7 @@ def forward(
         context_mask = None,
         attn_mask = None,
         rel_pos = None,
+        attn_bias = None,
         rotary_pos_emb = None,
         prev_attn = None,
         mem = None,
@@ -1237,8 +1238,8 @@ def forward(
 
         # prepare relative positional bias, if needed
 
-        attn_bias = None
         if exists(rel_pos):
+            assert not exists(attn_bias)
             attn_bias = rel_pos(i, j)
             attn_bias = pad_at_dim(attn_bias, (num_mem_kv, 0), value = 0.) # handle memory key / values
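With this change, a caller can supply its own additive attention bias instead of relying on a configured relative position module; the new assert guards against passing an external attn_bias while rel_pos is also active. Below is a minimal usage sketch, not taken from the commit: it assumes the forward shown above is x_transformers.Attention, that the bias broadcasts against the (batch, heads, query_len, key_len) pre-softmax logits, and that forward returns (out, intermediates) as in recent versions.

    import torch
    from x_transformers.x_transformers import Attention

    # plain attention block, no rel_pos configured (the new assert forbids combining the two)
    attn = Attention(dim = 512, heads = 8, dim_head = 64)

    x = torch.randn(2, 1024, 512)            # (batch, seq, dim)

    # externally computed additive bias, e.g. a per-head structural or positional prior
    attn_bias = torch.randn(8, 1024, 1024)   # (heads, seq_q, seq_k), broadcast over batch

    out, intermediates = attn(x, attn_bias = attn_bias)  # attn_bias is the kwarg added by this commit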
