Commit ca6585b

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Apr 23, 2023
1 parent ed4c373 commit ca6585b
Showing 1 changed file with 3 additions and 3 deletions.
@@ -18,8 +18,8 @@
 import torch
 
 from nemo.collections.nlp.modules.common.megatron.alibi_relative_position_embedding import (
-    build_slopes,
     build_relative_position,
+    build_slopes,
 )
 
 __all__ = ['KERPLERelativePositionEmbedding']
@@ -67,7 +67,7 @@ def __init__(
         self.kerple_b = torch.nn.Parameter(build_slopes(num_attention_heads, num_attention_heads_kerple))
         self.kerple_a = torch.zeros_like(self.kerple_b)
         self.kerple_p = torch.ones_like(self.kerple_b)
-        
+
         # cache the relative position bias. shape (num_attention_heads, max_seq_len, max_seq_len)
         self.relative_position = build_relative_position(max_seq_len, max_seq_len, num_attention_heads)
 
@@ -85,4 +85,4 @@ def forward(self, query_seq_length, key_seq_length):
         relative_position = torch.tril(relative_position)
 
         # shape (1, num_heads, query_length, key_length)
-        return - self.kerple_b * torch.log(1 + self.kerple_a * relative_position.unsqueeze(0).pow(self.kerple_p))
+        return -self.kerple_b * torch.log(1 + self.kerple_a * relative_position.unsqueeze(0).pow(self.kerple_p))
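
For reference, the line touched by the last hunk computes the KERPLE-log bias: per attention head, -kerple_b * log(1 + kerple_a * |i - j| ** kerple_p). Below is a minimal, self-contained sketch of just that computation with toy inputs; the ad-hoc distance matrix and the random parameter values stand in for what build_relative_position and build_slopes produce in the real module, which actually initializes kerple_a to zeros and kerple_p to ones.

import torch

# Illustrative sizes only; the real module takes these from the model config.
num_heads, seq_len = 4, 8

# Per-head parameters shaped (num_heads, 1, 1) so they broadcast over the
# (query, key) axes. Random values are used here just to make the output
# nonzero; in the module, kerple_b comes from build_slopes, kerple_a starts
# at zeros, and kerple_p starts at ones.
kerple_b = torch.rand(num_heads, 1, 1)
kerple_a = torch.rand(num_heads, 1, 1)
kerple_p = torch.ones(num_heads, 1, 1)

# Toy stand-in for build_relative_position: |i - j| distances, one copy per
# head, shape (num_heads, seq_len, seq_len).
pos = torch.arange(seq_len)
relative_position = (pos[None, :] - pos[:, None]).abs().float()
relative_position = relative_position.expand(num_heads, -1, -1)

# KERPLE-log bias, mirroring the corrected return statement; unsqueeze(0)
# adds the leading batch axis, giving (1, num_heads, seq_len, seq_len).
bias = -kerple_b * torch.log(1 + kerple_a * relative_position.unsqueeze(0).pow(kerple_p))
print(bias.shape)  # torch.Size([1, 4, 8, 8])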
