Skip to content

Commit

Permalink
Fix: Bahdanau attention dropout was ignored (dropout now applied by passing `training=training` to the attention calls)
Browse files Browse the repository at this point in the history
  • Loading branch information
brainsqueeze committed Jul 15, 2022
1 parent 85e44b7 commit 7b4fff7
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

setup(
name="text2vec",
version="2.0.2",
version="2.0.3",
description="Building blocks for text vectorization and embedding",
author="Dave Hollander",
author_url="https://github.com/brainsqueeze",
Expand Down
4 changes: 2 additions & 2 deletions text2vec/models/transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def call(self, x, mask, training: bool = False):
x = self.h_drop(ffn(x), training=training) + x
x = self.layer_norm(x)

x, context = self.attention(x)
x, context = self.attention(x, training=training)
return x, context


Expand Down Expand Up @@ -124,7 +124,7 @@ def call(self, x_enc, x_dec, dec_mask, context, attention: BahdanauAttention, tr
), training=training) + x_dec
x_dec = self.layer_norm(x_dec)

x_dec, cross_context = attention(encoded=x_enc, decoded=x_dec)
x_dec, cross_context = attention(encoded=x_enc, decoded=x_dec, training=training)
x_dec = self.h_drop(self.projection(x_dec, projection_vector=cross_context), training=training) + x_dec

x_dec = self.layer_norm(x_dec)
Expand Down

0 comments on commit 7b4fff7

Please sign in to comment.