
adding bias_dropout_add_fusion option for BERT (#7332)
Signed-off-by: Alexander Jipa <azzhipa@amazon.com>
Co-authored-by: Alexander Jipa <azzhipa@amazon.com>
2 people authored and yaoyu-33 committed Sep 5, 2023
1 parent c5059d8 commit f537f39
Showing 2 changed files with 3 additions and 0 deletions.
File 1 of 2 (path not preserved in this view):

@@ -185,6 +185,7 @@ def __init__(
     layernorm_epsilon=1e-5,
     masked_softmax_fusion=False,
     bias_gelu_fusion=True,
+    bias_dropout_add_fusion=True,
     openai_gelu=False,
     onnx_safe=False,
     add_binary_head=True,
@@ -232,6 +233,7 @@ def __init__(
     layernorm_epsilon=layernorm_epsilon,
     masked_softmax_fusion=masked_softmax_fusion,
     bias_activation_fusion=bias_gelu_fusion,
+    bias_dropout_add_fusion=bias_dropout_add_fusion,
     openai_gelu=openai_gelu,
     onnx_safe=onnx_safe,
     megatron_legacy=megatron_legacy,
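For context, the flag plumbed through above controls whether the trailing bias add, dropout, and residual add of each transformer sublayer run as a single fused (JIT-compiled) kernel. Below is a minimal unfused reference sketch of that pattern; the function name and signature are illustrative, not the exact NeMo/Megatron internals.

import torch
import torch.nn.functional as F

def bias_dropout_add(x: torch.Tensor, bias: torch.Tensor,
                     residual: torch.Tensor, prob: float,
                     training: bool) -> torch.Tensor:
    # Unfused reference for the bias + dropout + residual-add pattern.
    # With bias_dropout_add_fusion=True this sequence is typically fused
    # into one kernel; the math is identical either way.
    out = F.dropout(x + bias, p=prob, training=training)
    return residual + out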
File 2 of 2 (path not preserved in this view):

@@ -181,6 +181,7 @@ def model_provider_func(self, pre_process, post_process):
     layernorm_epsilon=cfg.get('layernorm_epsilon', 1e-5),
     masked_softmax_fusion=cfg.get('masked_softmax_fusion', True),
     bias_gelu_fusion=cfg.get('bias_gelu_fusion', True),
+    bias_dropout_add_fusion=cfg.get("bias_dropout_add_fusion", True),
     onnx_safe=cfg.get('onnx_safe', False),
     add_binary_head=cfg.bert_binary_head,
     megatron_legacy=cfg.get('megatron_legacy', False),
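The second file simply forwards the option from the model config, defaulting to True when the key is absent. A hedged usage sketch, assuming the OmegaConf-backed cfg that NeMo model classes receive (the config keys shown come from the diff; the rest is illustrative):

from omegaconf import OmegaConf

# Hypothetical config fragment; only the keys from the diff are real.
cfg = OmegaConf.create({
    'bias_gelu_fusion': True,
    'bias_dropout_add_fusion': False,  # opt out of the fused kernel
})

# Mirrors the cfg.get(...) call added above: the flag defaults to True
# when missing from the config.
assert cfg.get('bias_dropout_add_fusion', True) is False
assert OmegaConf.create({}).get('bias_dropout_add_fusion', True) is True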
