From 21ab904f102566ab44496f8c2f39b27a33f9bd06 Mon Sep 17 00:00:00 2001
From: MaximumEntropy
Date: Mon, 21 Nov 2022 12:52:34 -0800
Subject: [PATCH] Add num layers check

Signed-off-by: MaximumEntropy
---
 nemo/collections/nlp/modules/common/megatron/transformer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nemo/collections/nlp/modules/common/megatron/transformer.py b/nemo/collections/nlp/modules/common/megatron/transformer.py
index dd4f920f8194..12e18c853bce 100644
--- a/nemo/collections/nlp/modules/common/megatron/transformer.py
+++ b/nemo/collections/nlp/modules/common/megatron/transformer.py
@@ -2372,7 +2372,7 @@ def forward(
                 fp8_context = nullcontext()
 
             with fp8_context:
-                if self.activations_checkpoint_granularity == 'full':
+                if self.activations_checkpoint_granularity == 'full' and self.activations_checkpoint_num_layers > 0:
                     hidden_states = self._checkpointed_forward(
                         hidden_states,
                         attention_mask,
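
For reference, a minimal sketch of the behavior the new condition produces. This is not NeMo's actual ParallelTransformer: the class name, _regular_forward, and constructor arguments below are hypothetical stand-ins; only the attribute names and _checkpointed_forward come from the patch itself.

# Minimal sketch, assuming a simplified stand-in for the real transformer stack.
from contextlib import nullcontext


class CheckpointGuardSketch:
    def __init__(self, activations_checkpoint_granularity=None, activations_checkpoint_num_layers=0):
        self.activations_checkpoint_granularity = activations_checkpoint_granularity
        self.activations_checkpoint_num_layers = activations_checkpoint_num_layers

    def _checkpointed_forward(self, hidden_states, attention_mask):
        # Stand-in for the activation-checkpointed layer loop.
        return hidden_states

    def _regular_forward(self, hidden_states, attention_mask):
        # Stand-in for the ordinary (non-checkpointed) layer loop.
        return hidden_states

    def forward(self, hidden_states, attention_mask):
        fp8_context = nullcontext()  # fp8 autocast handling omitted in this sketch
        with fp8_context:
            # Before the patch, granularity == 'full' alone selected the
            # checkpointed path, even with activations_checkpoint_num_layers == 0.
            # The added check routes that configuration to the regular path.
            if self.activations_checkpoint_granularity == 'full' and self.activations_checkpoint_num_layers > 0:
                return self._checkpointed_forward(hidden_states, attention_mask)
            return self._regular_forward(hidden_states, attention_mask)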