From 391ef23df90c545c60d206bdeef1decc7b0067ee Mon Sep 17 00:00:00 2001
From: Chen Cui
Date: Fri, 16 Aug 2024 01:11:09 -0400
Subject: [PATCH] import in function to fix test

Signed-off-by: Chen Cui
---
 .../nlp/models/language_modeling/megatron_gpt_model.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
index f0e763f91d6a..49f749e4a40e 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_model.py
@@ -39,7 +39,6 @@
 from nemo.collections.nlp.data.language_modeling.megatron.gpt_dataset import build_train_valid_test_datasets
 from nemo.collections.nlp.data.language_modeling.megatron.gpt_fim_dataset import GPTFIMDataset, GPTFIMDatasetConfig
 from nemo.collections.nlp.models.language_modeling.megatron.falcon.falcon_spec import get_falcon_layer_spec
-from nemo.collections.nlp.models.language_modeling.megatron.gemma2.gemma2_spec import get_gemma2_layer_spec
 from nemo.collections.nlp.models.language_modeling.megatron.gpt_full_te_layer_autocast_spec import (
     get_gpt_full_te_layer_autocast_spec,
 )
@@ -155,6 +154,8 @@ def mcore_supports_moe() -> bool:
 
 ## TODO: This function will not work if TE is not installed
 def get_specs(spec_name, transformer_config=None, use_te=True, hyena_cfg: Dict = None):
+    from nemo.collections.nlp.models.language_modeling.megatron.gemma2.gemma2_spec import get_gemma2_layer_spec
+
     # else cases for backwards compatibility with neva
     num_experts = transformer_config.num_moe_experts if transformer_config else None
     moe_grouped_gemm = transformer_config.moe_grouped_gemm if transformer_config else False