From 3e62d8260ccdcad242498b7274e5f70edea54689 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 16 Nov 2022 13:27:49 -0800
Subject: [PATCH] Fixed bug in notebook (#5382) (#5394)

Signed-off-by: Virginia Adams
Co-authored-by: Virginia Adams <78445382+vadam5@users.noreply.github.com>
Signed-off-by: andrusenkoau
---
 .../language_modeling/megatron_gpt_prompt_learning_model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py b/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py
index add7c898c80c..387ff52bb078 100644
--- a/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py
+++ b/nemo/collections/nlp/models/language_modeling/megatron_gpt_prompt_learning_model.py
@@ -986,7 +986,7 @@ def dummy():
 
         # Call same generate code as in MegatronGPT
         return megatron_gpt_generate(
-            self.cuda(), processed_inputs, self.tokenizer, length_params, sampling_params, task_ids
+            self.cuda(), processed_inputs, self.tokenizer, length_params, sampling_params, task_ids=task_ids
        )
 
     def predict_step(self, batch: Any, batch_idx: int, dataloader_idx: Optional[int] = None) -> Any:
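
Note on the change: the patch switches `task_ids` from a positional argument to the keyword form `task_ids=task_ids` in the `megatron_gpt_generate` call. This kind of fix typically matters when the callee's signature has one or more optional parameters sitting between the last positional argument the caller supplies and `task_ids`, so the positional value silently binds to the wrong parameter. The sketch below is a minimal, self-contained illustration of that failure mode; the function name and parameter list are hypothetical and are not the real NeMo `megatron_gpt_generate` signature.

# Minimal sketch of why the keyword form matters.
# NOTE: hypothetical signature for illustration only, not the NeMo API.
def generate(model, inputs, tokenizer, length_params, sampling_params,
             strategy=None, task_ids=None):
    # If the caller passes task_ids positionally, it binds to `strategy`
    # here, and `task_ids` stays None.
    return {"strategy": strategy, "task_ids": task_ids}


# Positional: the task IDs land in the wrong slot.
print(generate("model", "inputs", "tok", {}, {}, [0, 1]))
# -> {'strategy': [0, 1], 'task_ids': None}

# Keyword: the task IDs bind to the intended parameter.
print(generate("model", "inputs", "tok", {}, {}, task_ids=[0, 1]))
# -> {'strategy': None, 'task_ids': [0, 1]}

Passing trailing optional arguments by keyword, as the patch does, keeps the call site robust even if the callee's parameter order changes later.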