From 1d9488aa29d12e2667fd4e1af3748159dd3c433e Mon Sep 17 00:00:00 2001
From: Vahid Noroozi
Date: Tue, 25 Oct 2022 14:52:30 -0700
Subject: [PATCH] fixed the onnx bug in conformer for non-streaming models.
 (#5242)

Signed-off-by: Vahid
Signed-off-by: Vahid
Signed-off-by: Vladimir Bataev
---
 nemo/collections/asr/modules/conformer_encoder.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/nemo/collections/asr/modules/conformer_encoder.py b/nemo/collections/asr/modules/conformer_encoder.py
index e7d8f9c6f360..158532da2245 100644
--- a/nemo/collections/asr/modules/conformer_encoder.py
+++ b/nemo/collections/asr/modules/conformer_encoder.py
@@ -99,10 +99,10 @@ def input_example(self, max_batch=1, max_dim=256):
         if hasattr(self, 'export_cache_support') and self.export_cache_support:
             cache_last_channel = torch.randn(self.n_layers, max_batch, max_dim, self.d_model).to(dev)
             cache_last_time = torch.randn(self.n_layers, max_batch, self.d_model, self.conv_context_size[0]).to(dev)
+            all_input_example = tuple([input_example, input_example_length, cache_last_channel, cache_last_time])
         else:
-            cache_last_channel = cache_last_time = None
+            all_input_example = tuple([input_example, input_example_length])
 
-        all_input_example = tuple([input_example, input_example_length, cache_last_channel, cache_last_time])
         return all_input_example
 
     @property
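
For context, a minimal standalone sketch of the patched input_example() behavior. The DummyConformerEncoder wrapper, its default sizes, and the construction of input_example/input_example_length are assumptions that mirror the surrounding NeMo code rather than lines shown in this diff; only the if/else return logic is taken from the change. The effect of the fix is that non-streaming models (no cache support) now get a 2-tuple example input, so ONNX tracing no longer receives None cache tensors, while cache-aware streaming models still get the 4-tuple.

# Sketch only: a toy module mimicking the patched input_example() logic.
# Attribute names follow the diff; the default values here are illustrative assumptions.
import torch


class DummyConformerEncoder(torch.nn.Module):
    def __init__(self, feat_in=80, d_model=512, n_layers=17, conv_context_size=(2, 2), export_cache_support=False):
        super().__init__()
        self._feat_in = feat_in
        self.d_model = d_model
        self.n_layers = n_layers
        self.conv_context_size = conv_context_size
        self.export_cache_support = export_cache_support
        self.proj = torch.nn.Linear(1, 1)  # gives the module a parameter so next(self.parameters()) works

    def input_example(self, max_batch=1, max_dim=256):
        dev = next(self.parameters()).device
        input_example = torch.randn(max_batch, self._feat_in, max_dim).to(dev)
        input_example_length = torch.randint(1, max_dim, (max_batch,)).to(dev)
        if hasattr(self, 'export_cache_support') and self.export_cache_support:
            cache_last_channel = torch.randn(self.n_layers, max_batch, max_dim, self.d_model).to(dev)
            cache_last_time = torch.randn(self.n_layers, max_batch, self.d_model, self.conv_context_size[0]).to(dev)
            # Streaming export: cache tensors are part of the traced inputs.
            return (input_example, input_example_length, cache_last_channel, cache_last_time)
        # Non-streaming export: no cache tensors, so ONNX tracing never sees None inputs.
        return (input_example, input_example_length)


print(len(DummyConformerEncoder(export_cache_support=False).input_example()))  # 2 inputs (non-streaming)
print(len(DummyConformerEncoder(export_cache_support=True).input_example()))   # 4 inputs (streaming, cache-aware)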