From 8823ff5a750c383f1eebabc48c10365dbc27a631 Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 19:45:30 +0900 Subject: [PATCH 1/8] fix norm issue --- parallel_wavegan/bin/train.py | 12 ++-- parallel_wavegan/models/hifigan.py | 100 ++++++++++++++++++++++++++++- test/test_hifigan.py | 34 ++++++++++ 3 files changed, 140 insertions(+), 6 deletions(-) diff --git a/parallel_wavegan/bin/train.py b/parallel_wavegan/bin/train.py index 43632dbd..5fab384a 100755 --- a/parallel_wavegan/bin/train.py +++ b/parallel_wavegan/bin/train.py @@ -156,15 +156,19 @@ def load_checkpoint(self, checkpoint_path, load_only_params=False): state_dict = torch.load(checkpoint_path, map_location="cpu") if self.config["distributed"]: self.model["generator"].module.load_state_dict( - state_dict["model"]["generator"] + state_dict["model"]["generator"], ) self.model["discriminator"].module.load_state_dict( - state_dict["model"]["discriminator"] + state_dict["model"]["discriminator"], + strict=False, ) else: - self.model["generator"].load_state_dict(state_dict["model"]["generator"]) + self.model["generator"].load_state_dict( + state_dict["model"]["generator"], + ) self.model["discriminator"].load_state_dict( - state_dict["model"]["discriminator"] + state_dict["model"]["discriminator"], + strict=False, ) if not load_only_params: self.steps = state_dict["steps"] diff --git a/parallel_wavegan/models/hifigan.py b/parallel_wavegan/models/hifigan.py index e42eea98..d44a1036 100644 --- a/parallel_wavegan/models/hifigan.py +++ b/parallel_wavegan/models/hifigan.py @@ -571,13 +571,19 @@ def __init__( raise ValueError("Either use use_weight_norm or use_spectral_norm.") # apply weight norm + self.use_weight_norm = use_weight_norm if use_weight_norm: self.apply_weight_norm() # apply spectral norm + self.use_spectral_norm = use_spectral_norm if use_spectral_norm: self.apply_spectral_norm() + # backward compatibility + self._register_load_state_dict_pre_hook(self._load_state_dict_pre_hook) + self.register_load_state_dict_post_hook(self._load_state_dict_post_hook) + def forward(self, x): """Calculate forward propagation. 
@@ -599,7 +605,7 @@ def apply_weight_norm(self): """Apply weight normalization module from all of the layers.""" def _apply_weight_norm(m): - if isinstance(m, torch.nn.Conv2d): + if isinstance(m, torch.nn.Conv1d): torch.nn.utils.weight_norm(m) logging.debug(f"Weight norm is applied to {m}.") @@ -609,12 +615,102 @@ def apply_spectral_norm(self): """Apply spectral normalization module from all of the layers.""" def _apply_spectral_norm(m): - if isinstance(m, torch.nn.Conv2d): + if isinstance(m, torch.nn.Conv1d): torch.nn.utils.spectral_norm(m) logging.debug(f"Spectral norm is applied to {m}.") self.apply(_apply_spectral_norm) + def remove_weight_norm(self): + """Remove weight normalization module from all of the layers.""" + + def _remove_weight_norm(m): + try: + logging.debug(f"Weight norm is removed from {m}.") + torch.nn.utils.remove_weight_norm(m) + except ValueError: # this module didn't have weight norm + return + + self.apply(_remove_weight_norm) + + def remove_spectral_norm(self): + """Remove spectral normalization module from all of the layers.""" + + def _remove_spectral_norm(m): + try: + logging.debug(f"Spectral norm is removed from {m}.") + torch.nn.utils.remove_spectral_norm(m) + except ValueError: # this module didn't have spectral norm + return + + self.apply(_remove_spectral_norm) + + def _load_state_dict_pre_hook( + self, + state_dict, + prefix, + local_metadata, + strict, + missing_keys, + unexpected_keys, + error_msgs, + ): + """Fix the compatibility of weight / spectral normalization issue. + + Some pretrained models are trained with configs that use weight / spectral + normalization, but actually, the norm is not applied. This causes the mismatch + of the parameters with configs. To solve this issue, when parameter mismatch + happens in loading, we remove the norm at first, load the parameters, and then + apply the norm in post-hook functions. + + See also: + - https://github.com/kan-bayashi/ParallelWaveGAN/issues/309 + - https://github.com/espnet/espnet/issues/4595 + + """ + if self.use_weight_norm: + if not any(["weight_g" in k for k in state_dict.keys()]): + logging.warning( + "It seems weight norm is not applied in the pretrained model. To" + " keep the compatibility, we will remove the norm, load the" + " parameters, and then apply the norm again." + ) + self.remove_weight_norm() + if self.use_spectral_norm: + if not any(["weight_u" in k for k in state_dict.keys()]): + logging.warning( + "It seems spectral norm is not applied in the pretrained model. To" + " keep the compatibility, we will remove the norm, load the" + " parameters, and then apply the norm again." + ) + self.remove_spectral_norm() + + def _load_state_dict_post_hook( + self, + module, + incompatible_keys, + ): + """Fix the compatibility of weight / spectral normalization issue. + + Some pretrained models are trained with configs that use weight / spectral + normalization, but actually, the norm is not applied. This causes the mismatch + of the parameters with configs. To solve this issue, when parameter mismatch + happens in loading, we remove the norm at first, load the parameters, and then + apply the norm in post-hook functions.
+ + See also: + - https://github.com/kan-bayashi/ParallelWaveGAN/issues/309 + - https://github.com/espnet/espnet/issues/4595 + + """ + if self.use_weight_norm: + if not any(["weight_g" in k for k in self.state_dict().keys()]): + self.apply_weight_norm() + if self.use_spectral_norm: + if not any(["weight_u" in k for k in self.state_dict().keys()]): + self.apply_spectral_norm() + pass + class HiFiGANMultiScaleDiscriminator(torch.nn.Module): """HiFi-GAN multi-scale discriminator module.""" diff --git a/test/test_hifigan.py b/test/test_hifigan.py index d7569259..922ee8d3 100644 --- a/test/test_hifigan.py +++ b/test/test_hifigan.py @@ -6,12 +6,15 @@ """Test code for HiFi-GAN modules.""" import logging +import os +import yaml import numpy as np import pytest import torch from test_parallel_wavegan import make_mutli_reso_stft_loss_args +import parallel_wavegan.models from parallel_wavegan.losses import ( DiscriminatorAdversarialLoss, FeatureMatchLoss, @@ -219,3 +222,34 @@ def test_causal_hifigan(dict_g): y[..., : c.size(-1) // 2 * upsampling_factor].detach().cpu().numpy(), y_[..., : c_.size(-1) // 2 * upsampling_factor].detach().cpu().numpy(), ) + + +@pytest.mark.skipif(torch.cuda.is_available(), reason="Run in only local") +def test_fix_norm_issue(): + from parallel_wavegan.utils import download_pretrained_model + + checkpoint = download_pretrained_model("ljspeech_hifigan.v1") + config = os.path.join(os.path.dirname(checkpoint), "config.yml") + with open(config) as f: + config = yaml.load(f, Loader=yaml.Loader) + + # get model and load parameters + discriminator_type = config.get("discriminator_type") + model_class = getattr( + parallel_wavegan.models, + discriminator_type, + ) + model_1 = model_class(**config["discriminator_params"]) + model_2 = model_class(**config["discriminator_params"]) + + model_2.load_state_dict(model_1.state_dict()) + state_dict_2 = model_2.state_dict() + + model_1.load_state_dict( + torch.load(checkpoint, map_location="cpu")["model"]["discriminator"], + strict=False, + ) + state_dict_1 = model_1.state_dict() + for k in state_dict_1.keys(): + with pytest.raises(AssertionError): + np.testing.assert_array_equal(state_dict_1[k], state_dict_2[k]) From 1dc31270efd518ef40c5f7260bbb51ae96b06319 Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 20:23:18 +0900 Subject: [PATCH 2/8] not to use register_load_state_dict_post_hook --- parallel_wavegan/models/hifigan.py | 57 +++++++++++++----------------- 1 file changed, 24 insertions(+), 33 deletions(-) diff --git a/parallel_wavegan/models/hifigan.py b/parallel_wavegan/models/hifigan.py index d44a1036..d696ade9 100644 --- a/parallel_wavegan/models/hifigan.py +++ b/parallel_wavegan/models/hifigan.py @@ -582,7 +582,6 @@ def __init__( # backward compatibility self._register_load_state_dict_pre_hook(self._load_state_dict_pre_hook) - self.register_load_state_dict_post_hook(self._load_state_dict_post_hook) def forward(self, x): """Calculate forward propagation. @@ -672,44 +671,36 @@ def _load_state_dict_pre_hook( if not any(["weight_g" in k for k in state_dict.keys()]): logging.warning( "It seems weight norm is not applied in the pretrained model. To" - " keep the compatibility, we will remove the norm, load the" - " parameters, and then apply the norm again." + " keep the compatibility, we will apply the norm to the pretrained parameters."
) - self.remove_weight_norm() + keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_g")] + from torch.nn.utils import weight_norm + for k in keys: + weight = state_dict[prefix + k] + m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) + weight_norm(m) + state_dict[prefix + k + "_g"] = m.weight_g + state_dict[prefix + k + "_v"] = m.weight_v + del state_dict[prefix + k] + del m + if self.use_spectral_norm: if not any(["weight_u" in k for k in state_dict.keys()]): logging.warning( "It seems spectral norm is not applied in the pretrained model. To" - " keep the compatibility, we will remove the norm, load the" - " parameters, and then apply the norm again." + " keep the compatibility, we will apply the norm to the pretrained parameters." ) - self.remove_spectral_norm() - - def _load_state_dict_post_hook( - self, - module, - incompatible_keys, - ): - """Fix the compatibility of weight / spectral normalization issue. - - Some pretrained models are trained with configs that use weight / spectral - normalization, but actually, the norm is not applied. This causes the mismatch - of the parameters with configs. To solve this issue, when parameter mismatch - happens in loading, we remove the norm at first, load the parameters, and then - apply the norm in post-hook functions. - - See also: - - https://github.com/kan-bayashi/ParallelWaveGAN/issues/309 - - https://github.com/espnet/espnet/issues/4595 - - """ - if self.use_weight_norm: - if not any(["weight_g" in k for k in self.state_dict().keys()]): - self.apply_weight_norm() - if self.use_spectral_norm: - if not any(["weight_u" in k for k in self.state_dict().keys()]): - self.apply_spectral_norm() - pass + keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_u")] + from torch.nn.utils import spectral_norm + for k in keys: + weight = state_dict[prefix + k] + m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) + spectral_norm(m) + state_dict[prefix + k + "_u"] = m.weight_u + state_dict[prefix + k + "_v"] = m.weight_v + state_dict[prefix + k + "_orig"] = m.weight_orig + del state_dict[prefix + k] + del m class HiFiGANMultiScaleDiscriminator(torch.nn.Module): From 1059cc606c1cb65c5097b8758aa62d0ff3452fdc Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 20:28:09 +0900 Subject: [PATCH 3/8] fix linter --- parallel_wavegan/models/hifigan.py | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/parallel_wavegan/models/hifigan.py b/parallel_wavegan/models/hifigan.py index d696ade9..4034de2a 100644 --- a/parallel_wavegan/models/hifigan.py +++ b/parallel_wavegan/models/hifigan.py @@ -671,13 +671,19 @@ def _load_state_dict_pre_hook( if not any(["weight_g" in k for k in state_dict.keys()]): logging.warning( "It seems weight norm is not applied in the pretrained model. To" - " keep the compatibility, we will apply the norm to the pretrained parameters." + " keep the compatibility, we will apply the norm to the pretrained" + " parameters."
) - keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_g")] + keys = [ + k[:-2] for k in self.state_dict().keys() if k.endswith("weight_g") + ] from torch.nn.utils import weight_norm + for k in keys: weight = state_dict[prefix + k] - m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) + m = torch.nn.Conv1d( + weight.shape[1], weight.shape[0], weight.shape[2] + ) weight_norm(m) state_dict[prefix + k + "_g"] = m.weight_g state_dict[prefix + k + "_v"] = m.weight_v @@ -688,13 +694,19 @@ def _load_state_dict_pre_hook( if not any(["weight_u" in k for k in state_dict.keys()]): logging.warning( "It seems spectral norm is not applied in the pretrained model. To" - " keep the compatibility, we will apply the norm to the pretrained parameters." + " keep the compatibility, we will apply the norm to the pretrained" + " parameters." ) - keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_u")] + keys = [ + k[:-2] for k in self.state_dict().keys() if k.endswith("weight_u") + ] from torch.nn.utils import spectral_norm + for k in keys: weight = state_dict[prefix + k] - m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) + m = torch.nn.Conv1d( + weight.shape[1], weight.shape[0], weight.shape[2] + ) spectral_norm(m) state_dict[prefix + k + "_u"] = m.weight_u state_dict[prefix + k + "_v"] = m.weight_v From 9c88276dbf13495cad7aae6c785cacf7ff52c22a Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 20:29:19 +0900 Subject: [PATCH 4/8] fix --- parallel_wavegan/models/hifigan.py | 86 ++++++++++++++---------------- 1 file changed, 40 insertions(+), 46 deletions(-) diff --git a/parallel_wavegan/models/hifigan.py b/parallel_wavegan/models/hifigan.py index 4034de2a..9cd48814 100644 --- a/parallel_wavegan/models/hifigan.py +++ b/parallel_wavegan/models/hifigan.py @@ -667,52 +667,46 @@ def _load_state_dict_pre_hook( - https://github.com/espnet/espnet/issues/4595 """ - if self.use_weight_norm: - if not any(["weight_g" in k for k in state_dict.keys()]): - logging.warning( - "It seems weight norm is not applied in the pretrained model. To" - " keep the compatibility, we will apply the norm to the pretrained" - " parameters." - ) - keys = [ - k[:-2] for k in self.state_dict().keys() if k.endswith("weight_g") - ] - from torch.nn.utils import weight_norm - - for k in keys: - weight = state_dict[prefix + k] - m = torch.nn.Conv1d( - weight.shape[1], weight.shape[0], weight.shape[2] - ) - weight_norm(m) - state_dict[prefix + k + "_g"] = m.weight_g - state_dict[prefix + k + "_v"] = m.weight_v - del state_dict[prefix + k] - del m - - if self.use_spectral_norm: - if not any(["weight_u" in k for k in state_dict.keys()]): - logging.warning( - "It seems spectral norm is not applied in the pretrained model. To" - " keep the compatibility, we will apply the norm to the pretrained" - " parameters." - ) - keys = [ - k[:-2] for k in self.state_dict().keys() if k.endswith("weight_u") - ] - from torch.nn.utils import spectral_norm - - for k in keys: - weight = state_dict[prefix + k] - m = torch.nn.Conv1d( - weight.shape[1], weight.shape[0], weight.shape[2] - ) - spectral_norm(m) - state_dict[prefix + k + "_u"] = m.weight_u - state_dict[prefix + k + "_v"] = m.weight_v - state_dict[prefix + k + "_orig"] = m.weight_orig - del state_dict[prefix + k] - del m + if self.use_weight_norm and not any( + ["weight_g" in k for k in state_dict.keys()] + ): + logging.warning( + "It seems weight norm is not applied in the pretrained model. 
To" + " keep the compatibility, we will apply the norm to the pretrained" + " parameters." + ) + keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_g")] + from torch.nn.utils import weight_norm + + for k in keys: + weight = state_dict[prefix + k] + m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) + weight_norm(m) + state_dict[prefix + k + "_g"] = m.weight_g + state_dict[prefix + k + "_v"] = m.weight_v + del state_dict[prefix + k] + del m + + if self.use_spectral_norm and not any( + ["weight_u" in k for k in state_dict.keys()] + ): + logging.warning( + "It seems spectral norm is not applied in the pretrained model. To" + " keep the compatibility, we will apply the norm to the pretrained" + " parameters." + ) + keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_u")] + from torch.nn.utils import spectral_norm + + for k in keys: + weight = state_dict[prefix + k] + m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) + spectral_norm(m) + state_dict[prefix + k + "_u"] = m.weight_u + state_dict[prefix + k + "_v"] = m.weight_v + state_dict[prefix + k + "_orig"] = m.weight_orig + del state_dict[prefix + k] + del m class HiFiGANMultiScaleDiscriminator(torch.nn.Module): From 892b4cace7c808765905be82a281f349f9854ad4 Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 20:31:04 +0900 Subject: [PATCH 5/8] fix condition --- test/test_hifigan.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_hifigan.py b/test/test_hifigan.py index 922ee8d3..fffe68ca 100644 --- a/test/test_hifigan.py +++ b/test/test_hifigan.py @@ -7,11 +7,11 @@ import logging import os -import yaml import numpy as np import pytest import torch +import yaml from test_parallel_wavegan import make_mutli_reso_stft_loss_args import parallel_wavegan.models @@ -224,7 +224,7 @@ def test_causal_hifigan(dict_g): ) -@pytest.mark.skipif(torch.cuda.is_available(), reason="Run in only local") +@pytest.mark.skipif(not torch.cuda.is_available(), reason="Run in only local") def test_fix_norm_issue(): from parallel_wavegan.utils import download_pretrained_model From 2b18019530777e3da81be55c4ce822900faff351 Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 21:03:43 +0900 Subject: [PATCH 6/8] fix --- parallel_wavegan/models/hifigan.py | 51 +++++++++++++----------------- test/test_hifigan.py | 17 ++-------- 2 files changed, 25 insertions(+), 43 deletions(-) diff --git a/parallel_wavegan/models/hifigan.py b/parallel_wavegan/models/hifigan.py index 9cd48814..0eae3b3c 100644 --- a/parallel_wavegan/models/hifigan.py +++ b/parallel_wavegan/models/hifigan.py @@ -671,42 +671,35 @@ def _load_state_dict_pre_hook( ["weight_g" in k for k in state_dict.keys()] ): logging.warning( - "It seems weight norm is not applied in the pretrained model. To" - " keep the compatibility, we will apply the norm to the pretrained" - " parameters." + "It seems weight norm is not applied in the pretrained model but the" + " current model uses it. To keep the compatibility, we remove the norm" + " from the current model. This may causes training error due to the the" + " parameter mismatch when finetuning. 
To avoid this issue, please" + " change the following parameters in config to false: \n" + " - discriminator_params.follow_official_norm \n" + " - discriminator_params.scale_discriminator_params.use_weight_norm \n" + " - discriminator_params.scale_discriminator_params.use_spectral_norm \n" + " See also: https://github.com/kan-bayashi/ParallelWaveGAN/issues/309" ) - keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_g")] - from torch.nn.utils import weight_norm - - for k in keys: - weight = state_dict[prefix + k] - m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) - weight_norm(m) - state_dict[prefix + k + "_g"] = m.weight_g - state_dict[prefix + k + "_v"] = m.weight_v - del state_dict[prefix + k] - del m + self.remove_weight_norm() + self.use_weight_norm = False if self.use_spectral_norm and not any( ["weight_u" in k for k in state_dict.keys()] ): logging.warning( - "It seems spectral norm is not applied in the pretrained model. To" - " keep the compatibility, we will apply the norm to the pretrained" - " parameters." + "It seems spectral norm is not applied in the pretrained model but the" + " current model uses it. To keep the compatibility, we remove the norm" + " from the current model. This may causes training error due to the the" + " parameter mismatch when finetuning. To avoid this issue, please" + " change the following parameters in config to false: \n" + " - discriminator_params.follow_official_norm \n" + " - discriminator_params.scale_discriminator_params.use_weight_norm \n" + " - discriminator_params.scale_discriminator_params.use_spectral_norm \n" + " See also: https://github.com/kan-bayashi/ParallelWaveGAN/issues/309" ) - keys = [k[:-2] for k in self.state_dict().keys() if k.endswith("weight_u")] - from torch.nn.utils import spectral_norm - - for k in keys: - weight = state_dict[prefix + k] - m = torch.nn.Conv1d(weight.shape[1], weight.shape[0], weight.shape[2]) - spectral_norm(m) - state_dict[prefix + k + "_u"] = m.weight_u - state_dict[prefix + k + "_v"] = m.weight_v - state_dict[prefix + k + "_orig"] = m.weight_orig - del state_dict[prefix + k] - del m + self.remove_spectral_norm() + self.use_spectral_norm = False class HiFiGANMultiScaleDiscriminator(torch.nn.Module): diff --git a/test/test_hifigan.py b/test/test_hifigan.py index fffe68ca..9054b7c9 100644 --- a/test/test_hifigan.py +++ b/test/test_hifigan.py @@ -239,17 +239,6 @@ def test_fix_norm_issue(): parallel_wavegan.models, discriminator_type, ) - model_1 = model_class(**config["discriminator_params"]) - model_2 = model_class(**config["discriminator_params"]) - - model_2.load_state_dict(model_1.state_dict()) - state_dict_2 = model_2.state_dict() - - model_1.load_state_dict( - torch.load(checkpoint, map_location="cpu")["model"]["discriminator"], - strict=False, - ) - state_dict_1 = model_1.state_dict() - for k in state_dict_1.keys(): - with pytest.raises(AssertionError): - np.testing.assert_array_equal(state_dict_1[k], state_dict_2[k]) + model = model_class(**config["discriminator_params"]) + state_dict = torch.load(checkpoint, map_location="cpu")["model"]["discriminator"] + model.load_state_dict(state_dict, strict=False) From f1a2117926c6510300c5b8c94d17f99cc3ea8c29 Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 21:07:58 +0900 Subject: [PATCH 7/8] update --- parallel_wavegan/models/hifigan.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/parallel_wavegan/models/hifigan.py b/parallel_wavegan/models/hifigan.py index 
0eae3b3c..89a98bb7 100644 --- a/parallel_wavegan/models/hifigan.py +++ b/parallel_wavegan/models/hifigan.py @@ -659,12 +659,11 @@ def _load_state_dict_pre_hook( Some pretrained models are trained with configs that use weight / spectral normalization, but actually, the norm is not applied. This causes the mismatch of the parameters with configs. To solve this issue, when parameter mismatch - happens in loading, we remove the norm at first, load the parameters, and then - apply the norm in post-hook functions. + happens while loading a pretrained model, we remove the norm from the current model. See also: - - https://github.com/kan-bayashi/ParallelWaveGAN/issues/309 - - https://github.com/espnet/espnet/issues/4595 + - https://github.com/kan-bayashi/ParallelWaveGAN/pull/409 + - https://github.com/espnet/espnet/pull/5240 """ if self.use_weight_norm and not any( ["weight_g" in k for k in state_dict.keys()] From b081c0f4e062b0aef9b0d9579d799ccfe097c0d3 Mon Sep 17 00:00:00 2001 From: kan-bayashi Date: Mon, 19 Jun 2023 21:10:18 +0900 Subject: [PATCH 8/8] fix --- parallel_wavegan/models/hifigan.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/parallel_wavegan/models/hifigan.py b/parallel_wavegan/models/hifigan.py index 89a98bb7..e0a29d33 100644 --- a/parallel_wavegan/models/hifigan.py +++ b/parallel_wavegan/models/hifigan.py @@ -678,7 +678,7 @@ def _load_state_dict_pre_hook( " - discriminator_params.follow_official_norm \n" " - discriminator_params.scale_discriminator_params.use_weight_norm \n" " - discriminator_params.scale_discriminator_params.use_spectral_norm \n" - " See also: https://github.com/kan-bayashi/ParallelWaveGAN/issues/309" + " See also: https://github.com/kan-bayashi/ParallelWaveGAN/pull/409" ) self.remove_weight_norm() self.use_weight_norm = False @@ -695,7 +695,7 @@ def _load_state_dict_pre_hook( " - discriminator_params.follow_official_norm \n" " - discriminator_params.scale_discriminator_params.use_weight_norm \n" " - discriminator_params.scale_discriminator_params.use_spectral_norm \n" - " See also: https://github.com/kan-bayashi/ParallelWaveGAN/issues/309" + " See also: https://github.com/kan-bayashi/ParallelWaveGAN/pull/409" ) self.remove_spectral_norm() self.use_spectral_norm = False
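A minimal, self-contained sketch of the mechanism the final patches converge on: a load_state_dict pre-hook that drops weight norm from the current model when the incoming checkpoint was saved without it, so the parameter names line up. The class ToyDiscriminator, the simplified hook body, and the toy checkpoint below are illustrative assumptions for this note, not code from the repository; the real discriminators above additionally handle spectral norm and emit the configuration warning added in PATCH 6/8.

import logging

import torch


class ToyDiscriminator(torch.nn.Module):
    """Stand-in for the patched HiFi-GAN discriminators (illustrative only)."""

    def __init__(self, use_weight_norm=True):
        super().__init__()
        self.conv = torch.nn.Conv1d(1, 4, 3)
        self.use_weight_norm = use_weight_norm
        if use_weight_norm:
            torch.nn.utils.weight_norm(self.conv)
        # The pre-hook fires before parameters are copied, so the module can be
        # adjusted to match whatever the incoming checkpoint actually contains.
        self._register_load_state_dict_pre_hook(self._pre_hook)

    def forward(self, x):
        return self.conv(x)

    def _pre_hook(self, state_dict, prefix, *args):
        # Checkpoint saved without weight norm (no "*_g" keys): drop the norm
        # from the current module so the parameter names match, as in PATCH 6/8.
        if self.use_weight_norm and not any("weight_g" in k for k in state_dict):
            logging.warning("Checkpoint has no weight norm; removing it from the model.")
            torch.nn.utils.remove_weight_norm(self.conv)
            self.use_weight_norm = False


# A checkpoint produced by a plain (un-normalized) convolution ...
plain = torch.nn.Sequential()
plain.add_module("conv", torch.nn.Conv1d(1, 4, 3))
checkpoint = plain.state_dict()

# ... loads into the weight-normalized model without key mismatches.
model = ToyDiscriminator(use_weight_norm=True)
model.load_state_dict(checkpoint, strict=False)

The same reasoning motivates the strict=False added to the discriminator load_state_dict calls in parallel_wavegan/bin/train.py in PATCH 1/8: once the norm has been removed from the current model, any remaining key differences are tolerated rather than raising.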