From 70771a3950e39ea7151bde04b21c3a48e2ce7b49 Mon Sep 17 00:00:00 2001
From: Harshavardhan Kamarthi
Date: Mon, 27 Jun 2022 13:05:51 -0700
Subject: [PATCH 1/7] Pytorch: Check if grad is not None

---
 aim/sdk/adapters/pytorch.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/aim/sdk/adapters/pytorch.py b/aim/sdk/adapters/pytorch.py
index 6900c8442f..5dd44eb536 100644
--- a/aim/sdk/adapters/pytorch.py
+++ b/aim/sdk/adapters/pytorch.py
@@ -62,12 +62,14 @@ def get_model_layers(model, dt, parent_name=None):
             layers[layer_name] = {}
             if hasattr(m, 'weight') \
                     and m.weight is not None \
-                    and hasattr(m.weight, dt):
+                    and hasattr(m.weight, dt) \
+                    and getattr(m.weight, dt) is not None:
                 layers[layer_name]['weight'] = get_pt_tensor(getattr(m.weight, dt)).numpy()
 
             if hasattr(m, 'bias') \
                     and m.bias is not None \
-                    and hasattr(m.bias, dt):
+                    and hasattr(m.bias, dt) \
+                    and getattr(m.bias, dt) is not None:
                 layers[layer_name]['bias'] = get_pt_tensor(getattr(m.bias, dt)).numpy()
 
     return layers

From c8d38700ef598984ec261df6baa6aaba22007e70 Mon Sep 17 00:00:00 2001
From: Harshavardhan Kamarthi
Date: Tue, 28 Jun 2022 03:16:27 -0400
Subject: [PATCH 2/7] Pytorch: Condense attribute check to single getattr call.

---
 aim/sdk/adapters/pytorch.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/aim/sdk/adapters/pytorch.py b/aim/sdk/adapters/pytorch.py
index 5dd44eb536..657728a506 100644
--- a/aim/sdk/adapters/pytorch.py
+++ b/aim/sdk/adapters/pytorch.py
@@ -62,14 +62,12 @@ def get_model_layers(model, dt, parent_name=None):
             layers[layer_name] = {}
             if hasattr(m, 'weight') \
                     and m.weight is not None \
-                    and hasattr(m.weight, dt) \
-                    and getattr(m.weight, dt) is not None:
+                    and getattr(m.weight, dt, None) is not None:
                 layers[layer_name]['weight'] = get_pt_tensor(getattr(m.weight, dt)).numpy()
 
             if hasattr(m, 'bias') \
                     and m.bias is not None \
-                    and hasattr(m.bias, dt) \
-                    and getattr(m.bias, dt) is not None:
+                    and getattr(m.bias, dt, None) is not None:
                 layers[layer_name]['bias'] = get_pt_tensor(getattr(m.bias, dt)).numpy()
 
     return layers

From 1f95c3bd460fc314d30fa7f6bc6bafac5f2d1e33 Mon Sep 17 00:00:00 2001
From: Harshavardhan Kamarthi
Date: Tue, 28 Jun 2022 09:16:12 -0700
Subject: [PATCH 3/7] Update CHANGELOG.md

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ba51d526bd..7d2a48ad7e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@
 - Fix the loading logic of the `monaco editor` across the Aim Ui (arsengit)
 - Fix `Table` export functionality in Params and Scatters explorers (arsengit)
 - Allow mixing numeric types on a single Sequence (alberttorosyan)
+- Check if gradient is None when tracking gradient distributions
 
 ## 3.11.0 Jun 21, 2022
 

From 0e95ec11d3da0eca233c8481c167ec976620c306 Mon Sep 17 00:00:00 2001
From: Harshavardhan Kamarthi
Date: Tue, 28 Jun 2022 09:16:42 -0700
Subject: [PATCH 4/7] Update CHANGELOG.md

---
 CHANGELOG.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7d2a48ad7e..56ed7bd63e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,7 @@
 # Changelog
 
+- Check if gradient is None when tracking gradient distributions
+
 ## 3.11.1 Jun 27, 2022
 
 - Replace base58 encoder with base64 (KaroMourad, VkoHov)
@@ -7,7 +9,6 @@
 - Fix the loading logic of the `monaco editor` across the Aim Ui (arsengit)
 - Fix `Table` export functionality in Params and Scatters explorers (arsengit)
 - Allow mixing numeric types on a single Sequence (alberttorosyan)
-- Check if gradient is None when tracking gradient distributions
 
 ## 3.11.0 Jun 21, 2022
 

From 80766251f7568a799a61e6a4b79b91f71ae68e94 Mon Sep 17 00:00:00 2001
From: Gor Arakelyan
Date: Tue, 28 Jun 2022 21:41:04 +0400
Subject: [PATCH 5/7] Include the username of the author in changelog

---
 CHANGELOG.md | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 56ed7bd63e..360a85a919 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,10 @@
 # Changelog
 
-- Check if gradient is None when tracking gradient distributions
+## Unreleased
+
+### Fixes:
+
+- Check if gradient is None when tracking gradient distributions (kage08)
 
 ## 3.11.1 Jun 27, 2022
 

From 0e4b9c904abad1f04b0be904239ca4992c811421 Mon Sep 17 00:00:00 2001
From: kage08
Date: Fri, 1 Jul 2022 00:32:41 +0000
Subject: [PATCH 6/7] Single getattr to fetch weight and bias as well as check if None

---
 aim/sdk/adapters/pytorch.py | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/aim/sdk/adapters/pytorch.py b/aim/sdk/adapters/pytorch.py
index 657728a506..db8b6b13aa 100644
--- a/aim/sdk/adapters/pytorch.py
+++ b/aim/sdk/adapters/pytorch.py
@@ -60,15 +60,17 @@ def get_model_layers(model, dt, parent_name=None):
             layers.update(get_model_layers(m, dt, layer_name))
         else:
             layers[layer_name] = {}
-            if hasattr(m, 'weight') \
-                    and m.weight is not None \
-                    and getattr(m.weight, dt, None) is not None:
-                layers[layer_name]['weight'] = get_pt_tensor(getattr(m.weight, dt)).numpy()
+            weight = None
+            if hasattr(m, 'weight') and m.weight is not None:
+                weight = getattr(m.weigth, dt, None)
+            if weight is not None:
+                layers[layer_name]['weight'] = get_pt_tensor(weight).numpy()
 
-            if hasattr(m, 'bias') \
-                    and m.bias is not None \
-                    and getattr(m.bias, dt, None) is not None:
-                layers[layer_name]['bias'] = get_pt_tensor(getattr(m.bias, dt)).numpy()
+            bias = None
+            if hasattr(m, 'bias') and m.bias is not None:
+                bias = getattr(m, 'bias', None)
+            if bias is not None:
+                layers[layer_name]['bias'] = get_pt_tensor(bias).numpy()
 
     return layers
 

From 06a0e4da6e5a0decf794c38c8c44091cc84f1094 Mon Sep 17 00:00:00 2001
From: Harshavardhan Kamarthi
Date: Fri, 1 Jul 2022 13:35:03 -0400
Subject: [PATCH 7/7] Fix naming issues.

---
 aim/sdk/adapters/pytorch.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/aim/sdk/adapters/pytorch.py b/aim/sdk/adapters/pytorch.py
index db8b6b13aa..ba48c21125 100644
--- a/aim/sdk/adapters/pytorch.py
+++ b/aim/sdk/adapters/pytorch.py
@@ -62,13 +62,13 @@ def get_model_layers(model, dt, parent_name=None):
             layers[layer_name] = {}
             weight = None
             if hasattr(m, 'weight') and m.weight is not None:
-                weight = getattr(m.weigth, dt, None)
+                weight = getattr(m.weight, dt, None)
             if weight is not None:
                 layers[layer_name]['weight'] = get_pt_tensor(weight).numpy()
 
             bias = None
             if hasattr(m, 'bias') and m.bias is not None:
-                bias = getattr(m.bias, dt, None)
+                bias = getattr(m.bias, dt, None)
             if bias is not None:
                 layers[layer_name]['bias'] = get_pt_tensor(bias).numpy()
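
Note (not part of the patch series above): the sketch below is a minimal, hypothetical illustration of why the None-check matters, using only public PyTorch APIs. Before the first backward pass every parameter's `.grad` attribute is None, so code that fetches it and calls `.numpy()` without a None-check would fail. The function name `collect_grads` and the toy model are made up for illustration; the real adapter parameterizes the attribute name as `dt`, while this sketch hard-codes `grad`.

import torch
import torch.nn as nn

# Toy model: two Linear layers with parameters, one ReLU without any.
model = nn.Sequential(nn.Linear(4, 3), nn.ReLU(), nn.Linear(3, 1))

def collect_grads(module):
    """Fetch each leaf module's weight/bias gradients, skipping None values."""
    grads = {}
    for name, m in module.named_modules():
        if list(m.children()):
            continue  # only leaf modules hold weight/bias tensors directly
        entry = {}
        for attr in ('weight', 'bias'):
            param = getattr(m, attr, None)
            grad = getattr(param, 'grad', None) if param is not None else None
            if grad is not None:
                entry[attr] = grad.detach().cpu().numpy()
        if entry:
            grads[name] = entry
    return grads

print(collect_grads(model))          # {} -- no gradients exist before backward()

loss = model(torch.randn(2, 4)).sum()
loss.backward()
print(sorted(collect_grads(model)))  # ['0', '2'] -- the parameterless ReLU ('1') is skipped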