Skip to content

Commit

Permalink
enhance norm intensity tests
Browse files — browse the repository at this point in the history
Signed-off-by: Wenqi Li <wenqil@nvidia.com>
  • Branch information:
wyli committed Sep 15, 2021
1 parent a5e7a96 commit a8e9a1e
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 30 deletions.
42 changes: 18 additions & 24 deletions tests/test_normalize_intensity.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,51 +31,51 @@
"divisor": u(np.array([0.5, 0.5, 0.5, 0.5])),
"nonzero": True,
},
np.array([0.0, 3.0, 0.0, 4.0]),
np.array([0.0, -1.0, 0.0, 1.0]),
p(np.array([0.0, 3.0, 0.0, 4.0])),
p(np.array([0.0, -1.0, 0.0, 1.0])),
]
)
TESTS.append([p, {"nonzero": True}, np.array([0.0, 0.0, 0.0, 0.0]), np.array([0.0, 0.0, 0.0, 0.0])])
TESTS.append([p, {"nonzero": False}, np.array([0.0, 0.0, 0.0, 0.0]), np.array([0.0, 0.0, 0.0, 0.0])])
TESTS.append([p, {"nonzero": False}, np.array([1, 1, 1, 1]), np.array([0.0, 0.0, 0.0, 0.0])])
TESTS.append([p, {"nonzero": True}, p(np.array([0.0, 0.0, 0.0, 0.0])), p(np.array([0.0, 0.0, 0.0, 0.0]))])
TESTS.append([p, {"nonzero": False}, p(np.array([0.0, 0.0, 0.0, 0.0])), p(np.array([0.0, 0.0, 0.0, 0.0]))])
TESTS.append([p, {"nonzero": False}, p(np.array([1, 1, 1, 1])), p(np.array([0.0, 0.0, 0.0, 0.0]))])
TESTS.append(
[
p,
{"nonzero": False, "channel_wise": True, "subtrahend": [1, 2, 3]},
np.ones((3, 2, 2)),
np.array([[[0.0, 0.0], [0.0, 0.0]], [[-1.0, -1.0], [-1.0, -1.0]], [[-2.0, -2.0], [-2.0, -2.0]]]),
p(np.ones((3, 2, 2))),
p(np.array([[[0.0, 0.0], [0.0, 0.0]], [[-1.0, -1.0], [-1.0, -1.0]], [[-2.0, -2.0], [-2.0, -2.0]]])),
]
)
TESTS.append(
[
p,
{"nonzero": True, "channel_wise": True, "subtrahend": [1, 2, 3], "divisor": [0, 0, 2]},
np.ones((3, 2, 2)),
np.array([[[0.0, 0.0], [0.0, 0.0]], [[-1.0, -1.0], [-1.0, -1.0]], [[-1.0, -1.0], [-1.0, -1.0]]]),
p(np.ones((3, 2, 2))),
p(np.array([[[0.0, 0.0], [0.0, 0.0]], [[-1.0, -1.0], [-1.0, -1.0]], [[-1.0, -1.0], [-1.0, -1.0]]])),
]
)
TESTS.append(
[
p,
{"nonzero": True, "channel_wise": False, "subtrahend": 2, "divisor": 0},
np.ones((3, 2, 2)),
np.ones((3, 2, 2)) * -1.0,
p(np.ones((3, 2, 2))),
p(np.ones((3, 2, 2)) * -1.0),
]
)
TESTS.append(
[
p,
{"nonzero": True, "channel_wise": False, "subtrahend": np.ones((3, 2, 2)) * 0.5, "divisor": 0},
np.ones((3, 2, 2)),
np.ones((3, 2, 2)) * 0.5,
p(np.ones((3, 2, 2))),
p(np.ones((3, 2, 2)) * 0.5),
]
)
TESTS.append(
[
p,
{"nonzero": True, "channel_wise": True, "subtrahend": np.ones((3, 2, 2)) * 0.5, "divisor": [0, 1, 0]},
np.ones((3, 2, 2)),
np.ones((3, 2, 2)) * 0.5,
p(np.ones((3, 2, 2))),
p(np.ones((3, 2, 2)) * 0.5),
]
)

Expand All @@ -91,28 +91,22 @@ def test_default(self, im_type):
self.assertEqual(im.device, normalized.device)
self.assertTrue(normalized.dtype in (np.float32, torch.float32))
expected = (self.imt - np.mean(self.imt)) / np.std(self.imt)
assert_allclose(expected, normalized, type_test=False, rtol=1e-3)
assert_allclose(normalized, expected, type_test=False, rtol=1e-3)

@parameterized.expand(TESTS)
def test_nonzero(self, in_type, input_param, input_data, expected_data):
normalizer = NormalizeIntensity(**input_param)
im = in_type(input_data)
normalized = normalizer(im)
self.assertEqual(type(im), type(normalized))
if isinstance(normalized, torch.Tensor):
self.assertEqual(im.device, normalized.device)
assert_allclose(expected_data, normalized, type_test=False)
assert_allclose(normalized, in_type(expected_data))

@parameterized.expand([[p] for p in TEST_NDARRAYS])
def test_channel_wise(self, im_type):
normalizer = NormalizeIntensity(nonzero=True, channel_wise=True)
input_data = im_type(np.array([[0.0, 3.0, 0.0, 4.0], [0.0, 4.0, 0.0, 5.0]]))
expected = np.array([[0.0, -1.0, 0.0, 1.0], [0.0, -1.0, 0.0, 1.0]])
normalized = normalizer(input_data)
self.assertEqual(type(input_data), type(normalized))
if isinstance(normalized, torch.Tensor):
self.assertEqual(input_data.device, normalized.device)
assert_allclose(expected, normalized, type_test=False)
assert_allclose(normalized, im_type(expected))

@parameterized.expand([[p] for p in TEST_NDARRAYS])
def test_value_errors(self, im_type):
Expand Down
12 changes: 6 additions & 6 deletions tests/test_normalize_intensityd.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
[
{"keys": ["img"], "nonzero": True},
{"img": p(np.array([0.0, 3.0, 0.0, 4.0]))},
np.array([0.0, -1.0, 0.0, 1.0]),
p(np.array([0.0, -1.0, 0.0, 1.0])),
]
)
TESTS.append(
Expand All @@ -37,14 +37,14 @@
"nonzero": True,
},
{"img": p(np.array([0.0, 3.0, 0.0, 4.0]))},
np.array([0.0, -1.0, 0.0, 1.0]),
p(np.array([0.0, -1.0, 0.0, 1.0])),
]
)
TESTS.append(
[
{"keys": ["img"], "nonzero": True},
{"img": p(np.array([0.0, 0.0, 0.0, 0.0]))},
np.array([0.0, 0.0, 0.0, 0.0]),
p(np.array([0.0, 0.0, 0.0, 0.0])),
]
)

Expand All @@ -60,7 +60,7 @@ def test_image_normalize_intensityd(self, im_type):
self.assertEqual(type(im), type(normalized))
if isinstance(normalized, torch.Tensor):
self.assertEqual(im.device, normalized.device)
assert_allclose(normalized, expected, type_test=False, rtol=1e-3)
assert_allclose(normalized, im_type(expected), rtol=1e-3)

@parameterized.expand(TESTS)
def test_nonzero(self, input_param, input_data, expected_data):
Expand All @@ -70,7 +70,7 @@ def test_nonzero(self, input_param, input_data, expected_data):
self.assertEqual(type(input_data[key]), type(normalized))
if isinstance(normalized, torch.Tensor):
self.assertEqual(input_data[key].device, normalized.device)
assert_allclose(normalized, expected_data, type_test=False)
assert_allclose(normalized, expected_data)

@parameterized.expand([[p] for p in TEST_NDARRAYS])
def test_channel_wise(self, im_type):
Expand All @@ -82,7 +82,7 @@ def test_channel_wise(self, im_type):
if isinstance(normalized, torch.Tensor):
self.assertEqual(input_data[key].device, normalized.device)
expected = np.array([[0.0, -1.0, 0.0, 1.0], [0.0, -1.0, 0.0, 1.0]])
assert_allclose(normalized, expected, type_test=False)
assert_allclose(normalized, im_type(expected))


if __name__ == "__main__":
Expand Down

0 comments on commit a8e9a1e

Please sign in to comment.