diff --git a/monai/apps/auto3dseg/hpo_gen.py b/monai/apps/auto3dseg/hpo_gen.py
index b755b99feb..ed6d903897 100644
--- a/monai/apps/auto3dseg/hpo_gen.py
+++ b/monai/apps/auto3dseg/hpo_gen.py
@@ -53,7 +53,7 @@ def update_params(self, *args, **kwargs):
         raise NotImplementedError
 
     @abstractmethod
-    def set_score(self):
+    def set_score(self, *args, **kwargs):
         """Report the evaluated results to HPO."""
         raise NotImplementedError
 
diff --git a/monai/apps/pathology/transforms/post/array.py b/monai/apps/pathology/transforms/post/array.py
index 0aa8e14655..035bce2c69 100644
--- a/monai/apps/pathology/transforms/post/array.py
+++ b/monai/apps/pathology/transforms/post/array.py
@@ -28,7 +28,7 @@
     SobelGradients,
 )
 from monai.transforms.transform import Transform
-from monai.transforms.utils_pytorch_numpy_unification import max, maximum, min, sum, unique
+from monai.transforms.utils_pytorch_numpy_unification import max, maximum, min, sum, unique, where
 from monai.utils import TransformBackends, convert_to_numpy, optional_import
 from monai.utils.misc import ensure_tuple_rep
 from monai.utils.type_conversion import convert_to_dst_type, convert_to_tensor
@@ -162,7 +162,8 @@ def __call__(self, prob_map: NdarrayOrTensor) -> NdarrayOrTensor:
         pred = label(pred)[0]
         if self.remove_small_objects is not None:
             pred = self.remove_small_objects(pred)
-        pred[pred > 0] = 1
+        pred_indices = np.where(pred > 0)
+        pred[pred_indices] = 1
 
         return convert_to_dst_type(pred, prob_map, dtype=self.dtype)[0]
 
@@ -338,7 +339,8 @@ def __call__(self, mask: NdarrayOrTensor, instance_border: NdarrayOrTensor) -> N
         instance_border = instance_border >= self.threshold  # uncertain area
 
         marker = mask - convert_to_dst_type(instance_border, mask)[0]  # certain foreground
-        marker[marker < 0] = 0
+        marker_indices = where(marker < 0)
+        marker[marker_indices] = 0  # type: ignore[index]
         marker = self.postprocess_fn(marker)
         marker = convert_to_numpy(marker)
 
@@ -635,7 +637,7 @@ def __call__(  # type: ignore
 
         seg_map_crop = convert_to_dst_type(seg_map_crop == instance_id, type_map_crop, dtype=bool)[0]
 
-        inst_type = type_map_crop[seg_map_crop]
+        inst_type = type_map_crop[seg_map_crop]  # type: ignore[index]
         type_list, type_pixels = unique(inst_type, return_counts=True)
         type_list = list(zip(type_list, type_pixels))
         type_list = sorted(type_list, key=lambda x: x[1], reverse=True)
diff --git a/monai/data/meta_tensor.py b/monai/data/meta_tensor.py
index cad0851a8e..2df4da4a35 100644
--- a/monai/data/meta_tensor.py
+++ b/monai/data/meta_tensor.py
@@ -505,7 +505,7 @@ def peek_pending_rank(self):
         a = self.pending_operations[-1].get(LazyAttr.AFFINE, None) if self.pending_operations else self.affine
         return 1 if a is None else int(max(1, len(a) - 1))
 
-    def new_empty(self, size, dtype=None, device=None, requires_grad=False):
+    def new_empty(self, size, dtype=None, device=None, requires_grad=False):  # type: ignore[override]
        """
         must be defined for deepcopy to work
 
@@ -580,7 +580,7 @@ def ensure_torch_and_prune_meta(
             img.affine = MetaTensor.get_default_affine()
         return img
 
-    def __repr__(self):
+    def __repr__(self):  # type: ignore[override]
         """
         Prints a representation of the tensor.
         Prepends "meta" to ``torch.Tensor.__repr__``.
diff --git a/monai/networks/layers/simplelayers.py b/monai/networks/layers/simplelayers.py
index 4ac621967f..4acd4a3622 100644
--- a/monai/networks/layers/simplelayers.py
+++ b/monai/networks/layers/simplelayers.py
@@ -452,7 +452,7 @@ def get_binary_kernel(window_size: Sequence[int], dtype=torch.float, device=None
 
 def median_filter(
     in_tensor: torch.Tensor,
-    kernel_size: Sequence[int] = (3, 3, 3),
+    kernel_size: Sequence[int] | int = (3, 3, 3),
     spatial_dims: int = 3,
     kernel: torch.Tensor | None = None,
     **kwargs,
diff --git a/monai/networks/nets/quicknat.py b/monai/networks/nets/quicknat.py
index bbc4e7e490..7e0f9c6b38 100644
--- a/monai/networks/nets/quicknat.py
+++ b/monai/networks/nets/quicknat.py
@@ -42,7 +42,7 @@ class SkipConnectionWithIdx(SkipConnection):
     Inherits from SkipConnection but provides the indizes with each forward pass.
     """
 
-    def forward(self, input, indices):
+    def forward(self, input, indices):  # type: ignore[override]
         return super().forward(input), indices
 
 
@@ -57,7 +57,7 @@ class SequentialWithIdx(nn.Sequential):
     def __init__(self, *args):
         super().__init__(*args)
 
-    def forward(self, input, indices):
+    def forward(self, input, indices):  # type: ignore[override]
         for module in self:
             input, indices = module(input, indices)
         return input, indices
@@ -165,7 +165,7 @@ def _get_layer(self, in_channels, out_channels, dilation):
         )
         return nn.Sequential(conv.get_submodule("adn"), conv.get_submodule("conv"))
 
-    def forward(self, input, _):
+    def forward(self, input, _):  # type: ignore[override]
         i = 0
         result = input
         result1 = input  # this will not stay this value, needed here for pylint/mypy
@@ -215,7 +215,7 @@ def __init__(self, in_channels: int, max_pool, se_layer, dropout, kernel_size, n
         super().__init__(in_channels, se_layer, dropout, kernel_size, num_filters)
         self.max_pool = max_pool
 
-    def forward(self, input, indices=None):
+    def forward(self, input, indices=None):  # type: ignore[override]
         input, indices = self.max_pool(input)
         out_block, _ = super().forward(input, None)
 
@@ -243,7 +243,7 @@ def __init__(self, in_channels: int, un_pool, se_layer, dropout, kernel_size, nu
         super().__init__(in_channels, se_layer, dropout, kernel_size, num_filters)
         self.un_pool = un_pool
 
-    def forward(self, input, indices):
+    def forward(self, input, indices):  # type: ignore[override]
         out_block, _ = super().forward(input, None)
         out_block = self.un_pool(out_block, indices)
         return out_block, None
@@ -270,7 +270,7 @@ def __init__(self, in_channels: int, se_layer, dropout, max_pool, un_pool, kerne
         self.max_pool = max_pool
         self.un_pool = un_pool
 
-    def forward(self, input, indices):
+    def forward(self, input, indices):  # type: ignore[override]
         out_block, indices = self.max_pool(input)
         out_block, _ = super().forward(out_block, None)
         out_block = self.un_pool(out_block, indices)
diff --git a/monai/transforms/croppad/array.py b/monai/transforms/croppad/array.py
index ce3701b263..813f8c1d44 100644
--- a/monai/transforms/croppad/array.py
+++ b/monai/transforms/croppad/array.py
@@ -362,10 +362,10 @@ def __init__(self, lazy: bool = False):
 
     @staticmethod
     def compute_slices(
-        roi_center: Sequence[int] | NdarrayOrTensor | None = None,
-        roi_size: Sequence[int] | NdarrayOrTensor | None = None,
-        roi_start: Sequence[int] | NdarrayOrTensor | None = None,
-        roi_end: Sequence[int] | NdarrayOrTensor | None = None,
+        roi_center: Sequence[int] | int | NdarrayOrTensor | None = None,
+        roi_size: Sequence[int] | int | NdarrayOrTensor | None = None,
+        roi_start: Sequence[int] | int | NdarrayOrTensor | None = None,
+        roi_end: Sequence[int] | int | NdarrayOrTensor | None = None,
         roi_slices: Sequence[slice] | None = None,
     ) -> tuple[slice]:
         """
@@ -459,10 +459,10 @@ class SpatialCrop(Crop):
 
     def __init__(
         self,
-        roi_center: Sequence[int] | NdarrayOrTensor | None = None,
-        roi_size: Sequence[int] | NdarrayOrTensor | None = None,
-        roi_start: Sequence[int] | NdarrayOrTensor | None = None,
-        roi_end: Sequence[int] | NdarrayOrTensor | None = None,
+        roi_center: Sequence[int] | int | NdarrayOrTensor | None = None,
+        roi_size: Sequence[int] | int | NdarrayOrTensor | None = None,
+        roi_start: Sequence[int] | int | NdarrayOrTensor | None = None,
+        roi_end: Sequence[int] | int | NdarrayOrTensor | None = None,
         roi_slices: Sequence[slice] | None = None,
         lazy: bool = False,
     ) -> None:
diff --git a/monai/transforms/croppad/dictionary.py b/monai/transforms/croppad/dictionary.py
index be9441dc4a..cea11d9676 100644
--- a/monai/transforms/croppad/dictionary.py
+++ b/monai/transforms/croppad/dictionary.py
@@ -438,10 +438,10 @@ class SpatialCropd(Cropd):
     def __init__(
         self,
         keys: KeysCollection,
-        roi_center: Sequence[int] | None = None,
-        roi_size: Sequence[int] | None = None,
-        roi_start: Sequence[int] | None = None,
-        roi_end: Sequence[int] | None = None,
+        roi_center: Sequence[int] | int | None = None,
+        roi_size: Sequence[int] | int | None = None,
+        roi_start: Sequence[int] | int | None = None,
+        roi_end: Sequence[int] | int | None = None,
         roi_slices: Sequence[slice] | None = None,
         allow_missing_keys: bool = False,
         lazy: bool = False,
diff --git a/monai/transforms/spatial/array.py b/monai/transforms/spatial/array.py
index 094afdd3c4..3739a83e71 100644
--- a/monai/transforms/spatial/array.py
+++ b/monai/transforms/spatial/array.py
@@ -3441,7 +3441,7 @@ def filter_count(self, image_np: NdarrayOrTensor, locations: np.ndarray) -> tupl
             idx = self.R.permutation(image_np.shape[0])
             idx = idx[: self.num_patches]
             idx_np = convert_data_type(idx, np.ndarray)[0]
-            image_np = image_np[idx]
+            image_np = image_np[idx]  # type: ignore[index]
             locations = locations[idx_np]
             return image_np, locations
         elif self.sort_fn not in (None, GridPatchSort.MIN, GridPatchSort.MAX):
diff --git a/monai/visualize/class_activation_maps.py b/monai/visualize/class_activation_maps.py
index 6d1e8dfd03..489a563818 100644
--- a/monai/visualize/class_activation_maps.py
+++ b/monai/visualize/class_activation_maps.py
@@ -290,7 +290,7 @@ def __init__(
         )
         self.fc_layers = fc_layers
 
-    def compute_map(self, x, class_idx=None, layer_idx=-1, **kwargs):
+    def compute_map(self, x, class_idx=None, layer_idx=-1, **kwargs):  # type: ignore[override]
         logits, acti, _ = self.nn_module(x, **kwargs)
         acti = acti[layer_idx]
         if class_idx is None:
@@ -302,7 +302,7 @@ def compute_map(self, x, class_idx=None, layer_idx=-1, **kwargs):
         output = torch.stack([output[i, b : b + 1] for i, b in enumerate(class_idx)], dim=0)
         return output.reshape(b, 1, *spatial)  # resume the spatial dims on the selected class
 
-    def __call__(self, x, class_idx=None, layer_idx=-1, **kwargs):
+    def __call__(self, x, class_idx=None, layer_idx=-1, **kwargs):  # type: ignore[override]
         """
         Compute the activation map with upsampling and postprocessing.
 
@@ -361,7 +361,7 @@ class GradCAM(CAMBase):
 
     """
 
-    def compute_map(self, x, class_idx=None, retain_graph=False, layer_idx=-1, **kwargs):
+    def compute_map(self, x, class_idx=None, retain_graph=False, layer_idx=-1, **kwargs):  # type: ignore[override]
         _, acti, grad = self.nn_module(x, class_idx=class_idx, retain_graph=retain_graph, **kwargs)
         acti, grad = acti[layer_idx], grad[layer_idx]
         b, c, *spatial = grad.shape
@@ -369,7 +369,7 @@ def compute_map(self, x, class_idx=None, retain_graph=False, layer_idx=-1, **kwa
         acti_map = (weights * acti).sum(1, keepdim=True)
         return F.relu(acti_map)
 
-    def __call__(self, x, class_idx=None, layer_idx=-1, retain_graph=False, **kwargs):
+    def __call__(self, x, class_idx=None, layer_idx=-1, retain_graph=False, **kwargs):  # type: ignore[override]
         """
         Compute the activation map with upsampling and postprocessing.
 
@@ -401,7 +401,7 @@ class GradCAMpp(GradCAM):
 
     """
 
-    def compute_map(self, x, class_idx=None, retain_graph=False, layer_idx=-1, **kwargs):
+    def compute_map(self, x, class_idx=None, retain_graph=False, layer_idx=-1, **kwargs):  # type: ignore[override]
         _, acti, grad = self.nn_module(x, class_idx=class_idx, retain_graph=retain_graph, **kwargs)
         acti, grad = acti[layer_idx], grad[layer_idx]
         b, c, *spatial = grad.shape
diff --git a/tests/test_subpixel_upsample.py b/tests/test_subpixel_upsample.py
index 5abbe57e11..fe9fb1c328 100644
--- a/tests/test_subpixel_upsample.py
+++ b/tests/test_subpixel_upsample.py
@@ -55,9 +55,9 @@
     (2, 1, 32, 16, 8),
 ]
 
-TEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_2D_EXTRA)
-TEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_3D_EXTRA)
-TEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_CONV_BLOCK_EXTRA)
+TEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_2D_EXTRA)  # type: ignore
+TEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_3D_EXTRA)  # type: ignore
+TEST_CASE_SUBPIXEL.append(TEST_CASE_SUBPIXEL_CONV_BLOCK_EXTRA)  # type: ignore
 
 # add every test back with the pad/pool sequential component omitted
 for tests in list(TEST_CASE_SUBPIXEL):