diff --git a/monai/transforms/utils.py b/monai/transforms/utils.py
index 14f35e1219..560dbac346 100644
--- a/monai/transforms/utils.py
+++ b/monai/transforms/utils.py
@@ -2190,7 +2190,7 @@ def distance_transform_edt(
         if return_distances:
             dtype = torch.float64 if float64_distances else torch.float32
             if distances is None:
-                distances = torch.zeros_like(img, dtype=dtype)  # type: ignore
+                distances = torch.zeros_like(img, memory_format=torch.contiguous_format, dtype=dtype)  # type: ignore
             else:
                 if not isinstance(distances, torch.Tensor) and distances.device != img.device:
                     raise TypeError("distances must be a torch.Tensor on the same device as img")
diff --git a/tests/test_clip_intensity_percentiles.py b/tests/test_clip_intensity_percentiles.py
index 01820e7115..af157446f6 100644
--- a/tests/test_clip_intensity_percentiles.py
+++ b/tests/test_clip_intensity_percentiles.py
@@ -18,7 +18,6 @@
 from monai.transforms import ClipIntensityPercentiles
 from monai.transforms.utils import soft_clip
 from monai.transforms.utils_pytorch_numpy_unification import clip, percentile
-from monai.utils.type_conversion import convert_to_tensor
 from tests.utils import TEST_NDARRAYS, NumpyImageTestCase2D, NumpyImageTestCase3D, assert_allclose


@@ -30,7 +29,7 @@ def test_hard_clipping_two_sided(self, p):
         im = p(self.imt)
         result = hard_clipper(im)
         lower, upper = percentile(im, (5, 95))
-        expected = clip(convert_to_tensor(im), lower, upper)
+        expected = clip(im, lower, upper)
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
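Note (not part of the patch): the added `memory_format=torch.contiguous_format` argument is relevant because `torch.zeros_like` defaults to `torch.preserve_format`, so a channels-last or otherwise strided `img` would otherwise produce a non-contiguous `distances` buffer. A minimal sketch of that PyTorch behavior, using a hypothetical 4D channels-last input purely for illustration:

```python
import torch

# Hypothetical input: a 4D tensor stored in channels-last order.
img = torch.rand(1, 3, 8, 8).to(memory_format=torch.channels_last)

# Default zeros_like preserves the input's strides (channels-last here).
preserved = torch.zeros_like(img, dtype=torch.float32)

# The patched call forces a standard contiguous allocation.
forced = torch.zeros_like(img, memory_format=torch.contiguous_format, dtype=torch.float32)

print(preserved.is_contiguous())  # False -- inherits channels-last strides
print(forced.is_contiguous())     # True  -- explicit contiguous layout
```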