diff --git a/monai/apps/detection/networks/retinanet_detector.py b/monai/apps/detection/networks/retinanet_detector.py
index a0573d6cd1..e996ae81bc 100644
--- a/monai/apps/detection/networks/retinanet_detector.py
+++ b/monai/apps/detection/networks/retinanet_detector.py
@@ -180,7 +180,7 @@ def forward(self, images: torch.Tensor):
                 nesterov=True,
             )
             torch.save(detector.network.state_dict(), 'model.pt')  # save model
-            detector.network.load_state_dict(torch.load('model.pt'))  # load model
+            detector.network.load_state_dict(torch.load('model.pt', weights_only=True))  # load model
     """
 
     def __init__(
diff --git a/monai/apps/mmars/mmars.py b/monai/apps/mmars/mmars.py
index 31c88a17be..1fc0690cc9 100644
--- a/monai/apps/mmars/mmars.py
+++ b/monai/apps/mmars/mmars.py
@@ -241,7 +241,7 @@ def load_from_mmar(
         return torch.jit.load(_model_file, map_location=map_location)
 
     # loading with `torch.load`
-    model_dict = torch.load(_model_file, map_location=map_location)
+    model_dict = torch.load(_model_file, map_location=map_location, weights_only=True)
    if weights_only:
        return model_dict.get(model_key, model_dict)  # model_dict[model_key] or model_dict directly
 
diff --git a/monai/bundle/scripts.py b/monai/bundle/scripts.py
index ccb8010e65..4f84e91f54 100644
--- a/monai/bundle/scripts.py
+++ b/monai/bundle/scripts.py
@@ -737,7 +737,7 @@ def load(
     if load_ts_module is True:
         return load_net_with_metadata(full_path, map_location=torch.device(device), more_extra_files=config_files)
     # loading with `torch.load`
-    model_dict = torch.load(full_path, map_location=torch.device(device))
+    model_dict = torch.load(full_path, map_location=torch.device(device), weights_only=True)
     if not isinstance(model_dict, Mapping):
         warnings.warn(f"the state dictionary from {full_path} should be a dictionary but got {type(model_dict)}.")
 
@@ -1306,7 +1306,7 @@ def _export(
         # here we use ignite Checkpoint to support nested weights and be compatible with MONAI CheckpointSaver
         Checkpoint.load_objects(to_load={key_in_ckpt: net}, checkpoint=ckpt_file)
     else:
-        ckpt = torch.load(ckpt_file)
+        ckpt = torch.load(ckpt_file, weights_only=True)
         copy_model_state(dst=net, src=ckpt if key_in_ckpt == "" else ckpt[key_in_ckpt])
 
     # Use the given converter to convert a model and save with metadata, config content
diff --git a/monai/data/dataset.py b/monai/data/dataset.py
index 8c53338d66..6e7ad8ba48 100644
--- a/monai/data/dataset.py
+++ b/monai/data/dataset.py
@@ -372,10 +372,7 @@ def _cachecheck(self, item_transformed):
 
         if hashfile is not None and hashfile.is_file():  # cache hit
             try:
-                if "weights_only" in signature(torch.load).parameters:
-                    return torch.load(hashfile, weights_only=False)
-                else:
-                    return torch.load(hashfile)
+                return torch.load(hashfile, weights_only=False)
             except PermissionError as e:
                 if sys.platform != "win32":
                     raise e
@@ -1674,7 +1671,4 @@ def _load_meta_cache(self, meta_hash_file_name):
         if meta_hash_file_name in self._meta_cache:
             return self._meta_cache[meta_hash_file_name]
         else:
-            if "weights_only" in signature(torch.load).parameters:
-                return torch.load(self.cache_dir / meta_hash_file_name, weights_only=False)
-            else:
-                return torch.load(self.cache_dir / meta_hash_file_name)
+            return torch.load(self.cache_dir / meta_hash_file_name, weights_only=False)
diff --git a/monai/fl/client/monai_algo.py b/monai/fl/client/monai_algo.py
index a3ac58c221..6e9a6fd1fe 100644
--- a/monai/fl/client/monai_algo.py
+++ b/monai/fl/client/monai_algo.py
@@ -574,7 +574,7 @@ def get_weights(self, extra=None):
                 model_path = os.path.join(self.bundle_root, cast(str, self.model_filepaths[model_type]))
                 if not os.path.isfile(model_path):
                     raise ValueError(f"No best model checkpoint exists at {model_path}")
-                weights = torch.load(model_path, map_location="cpu")
+                weights = torch.load(model_path, map_location="cpu", weights_only=True)
                 # if weights contain several state dicts, use the one defined by `save_dict_key`
                 if isinstance(weights, dict) and self.save_dict_key in weights:
                     weights = weights.get(self.save_dict_key)
diff --git a/monai/handlers/checkpoint_loader.py b/monai/handlers/checkpoint_loader.py
index f48968ecfd..105b4f3a79 100644
--- a/monai/handlers/checkpoint_loader.py
+++ b/monai/handlers/checkpoint_loader.py
@@ -122,7 +122,7 @@ def __call__(self, engine: Engine) -> None:
         Args:
             engine: Ignite Engine, it can be a trainer, validator or evaluator.
         """
-        checkpoint = torch.load(self.load_path, map_location=self.map_location)
+        checkpoint = torch.load(self.load_path, map_location=self.map_location, weights_only=True)
 
         k, _ = list(self.load_dict.items())[0]
         # single object and checkpoint is directly a state_dict
diff --git a/monai/losses/perceptual.py b/monai/losses/perceptual.py
index a8ae90993a..ee653fac9d 100644
--- a/monai/losses/perceptual.py
+++ b/monai/losses/perceptual.py
@@ -374,7 +374,7 @@ def __init__(
         else:
             network = torchvision.models.resnet50(weights=None)
             if pretrained is True:
-                state_dict = torch.load(pretrained_path)
+                state_dict = torch.load(pretrained_path, weights_only=True)
                 if pretrained_state_dict_key is not None:
                     state_dict = state_dict[pretrained_state_dict_key]
                 network.load_state_dict(state_dict)
diff --git a/monai/networks/nets/hovernet.py b/monai/networks/nets/hovernet.py
index 3745b66bb5..b773af91d4 100644
--- a/monai/networks/nets/hovernet.py
+++ b/monai/networks/nets/hovernet.py
@@ -633,9 +633,9 @@ def _remap_preact_resnet_model(model_url: str):
     # download the pretrained weights into torch hub's default dir
     weights_dir = os.path.join(torch.hub.get_dir(), "preact-resnet50.pth")
     download_url(model_url, fuzzy=True, filepath=weights_dir, progress=False)
-    state_dict = torch.load(weights_dir, map_location=None if torch.cuda.is_available() else torch.device("cpu"))[
-        "desc"
-    ]
+    map_location = None if torch.cuda.is_available() else torch.device("cpu")
+    state_dict = torch.load(weights_dir, map_location=map_location, weights_only=True)["desc"]
+
     for key in list(state_dict.keys()):
         new_key = None
         if pattern_conv0.match(key):
@@ -668,7 +668,8 @@ def _remap_standard_resnet_model(model_url: str, state_dict_key: str | None = No
     # download the pretrained weights into torch hub's default dir
     weights_dir = os.path.join(torch.hub.get_dir(), "resnet50.pth")
     download_url(model_url, fuzzy=True, filepath=weights_dir, progress=False)
-    state_dict = torch.load(weights_dir, map_location=None if torch.cuda.is_available() else torch.device("cpu"))
+    map_location = None if torch.cuda.is_available() else torch.device("cpu")
+    state_dict = torch.load(weights_dir, map_location=map_location, weights_only=True)
     if state_dict_key is not None:
         state_dict = state_dict[state_dict_key]
 
diff --git a/monai/networks/nets/resnet.py b/monai/networks/nets/resnet.py
index d62722478e..d24b86d27d 100644
--- a/monai/networks/nets/resnet.py
+++ b/monai/networks/nets/resnet.py
@@ -493,7 +493,7 @@ def _resnet(
        if isinstance(pretrained, str):
            if Path(pretrained).exists():
                logger.info(f"Loading weights from {pretrained}...")
-                model_state_dict = torch.load(pretrained, map_location=device)
+                model_state_dict = torch.load(pretrained, map_location=device, weights_only=True)
            else:
                # Throw error
                raise FileNotFoundError("The pretrained checkpoint file is not found")
@@ -665,7 +665,7 @@ def get_pretrained_resnet_medicalnet(resnet_depth: int, device: str = "cpu", dat
                raise EntryNotFoundError(
                    f"{filename} not found on {medicalnet_huggingface_repo_basename}{resnet_depth}"
                ) from None
-            checkpoint = torch.load(pretrained_path, map_location=torch.device(device))
+            checkpoint = torch.load(pretrained_path, map_location=torch.device(device), weights_only=True)
    else:
        raise NotImplementedError("Supported resnet_depth are: [10, 18, 34, 50, 101, 152, 200]")
    logger.info(f"{filename} downloaded")
diff --git a/monai/networks/nets/senet.py b/monai/networks/nets/senet.py
index 51435a9ea2..c14118ad20 100644
--- a/monai/networks/nets/senet.py
+++ b/monai/networks/nets/senet.py
@@ -302,7 +302,7 @@ def _load_state_dict(model: nn.Module, arch: str, progress: bool):
 
     if isinstance(model_url, dict):
         download_url(model_url["url"], filepath=model_url["filename"])
-        state_dict = torch.load(model_url["filename"], map_location=None)
+        state_dict = torch.load(model_url["filename"], map_location=None, weights_only=True)
     else:
         state_dict = load_state_dict_from_url(model_url, progress=progress)
     for key in list(state_dict.keys()):
diff --git a/monai/networks/nets/swin_unetr.py b/monai/networks/nets/swin_unetr.py
index cfc5dda41f..525e64ab3f 100644
--- a/monai/networks/nets/swin_unetr.py
+++ b/monai/networks/nets/swin_unetr.py
@@ -1118,7 +1118,7 @@ def filter_swinunetr(key, value):
         )
         ssl_weights_path = "./ssl_pretrained_weights.pth"
         download_url(resource, ssl_weights_path)
-        ssl_weights = torch.load(ssl_weights_path)["model"]
+        ssl_weights = torch.load(ssl_weights_path, weights_only=True)["model"]
 
         dst_dict, loaded, not_loaded = copy_model_state(model, ssl_weights, filter_func=filter_swinunetr)
 
diff --git a/monai/networks/nets/transchex.py b/monai/networks/nets/transchex.py
index 6bfff3c956..68d917534a 100644
--- a/monai/networks/nets/transchex.py
+++ b/monai/networks/nets/transchex.py
@@ -68,7 +68,8 @@ def from_pretrained(
         weights_path = cached_file(path_or_repo_id, filename, cache_dir=cache_dir)
         model = cls(num_language_layers, num_vision_layers, num_mixed_layers, bert_config, *inputs, **kwargs)
         if state_dict is None and not from_tf:
-            state_dict = torch.load(weights_path, map_location="cpu" if not torch.cuda.is_available() else None)
+            map_location = "cpu" if not torch.cuda.is_available() else None
+            state_dict = torch.load(weights_path, map_location=map_location, weights_only=True)
         if from_tf:
             return load_tf_weights_in_bert(model, weights_path)
         old_keys = []
diff --git a/monai/utils/state_cacher.py b/monai/utils/state_cacher.py
index 60a074544b..726d59273b 100644
--- a/monai/utils/state_cacher.py
+++ b/monai/utils/state_cacher.py
@@ -124,7 +124,7 @@ def retrieve(self, key: Hashable) -> Any:
         fn = self.cached[key]["obj"]  # pytype: disable=attribute-error
         if not os.path.exists(fn):  # pytype: disable=wrong-arg-types
             raise RuntimeError(f"Failed to load state in {fn}. File doesn't exist anymore.")
-        data_obj = torch.load(fn, map_location=lambda storage, location: storage)
+        data_obj = torch.load(fn, map_location=lambda storage, location: storage, weights_only=True)
         # copy back to device if necessary
         if "device" in self.cached[key]:
             data_obj = data_obj.to(self.cached[key]["device"])
diff --git a/tests/bundle/test_bundle_download.py b/tests/bundle/test_bundle_download.py
index 38620d98ff..da58a6313e 100644
--- a/tests/bundle/test_bundle_download.py
+++ b/tests/bundle/test_bundle_download.py
@@ -266,6 +266,7 @@ def test_load_weights(self, bundle_files, bundle_name, repo, device, model_file)
         with skip_if_downloading_fails():
             # download bundle, and load weights from the downloaded path
             with tempfile.TemporaryDirectory() as tempdir:
+                bundle_root = os.path.join(tempdir, bundle_name)
                 # load weights
                 weights = load(
                     name=bundle_name,
@@ -278,7 +279,7 @@ def test_load_weights(self, bundle_files, bundle_name, repo, device, model_file)
                     return_state_dict=True,
                 )
                 # prepare network
-                with open(os.path.join(tempdir, bundle_name, bundle_files[2])) as f:
+                with open(os.path.join(bundle_root, bundle_files[2])) as f:
                     net_args = json.load(f)["network_def"]
                 model_name = net_args["_target_"]
                 del net_args["_target_"]
@@ -288,9 +289,13 @@ def test_load_weights(self, bundle_files, bundle_name, repo, device, model_file)
                 model.eval()
 
                 # prepare data and test
-                input_tensor = torch.load(os.path.join(tempdir, bundle_name, bundle_files[4]), map_location=device)
+                input_tensor = torch.load(
+                    os.path.join(bundle_root, bundle_files[4]), map_location=device, weights_only=True
+                )
                 output = model.forward(input_tensor)
-                expected_output = torch.load(os.path.join(tempdir, bundle_name, bundle_files[3]), map_location=device)
+                expected_output = torch.load(
+                    os.path.join(bundle_root, bundle_files[3]), map_location=device, weights_only=True
+                )
                 assert_allclose(output, expected_output, atol=1e-4, rtol=1e-4, type_test=False)
 
                 # load instantiated model directly and test, since the bundle has been downloaded,
@@ -350,7 +355,7 @@ def test_load_weights_with_net_override(self, bundle_name, device, net_override)
                 config_file=f"{tempdir}/spleen_ct_segmentation/configs/train.json", workflow_type="train"
             )
             expected_model = workflow.network_def.to(device)
-            expected_model.load_state_dict(torch.load(model_path))
+            expected_model.load_state_dict(torch.load(model_path, weights_only=True))
             expected_output = expected_model(input_tensor)
 
             assert_allclose(output, expected_output, atol=1e-4, rtol=1e-4, type_test=False)
@@ -378,6 +383,7 @@ def test_load_ts_module(self, bundle_files, bundle_name, version, repo, device,
         with skip_if_downloading_fails():
             # load ts module
             with tempfile.TemporaryDirectory() as tempdir:
+                bundle_root = os.path.join(tempdir, bundle_name)
                 # load ts module
                 model_ts, metadata, extra_file_dict = load(
                     name=bundle_name,
@@ -393,9 +399,13 @@ def test_load_ts_module(self, bundle_files, bundle_name, version, repo, device,
                 )
 
                 # prepare and test ts
-                input_tensor = torch.load(os.path.join(tempdir, bundle_name, bundle_files[1]), map_location=device)
+                input_tensor = torch.load(
+                    os.path.join(bundle_root, bundle_files[1]), map_location=device, weights_only=True
+                )
                 output = model_ts.forward(input_tensor)
-                expected_output = torch.load(os.path.join(tempdir, bundle_name, bundle_files[0]), map_location=device)
+                expected_output = torch.load(
+                    os.path.join(bundle_root, bundle_files[0]), map_location=device, weights_only=True
+                )
                 assert_allclose(output, expected_output, atol=1e-4, rtol=1e-4, type_test=False)
                 # test metadata
                 self.assertTrue(metadata["pytorch_version"] == "1.7.1")
diff --git a/tests/data/meta_tensor/test_meta_tensor.py b/tests/data/meta_tensor/test_meta_tensor.py
index 1c192257fe..772ea992af 100644
--- a/tests/data/meta_tensor/test_meta_tensor.py
+++ b/tests/data/meta_tensor/test_meta_tensor.py
@@ -245,7 +245,7 @@ def test_pickling(self):
         with tempfile.TemporaryDirectory() as tmp_dir:
             fname = os.path.join(tmp_dir, "im.pt")
             torch.save(m, fname)
-            m2 = torch.load(fname)
+            m2 = torch.load(fname, weights_only=True)
             self.check(m2, m, ids=False)
 
     @skip_if_no_cuda
diff --git a/tests/integration/test_integration_classification_2d.py b/tests/integration/test_integration_classification_2d.py
index fd9e58aaf8..aecfa2efab 100644
--- a/tests/integration/test_integration_classification_2d.py
+++ b/tests/integration/test_integration_classification_2d.py
@@ -166,7 +166,7 @@ def run_inference_test(root_dir, test_x, test_y, device="cuda:0", num_workers=10
     model = DenseNet121(spatial_dims=2, in_channels=1, out_channels=len(np.unique(test_y))).to(device)
 
     model_filename = os.path.join(root_dir, "best_metric_model.pth")
-    model.load_state_dict(torch.load(model_filename))
+    model.load_state_dict(torch.load(model_filename, weights_only=True))
     y_true = []
     y_pred = []
     with eval_mode(model):
diff --git a/tests/integration/test_integration_segmentation_3d.py b/tests/integration/test_integration_segmentation_3d.py
index fb2937739f..7c30150505 100644
--- a/tests/integration/test_integration_segmentation_3d.py
+++ b/tests/integration/test_integration_segmentation_3d.py
@@ -216,7 +216,7 @@ def run_inference_test(root_dir, device="cuda:0"):
     ).to(device)
 
     model_filename = os.path.join(root_dir, "best_metric_model.pth")
-    model.load_state_dict(torch.load(model_filename))
+    model.load_state_dict(torch.load(model_filename, weights_only=True))
     with eval_mode(model):
         # resampling with align_corners=True or dtype=float64 will generate
         # slight different results between PyTorch 1.5 an 1.6
diff --git a/tests/networks/nets/test_autoencoderkl.py b/tests/networks/nets/test_autoencoderkl.py
index 0a3db60830..2d4c5b66ca 100644
--- a/tests/networks/nets/test_autoencoderkl.py
+++ b/tests/networks/nets/test_autoencoderkl.py
@@ -330,7 +330,7 @@ def test_compatibility_with_monai_generative(self):
             weight_path = os.path.join(tmpdir, filename)
             download_url(url=url, filepath=weight_path, hash_val=hash_val, hash_type=hash_type)
 
-            net.load_old_state_dict(torch.load(weight_path), verbose=False)
+            net.load_old_state_dict(torch.load(weight_path, weights_only=True), verbose=False)
 
 
 if __name__ == "__main__":
diff --git a/tests/networks/nets/test_controlnet.py b/tests/networks/nets/test_controlnet.py
index 9503518762..6158dc2eef 100644
--- a/tests/networks/nets/test_controlnet.py
+++ b/tests/networks/nets/test_controlnet.py
@@ -208,7 +208,7 @@ def test_compatibility_with_monai_generative(self):
             weight_path = os.path.join(tmpdir, filename)
             download_url(url=url, filepath=weight_path, hash_val=hash_val, hash_type=hash_type)
 
-            net.load_old_state_dict(torch.load(weight_path), verbose=False)
+            net.load_old_state_dict(torch.load(weight_path, weights_only=True), verbose=False)
 
 
 if __name__ == "__main__":
diff --git a/tests/networks/nets/test_diffusion_model_unet.py b/tests/networks/nets/test_diffusion_model_unet.py
index a7c823709d..3bca26882c 100644
--- a/tests/networks/nets/test_diffusion_model_unet.py
+++ b/tests/networks/nets/test_diffusion_model_unet.py
@@ -578,7 +578,7 @@ def test_compatibility_with_monai_generative(self):
             weight_path = os.path.join(tmpdir, filename)
             download_url(url=url, filepath=weight_path, hash_val=hash_val, hash_type=hash_type)
 
-            net.load_old_state_dict(torch.load(weight_path), verbose=False)
+            net.load_old_state_dict(torch.load(weight_path, weights_only=True), verbose=False)
 
 
 if __name__ == "__main__":
diff --git a/tests/networks/nets/test_network_consistency.py b/tests/networks/nets/test_network_consistency.py
index e09826de75..4ce198b92f 100644
--- a/tests/networks/nets/test_network_consistency.py
+++ b/tests/networks/nets/test_network_consistency.py
@@ -55,7 +55,7 @@ def test_network_consistency(self, net_name, data_path, json_path):
         print("JSON path: " + json_path)
 
         # Load data
-        loaded_data = torch.load(data_path)
+        loaded_data = torch.load(data_path, weights_only=True)
 
         # Load json from file
         json_file = open(json_path)
diff --git a/tests/networks/nets/test_swin_unetr.py b/tests/networks/nets/test_swin_unetr.py
index 4908907bfe..2c4532ecc4 100644
--- a/tests/networks/nets/test_swin_unetr.py
+++ b/tests/networks/nets/test_swin_unetr.py
@@ -128,7 +128,7 @@ def test_filter_swinunetr(self, input_param, key, value):
                 data_spec["url"], weight_path, hash_val=data_spec["hash_val"], hash_type=data_spec["hash_type"]
             )
 
-            ssl_weight = torch.load(weight_path)["model"]
+            ssl_weight = torch.load(weight_path, weights_only=True)["model"]
             net = SwinUNETR(**input_param)
             dst_dict, loaded, not_loaded = copy_model_state(net, ssl_weight, filter_func=filter_swinunetr)
             assert_allclose(dst_dict[key][:8], value, atol=1e-4, rtol=1e-4, type_test=False)
diff --git a/tests/networks/nets/test_transformer.py b/tests/networks/nets/test_transformer.py
index f9264ba153..daf424c174 100644
--- a/tests/networks/nets/test_transformer.py
+++ b/tests/networks/nets/test_transformer.py
@@ -101,7 +101,7 @@ def test_compatibility_with_monai_generative(self):
             weight_path = os.path.join(tmpdir, filename)
             download_url(url=url, filepath=weight_path, hash_val=hash_val, hash_type=hash_type)
 
-            net.load_old_state_dict(torch.load(weight_path), verbose=False)
+            net.load_old_state_dict(torch.load(weight_path, weights_only=True), verbose=False)
 
 
 if __name__ == "__main__":
diff --git a/tests/networks/test_save_state.py b/tests/networks/test_save_state.py
index 0581a3ce1f..329065da2b 100644
--- a/tests/networks/test_save_state.py
+++ b/tests/networks/test_save_state.py
@@ -64,7 +64,7 @@ def test_file(self, src, expected_keys, create_dir=True, atomic=True, func=None,
             if kwargs is None:
                 kwargs = {}
             save_state(src=src, path=path, create_dir=create_dir, atomic=atomic, func=func, **kwargs)
-            ckpt = dict(torch.load(path))
+            ckpt = dict(torch.load(path, weights_only=True))
             for k in ckpt.keys():
                 self.assertIn(k, expected_keys)