Skip to content

Commit 3d262bb

Browse files
authored
Pt2.6 compatibility (#5611)
1 parent 82b5877 commit 3d262bb

File tree

1 file changed

+2
-1
lines changed

1 file changed

+2
-1
lines changed

fairseq/checkpoint_utils.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -337,7 +337,7 @@ def load_checkpoint_to_cpu(path, arg_overrides=None, load_on_all_ranks=False):
337337
local_path = PathManager.get_local_path(path)
338338

339339
with open(local_path, "rb") as f:
340      -            state = torch.load(f, map_location=torch.device("cpu"))
     340 +            state = torch.load(f, map_location=torch.device("cpu"), weights_only=False)
341341

342342
if "args" in state and state["args"] is not None and arg_overrides is not None:
343343
args = state["args"]
@@ -911,6 +911,7 @@ def load_ema_from_checkpoint(fpath):
911911
map_location=(
912912
lambda s, _: torch.serialization.default_restore_location(s, "cpu")
913913
),
     914 +            weights_only=False,
914915
)
915916

916917
# EMA model is stored in a separate "extra state"

0 commit comments

Comments (0)