Skip to content
Prev Previous commit
Next Next commit
Remove presumably superfluous code from inject_adapter
This code was *probably* for dealing with modules_to_save when calling
inject_adapter directly. However, since the only place that does this is
the PEFT mixed module which already deals with modules_to_save this
code is deemed superfluous.

This also makes dealing with ignoring `modules_to_save` during targeting
easier since we can use the code in `check_target_module_exists` for every
case (targeting nested layer in modules_to_save module + direct targeting of
modules_to_save module).
  • Loading branch information
nemo committed Apr 8, 2025
commit c2cfb68ede6c6532a512b026a1db13adf2fec684
29 changes: 0 additions & 29 deletions src/peft/tuners/tuners_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,6 @@ def inject_adapter(
self._check_new_adapter_config(peft_config)

_check_for_modules_to_save = getattr(peft_config, "modules_to_save", None) is not None
_has_modules_to_save = False

model_config = self.get_model_config(model)

Expand Down Expand Up @@ -472,28 +471,6 @@ def inject_adapter(
for key in key_list:
if not key:
continue
# Check for modules_to_save in case
#
# Note that this is redundant with PeftModel.set_additional_trainable_models but might be necessary
# when calling inject_adapter without a PEFT model. This is outdated as it only focuses on
# ModulesToSaveWrapper and ignores other potentially configured AuxiliaryTrainingWrapper instances.
#
# TODO: determine if there's a good reason for this and refactor to support AuxiliaryTrainingWrapper,
# or remove if superfluous.
if _check_for_modules_to_save and any(
key.endswith(module_to_save) for module_to_save in peft_config.modules_to_save
):
# Optionally set the modules to save
parent, target, target_name = _get_submodules(model, key)

if not isinstance(target, ModulesToSaveWrapper):
new_module = ModulesToSaveWrapper(target, adapter_name)
setattr(parent, target_name, new_module)
else:
target.update(adapter_name)

_has_modules_to_save = True
continue

result = self._check_target_module_exists(peft_config, key)
if isinstance(result, _ExcludedModule):
Expand Down Expand Up @@ -565,12 +542,6 @@ def inject_adapter(
if adapter_name in n:
p.requires_grad = False

if _has_modules_to_save:
if not hasattr(model, "modules_to_save"):
model.modules_to_save = set(peft_config.modules_to_save)
else:
model.modules_to_save.update(set(peft_config.modules_to_save))

def merge_adapter(self, adapter_names: Optional[list[str]] = None, safe_merge: bool = False) -> None:
"""
This method merges the adapter layers into the base model.
Expand Down