Skip to content
Next Next commit
Added validation checks for `layers_to_transform` and `layers_pattern` in the LoRA config
  • Loading branch information
JINO-ROHIT committed Oct 17, 2024
commit 50a1d5a24a59f4be7a043626390b95520198b2ae
7 changes: 7 additions & 0 deletions src/peft/tuners/lora/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,6 +338,13 @@ def __post_init__(self):
self.exclude_modules = (
set(self.exclude_modules) if isinstance(self.exclude_modules, list) else self.exclude_modules
)

# check for layers_to_transform and layers_pattern
if self.layers_to_transform is not None and self.layers_pattern is None:
raise ValueError(
"When `layers_to_transform` is specified, `layers_pattern` must also be specified. "
)

# if target_modules is a regex expression, then layers_to_transform should be None
if isinstance(self.target_modules, str) and self.layers_to_transform is not None:
raise ValueError("`layers_to_transform` cannot be used when `target_modules` is a str.")
Expand Down