
Commit a717062
fix cell_ to module_ in mindnlp.peft (#1874)
1 parent dc565e8 commit a717062

14 files changed: +96 −96 lines

mindnlp/peft/peft_model.py

Lines changed: 2 additions & 2 deletions
@@ -474,7 +474,7 @@ def __init__(self, model, peft_config: PeftConfig, adapter_name="default"):
             self.modules_to_save.update({"classifier", "score"})

         for name, _ in self.base_model.cells_and_names():
-            if any(cell_name in name for cell_name in self.modules_to_save):
+            if any(module_name in name for module_name in self.modules_to_save):
                 self.cls_layer_name = name
                 break

@@ -956,7 +956,7 @@ def __init__(self, model, peft_config: PeftConfig = None, adapter_name="default"
             self.modules_to_save.update({"classifier", "score"})

         for name, _ in self.base_model.cells_and_names():
-            if any(cell_name in name for cell_name in self.modules_to_save):
+            if any(module_name in name for module_name in self.modules_to_save):
                 self.cls_layer_name = name
                 break

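The loops above scan submodule names and record the first one that looks like a classification head. A minimal, self-contained sketch of that lookup, assuming only an iterable of (name, module) pairs (the real code iterates base_model.cells_and_names() on a MindSpore Cell); the helper name and sample names are illustrative:

from typing import Iterable, Optional, Tuple

def find_cls_layer(named_modules: Iterable[Tuple[str, object]],
                   modules_to_save: Iterable[str] = ("classifier", "score")) -> Optional[str]:
    # Return the first submodule name containing one of the head keywords.
    for name, _ in named_modules:
        if any(module_name in name for module_name in modules_to_save):
            return name
    return None

# Illustrative names only:
names = [("bert.encoder.layer.0.attention", None), ("classifier.dense", None)]
print(find_cls_layer(names))  # -> "classifier.dense"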
mindnlp/peft/tuners/ia3/model.py

Lines changed: 6 additions & 6 deletions
@@ -37,7 +37,7 @@
     ModulesToSaveWrapper,
     _get_subcells,
 )
-from ..tuners_utils import BaseTuner, BaseTunerLayer, check_target_cell_exists
+from ..tuners_utils import BaseTuner, BaseTunerLayer, check_target_module_exists
 from .layer import Conv2d, IA3Layer, Linear


@@ -192,7 +192,7 @@ def _create_new_cell(ia3_config, adapter_name, target, **kwargs):
         return new_cell

     @staticmethod
-    def _check_target_cell_exists(ia3_config, key):
+    def _check_target_module_exists(ia3_config, key):
         r"""
         Checks if the target cell exists in the IA3 configuration.

@@ -209,7 +209,7 @@ def _check_target_cell_exists(ia3_config, key):
         Raises:
             None: This method does not raise any exceptions.
         """
-        return check_target_cell_exists(ia3_config, key)
+        return check_target_module_exists(ia3_config, key)

     def _mark_only_adapters_as_trainable(self, model: nn.Module) -> None:
         r"""
@@ -274,7 +274,7 @@ def _create_and_replace(self, ia3_config, adapter_name, target, target_name, par
             None
         """
         current_key = optionnal_kwargs.pop('current_key')
-        is_feedforward = self._check_target_cell_feedforward(ia3_config, current_key)
+        is_feedforward = self._check_target_module_feedforward(ia3_config, current_key)
         kwargs = {'fan_in_fan_out': ia3_config.fan_in_fan_out, 'init_ia3_weights': ia3_config.init_ia3_weights, 'is_feedforward': is_feedforward}
         kwargs['loaded_in_8bit'] = optionnal_kwargs.pop('loaded_in_8bit', False)
         kwargs['loaded_in_4bit'] = optionnal_kwargs.pop('loaded_in_4bit', False)
@@ -287,7 +287,7 @@ def _create_and_replace(self, ia3_config, adapter_name, target, target_name, par
         self._replace_cell(parent, target_name, new_cell, target)
         # check if target cell is in feedforward_cells
         current_key = optionnal_kwargs.pop("current_key")
-        is_feedforward = self._check_target_cell_feedforward(ia3_config, current_key)
+        is_feedforward = self._check_target_module_feedforward(ia3_config, current_key)

         kwargs = {
             "fan_in_fan_out": ia3_config.fan_in_fan_out,
@@ -309,7 +309,7 @@ def _create_and_replace(self, ia3_config, adapter_name, target, target_name, par
         self._replace_cell(parent, target_name, new_cell, target)

     @staticmethod
-    def _check_target_cell_feedforward(ia3_config, key) -> bool:
+    def _check_target_module_feedforward(ia3_config, key) -> bool:
         """
         A helper private method that checks if the target cell `key` matches with a feedforward cell specified in
         `ia3_config`

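For reference, the renamed _check_target_module_feedforward decides whether a matched layer gets the IA³ feedforward treatment. A hedged sketch of that kind of check, mirroring the upstream PEFT convention of accepting either a regex string or a list of name suffixes (not the verbatim mindnlp implementation):

import re
from typing import List, Union

def is_feedforward_module(feedforward_modules: Union[str, List[str]], key: str) -> bool:
    # A regex config must match the full module path; a list config matches by suffix.
    if isinstance(feedforward_modules, str):
        return re.fullmatch(feedforward_modules, key) is not None
    return any(key.endswith(target_key) for target_key in feedforward_modules)

print(is_feedforward_module(["fc1", "fc2"], "encoder.layers.3.fc1"))     # True
print(is_feedforward_module(["fc1", "fc2"], "encoder.layers.3.q_proj"))  # False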
mindnlp/peft/tuners/ln_tuning/model.py

Lines changed: 3 additions & 3 deletions
@@ -26,7 +26,7 @@
 from mindnlp.peft.tuners.tuners_utils import (
     BaseTuner,
     _get_subcells,
-    check_target_cell_exists,
+    check_target_module_exists,
 )
 from mindnlp.peft.utils import (
     TRANSFORMERS_MODELS_TO_LNTUNING_TARGET_MODULES_MAPPING,
@@ -150,8 +150,8 @@ def _mark_only_adapters_as_trainable(self, model: Cell):
             else:
                 p.requires_grad = True

-    def _check_target_cell_exists(self, peft_config: PeftConfig, key: str) -> bool:
-        return check_target_cell_exists(peft_config, key)
+    def _check_target_module_exists(self, peft_config: PeftConfig, key: str) -> bool:
+        return check_target_module_exists(peft_config, key)

     def _set_adapter_layers(self, enabled: bool) -> None:
         for cell in self.model.cells():

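The context lines above come from _mark_only_adapters_as_trainable. As a rough, hedged sketch of that idea only (name-substring matching is illustrative; the real method inspects the LN-tuning wrapper layers rather than raw parameter names):

def mark_only_adapters_as_trainable(model, adapter_name: str = "default") -> None:
    # Freeze every parameter that does not belong to the active adapter.
    # parameters_and_names() is the MindSpore-style equivalent of
    # PyTorch's named_parameters().
    for name, param in model.parameters_and_names():
        param.requires_grad = adapter_name in name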
mindnlp/peft/tuners/lokr/config.py

Lines changed: 2 additions & 2 deletions
@@ -30,7 +30,7 @@ class LoKrConfig(PeftConfig):
         target_modules (`Union[List[str],str]`): The names of the cells to apply Lora to.
         lora_alpha (`float`): The alpha parameter for Lokr scaling.
         rank_dropout (`float`):The dropout probability for rank dimension during training.
-        cell_dropout (`float`): The dropout probability for LoKR layers.
+        module_dropout (`float`): The dropout probability for LoKR layers.
         use_effective_conv2d (`bool`):
             Use parameter effective decomposition for
             Conv2d with ksize > 1 ("Proposition 3" from FedPara paper).
@@ -71,7 +71,7 @@ class LoKrConfig(PeftConfig):
         default=0.0,
         metadata={"help": "The dropout probability for rank dimension during training"},
     )
-    cell_dropout: float = field(default=0.0, metadata={"help": "lokr dropout"})
+    module_dropout: float = field(default=0.0, metadata={"help": "lokr dropout"})
     use_effective_conv2d: bool = field(
         default=False,
         metadata={

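Downstream, the renamed field is passed like any other LoKrConfig argument. A hedged usage sketch modeled on the docstring example in lokr/model.py below (the import path is assumed):

from mindnlp.peft import LoKrConfig  # import path assumed

config = LoKrConfig(
    lora_alpha=32,
    target_modules=["q_proj", "v_proj", "fc1", "fc2"],
    rank_dropout=0.0,
    module_dropout=0.1,   # formerly `cell_dropout`
    init_weights=True,
)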
mindnlp/peft/tuners/lokr/layer.py

Lines changed: 16 additions & 16 deletions
@@ -49,7 +49,7 @@ class LoKrLayer(nn.Module, BaseTunerLayer):
         - alpha: Dictionary storing alpha values for each adapter.
         - scaling: Dictionary storing scaling values for each adapter.
         - rank_dropout: Dictionary storing rank dropout probabilities for each adapter.
-        - cell_dropout: Dictionary storing cell dropout probabilities for each adapter.
+        - module_dropout: Dictionary storing cell dropout probabilities for each adapter.
         - _disable_adapters: Boolean flag indicating whether adapters are disabled.
         - merged_adapters: List of names of merged adapters.

@@ -74,7 +74,7 @@ class LoKrLayer(nn.Module, BaseTunerLayer):
     Note:
         This class is intended for advanced neural network adaptation techniques and should be used in conjunction with PyTorch's nn.Module functionalities.
     """
-    other_param_names = ("r", "alpha", "scaling", "rank_dropout", "cell_dropout")
+    other_param_names = ("r", "alpha", "scaling", "rank_dropout", "module_dropout")
     # All names of layers that may contain adapter weights
     adapter_layer_names = (
         "lokr_w1",
@@ -132,7 +132,7 @@ def __init__(self, base_layer: nn.Module) -> None:
         self.alpha = {}
         self.scaling = {}
         self.rank_dropout = {}
-        self.cell_dropout = {}
+        self.module_dropout = {}

         # Tuner info
         self._disable_adapters = False
@@ -491,7 +491,7 @@ def update_layer(
         r: int,
         alpha: float,
         rank_dropout: float,
-        cell_dropout: float,
+        module_dropout: float,
         init_weights: bool,
         use_effective_conv2d: bool,
         decompose_both: bool,
@@ -505,7 +505,7 @@ def update_layer(
             r (`int`): Rank for the added adapter.
             alpha (`float`): Alpha for the added adapter.
             rank_dropout (`float`): The dropout probability for rank dimension during training
-            cell_dropout (`float`): The dropout probability for disabling adapter during training.
+            module_dropout (`float`): The dropout probability for disabling adapter during training.
             init_weights (`bool`): Whether to initialize adapter weights.
             use_effective_conv2d (`bool`): Use parameter effective decomposition for Conv2d with ksize > 1.
             decompose_both (`bool`): Perform rank decomposition of left kronecker product matrix.
@@ -520,7 +520,7 @@ def update_layer(
         self.alpha[adapter_name] = alpha
         self.scaling[adapter_name] = alpha / r
         self.rank_dropout[adapter_name] = rank_dropout
-        self.cell_dropout[adapter_name] = cell_dropout
+        self.module_dropout[adapter_name] = module_dropout
         base_layer = self.get_base_layer()

         # Determine shape of LoKr weights
@@ -589,8 +589,8 @@ def set_adapter(self, adapter_names) -> None:

         # Deactivate grads on the inactive adapter and activate grads on the active adapter
         for layer_name in self.adapter_layer_names:
-            cell_dict = getattr(self, layer_name)
-            for key, layer in cell_dict.items():
+            module_dict = getattr(self, layer_name)
+            for key, layer in module_dict.items():
                 if key in adapter_names:
                     # Note: It is possible that not a single layer is called with requires_grad_(True) here. This may
                     # happen if a completely different adapter layer is being activated.
@@ -729,11 +729,11 @@ def forward(self, x: ms.Tensor, *args, **kwargs) -> ms.Tensor:
             if active_adapter not in self._available_adapters:
                 continue

-            cell_dropout = self.cell_dropout[active_adapter]
+            module_dropout = self.module_dropout[active_adapter]

             # Modify current execution weights
             if (not self.training) or (
-                self.training and ops.rand(1) > cell_dropout
+                self.training and ops.rand(1) > module_dropout
             ):
                 result = result + self._get_delta_activations(
                     active_adapter, x, *args, **kwargs
@@ -752,7 +752,7 @@ def __init__(
         r: int = 0,
         alpha: float = 0.0,
         rank_dropout: float = 0.0,
-        cell_dropout: float = 0.0,
+        module_dropout: float = 0.0,
         init_weights: bool = True,
         **kwargs,
     ):
@@ -766,7 +766,7 @@ def __init__(
             r (int): The value of r for adapter update. Defaults to 0.
             alpha (float): The value of alpha for adapter update. Defaults to 0.0.
             rank_dropout (float): The dropout value for rank. Defaults to 0.0.
-            cell_dropout (float): The dropout value for cell. Defaults to 0.0.
+            module_dropout (float): The dropout value for cell. Defaults to 0.0.
             init_weights (bool): A flag to initialize weights. Defaults to True.
             **kwargs: Additional keyword arguments.

@@ -781,7 +781,7 @@ def __init__(
         # Create adapter and set it active
         self._active_adapter = adapter_name
         self.update_layer(
-            adapter_name, r, alpha, rank_dropout, cell_dropout, init_weights, **kwargs
+            adapter_name, r, alpha, rank_dropout, module_dropout, init_weights, **kwargs
         )

     def _get_delta_activations(
@@ -835,7 +835,7 @@ def __init__(
         r: int = 0,
         alpha: float = 0.0,
         rank_dropout: float = 0.0,
-        cell_dropout: float = 0.0,
+        module_dropout: float = 0.0,
         use_effective_conv2d: bool = False,
         init_weights: bool = True,
         **kwargs,
@@ -850,7 +850,7 @@ def __init__(
             r (int): The value of parameter 'r'.
             alpha (float): The value of parameter 'alpha'.
             rank_dropout (float): The value of rank dropout.
-            cell_dropout (float): The value of cell dropout.
+            module_dropout (float): The value of cell dropout.
             use_effective_conv2d (bool): Flag indicating whether to use effective Conv2d.
             init_weights (bool): Flag indicating whether to initialize weights.
             **kwargs: Additional keyword arguments.
@@ -870,7 +870,7 @@ def __init__(
             r,
             alpha,
             rank_dropout,
-            cell_dropout,
+            module_dropout,
             init_weights,
             use_effective_conv2d,
             **kwargs,

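The forward-pass hunk above is the behavioral heart of the rename: during training, the whole adapter contribution is skipped with probability module_dropout. A minimal sketch of that gate, using plain random in place of mindspore.ops.rand for illustration:

import random

def apply_adapter_delta(result, delta, module_dropout: float, training: bool):
    # At inference time the adapter always contributes; during training it is
    # dropped with probability `module_dropout` (the renamed field).
    if (not training) or (random.random() > module_dropout):
        return result + delta
    return result

print(apply_adapter_delta(1.0, 0.25, module_dropout=0.0, training=True))  # 1.25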
mindnlp/peft/tuners/lokr/model.py

Lines changed: 11 additions & 11 deletions
@@ -28,7 +28,7 @@
 from ..tuners_utils import (
     BaseTuner,
     BaseTunerLayer,
-    check_target_cell_exists,
+    check_target_module_exists,
 )
 from .layer import Conv2d, Dense, LoKrLayer
 from .config import LoKrConfig
@@ -59,7 +59,7 @@ class LoKrModel(BaseTuner):
         ...     lora_alpha=32,
         ...     target_modules=["k_proj", "q_proj", "v_proj", "out_proj", "fc1", "fc2"],
         ...     rank_dropout=0.0,
-        ...     cell_dropout=0.0,
+        ...     module_dropout=0.0,
         ...     init_weights=True,
         ... )
         >>> config_unet = LoKrConfig(
@@ -76,7 +76,7 @@ class LoKrModel(BaseTuner):
         ...         "ff.net.2",
         ...     ],
         ...     rank_dropout=0.0,
-        ...     cell_dropout=0.0,
+        ...     module_dropout=0.0,
         ...     init_weights=True,
         ...     use_effective_conv2d=True,
         ... )
@@ -153,22 +153,22 @@ def _create_new_cell(
            This occurs when the target cell type does not match any of the supported cell types in the layers_mapping attribute.
        """
        # Find corresponding subtype of provided target cell
-        new_cell_cls = None
+        new_module_cls = None
        for subtype, target_cls in cls.layers_mapping.items():
            if (
                hasattr(target, "base_layer")
                and isinstance(target.get_base_layer(), subtype)
                and isinstance(target, BaseTunerLayer)
            ):
                # nested tuner layers are allowed
-                new_cell_cls = target_cls
+                new_module_cls = target_cls
                break
            elif isinstance(target, subtype):
-                new_cell_cls = target_cls
+                new_module_cls = target_cls
                break

        # We didn't find corresponding type, so adapter for this layer is not supported
-        if new_cell_cls is None:
+        if new_module_cls is None:
            supported_cells = ", ".join(
                layer.__name__ for layer in cls.layers_mapping.keys()
            )
@@ -183,9 +183,9 @@ def _create_new_cell(
            target_base_layer = target

        if isinstance(target_base_layer, nn.Module):
-            new_cell = new_cell_cls(target, adapter_name=adapter_name, **kwargs)
+            new_cell = new_module_cls(target, adapter_name=adapter_name, **kwargs)
        elif isinstance(target_base_layer, nn.Module):
-            new_cell = new_cell_cls(target, adapter_name=adapter_name, **kwargs)
+            new_cell = new_module_cls(target, adapter_name=adapter_name, **kwargs)
        else:
            supported_cells = ", ".join(
                layer.__name__ for layer in cls.layers_mapping.keys()
@@ -388,7 +388,7 @@ def _prepare_adapter_config(peft_config, model_config):
        return peft_config

    @staticmethod
-    def _check_target_cell_exists(LoKR_config, key):
+    def _check_target_module_exists(LoKR_config, key):
        r"""
        Checks if a target cell exists in the LoKR configuration.

@@ -403,4 +403,4 @@ def _check_target_cell_exists(LoKR_config, key):
        This method does not raise any exceptions explicitly. However, if the target cell does not exist in the LoKR configuration, further handling may be required based on the context in which this
        method is used.
        """
-        return check_target_cell_exists(LoKR_config, key)
+        return check_target_module_exists(LoKR_config, key)

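The _create_new_cell change is a pure variable rename, but the dispatch it performs is worth spelling out: the target layer's type is looked up in layers_mapping to pick the adapter class. A self-contained sketch of that pattern with stand-in classes (not the actual mindnlp types):

class BaseDense: ...
class BaseConv2d: ...

class LoKrDense:
    def __init__(self, target, adapter_name, **kwargs):
        self.target, self.adapter_name = target, adapter_name

class LoKrConv2d(LoKrDense): ...

LAYERS_MAPPING = {BaseDense: LoKrDense, BaseConv2d: LoKrConv2d}

def create_new_module(target, adapter_name="default", **kwargs):
    # Pick the adapter class whose base type matches the target layer.
    new_module_cls = None
    for subtype, adapter_cls in LAYERS_MAPPING.items():
        if isinstance(target, subtype):
            new_module_cls = adapter_cls
            break
    if new_module_cls is None:
        supported = ", ".join(layer.__name__ for layer in LAYERS_MAPPING)
        raise ValueError(f"Unsupported target layer; expected one of: {supported}")
    return new_module_cls(target, adapter_name=adapter_name, **kwargs)

print(type(create_new_module(BaseConv2d())).__name__)  # LoKrConv2d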
mindnlp/peft/tuners/lora/model.py

Lines changed: 9 additions & 9 deletions
@@ -36,7 +36,7 @@
 from ..tuners_utils import (
     BaseTuner,
     BaseTunerLayer,
-    check_target_cell_exists,
+    check_target_module_exists,
     # onload_layer,
     replicate_layers,
 )
@@ -161,7 +161,7 @@ def _check_new_adapter_config(self, config: LoraConfig) -> None:
             )

     @staticmethod
-    def _check_target_cell_exists(lora_config, key):
+    def _check_target_module_exists(lora_config, key):
         r"""
         Checks if the target cell exists in the LoRa configuration.

@@ -190,7 +190,7 @@ def _check_target_cell_exists(lora_config, key):
         Raises:
             None: This method does not raise any exceptions.
         """
-        return check_target_cell_exists(lora_config, key)
+        return check_target_module_exists(lora_config, key)

     def _prepare_model(self, peft_config: LoraConfig, model: nn.Module):
         r"""
@@ -683,23 +683,23 @@ def _check_add_weighted_adapter(
         else:
             raise ValueError(f"Invalid combination_type: {combination_type}")

-        target_cell_types = [type(self.peft_config[adapter].target_modules) for adapter in adapters]
-        if not target_cell_types:
+        target_module_types = [type(self.peft_config[adapter].target_modules) for adapter in adapters]
+        if not target_module_types:
             raise ValueError(f"Found no adapter matching the names in {adapters}")
-        if len(set(target_cell_types)) > 1:
+        if len(set(target_module_types)) > 1:
             raise ValueError(
                 "all adapter configs should follow the same target cells type. "
                 "Combining adapters with `target_modules` type being a mix of list/set and string is not supported."
             )

-        if target_cell_types[0] == str:
+        if target_module_types[0] == str:
             new_target_modules = "|".join(f"({self.peft_config[adapter].target_modules})" for adapter in adapters)
-        elif target_cell_types[0] == set:
+        elif target_module_types[0] == set:
             new_target_modules = reduce(
                 operator.or_, (self.peft_config[adapter].target_modules for adapter in adapters)
             )
         else:
-            raise TypeError(f"Invalid type {target_cell_types[0]} found in target_modules")
+            raise TypeError(f"Invalid type {target_module_types[0]} found in target_modules")

         return combination_type, new_rank, new_target_modules


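Finally, the _check_add_weighted_adapter hunk only renames the local list, but the merge rule it implements is easy to restate: string-typed target_modules are OR-ed as regexes, set-typed ones are unioned, and mixing the two is rejected. A hedged, self-contained sketch of that rule operating on the raw target_modules values rather than LoraConfig objects:

from functools import reduce
import operator

def combine_target_modules(target_modules_list):
    target_module_types = [type(tm) for tm in target_modules_list]
    if not target_module_types:
        raise ValueError("Found no adapters to combine")
    if len(set(target_module_types)) > 1:
        raise ValueError("Mixing str and set `target_modules` is not supported.")
    if target_module_types[0] is str:
        return "|".join(f"({tm})" for tm in target_modules_list)  # regexes OR-ed
    if target_module_types[0] is set:
        return reduce(operator.or_, target_modules_list)          # sets unioned
    raise TypeError(f"Invalid type {target_module_types[0]} found in target_modules")

print(combine_target_modules([{"q_proj"}, {"v_proj"}]))      # {'q_proj', 'v_proj'}
print(combine_target_modules(["q_proj|k_proj", "v_proj"]))   # (q_proj|k_proj)|(v_proj)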