Commit 9d7f17a

fix named_modules (#1877)
1 parent c4ebd98 commit 9d7f17a
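
The change is mechanical: every call site that iterated submodules through the old `modules_and_names()` spelling now uses `named_modules()`, the PyTorch-style iterator the patched code imports from `mindnlp.core`. A minimal sketch of what that iterator yields (the toy model and layer names are hypothetical; it assumes `mindnlp.core.nn` mirrors the familiar `nn.Module` / `nn.Linear` API, as the patched imports suggest):

```python
# Illustrative only: a toy module tree to show what named_modules() yields.
# Assumes mindnlp.core.nn mirrors the PyTorch nn.Module API, as the patched
# import "from mindnlp.core import nn" suggests.
from mindnlp.core import nn

class ToyClassifier(nn.Module):
    def __init__(self):
        super().__init__()
        self.backbone = nn.Linear(8, 8)
        self.classifier = nn.Linear(8, 2)

model = ToyClassifier()
for name, module in model.named_modules():
    # Yields ("", model), ("backbone", Linear), ("classifier", Linear)
    print(repr(name), type(module).__name__)
```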

File tree

4 files changed: +11 -10 lines changed

- mindnlp/peft/peft_model.py (+5 -4)
- mindnlp/peft/tuners/lora/model.py (+3 -3)
- mindnlp/peft/tuners/tuners_utils.py (+2 -2)
- mindnlp/peft/utils/other.py (+1 -1)

mindnlp/peft/peft_model.py

Lines changed: 5 additions & 4 deletions
@@ -25,6 +25,7 @@
 from mindspore.train.serialization import _exec_save

 from mindnlp.core import nn, ops
+from mindnlp.core.nn import functional as F
 from .config import PeftConfig, PromptLearningConfig
 from ..transformers import PreTrainedModel

@@ -473,7 +474,7 @@ def __init__(self, model, peft_config: PeftConfig, adapter_name="default"):
         else:
             self.modules_to_save.update({"classifier", "score"})

-        for name, _ in self.base_model.modules_and_names():
+        for name, _ in self.base_model.named_modules():
             if any(module_name in name for module_name in self.modules_to_save):
                 self.cls_layer_name = name
                 break
@@ -955,7 +956,7 @@ def __init__(self, model, peft_config: PeftConfig = None, adapter_name="default"):
         else:
             self.modules_to_save.update({"classifier", "score"})

-        for name, _ in self.base_model.modules_and_names():
+        for name, _ in self.base_model.named_modules():
             if any(module_name in name for module_name in self.modules_to_save):
                 self.cls_layer_name = name
                 break
@@ -1085,13 +1086,13 @@ def _prefix_tuning_forward(
                 raise ValueError("Model does not support past key values which are required for prefix tuning.")
             outputs = transformer_backbone_name(**kwargs)
             sequence_output = outputs[0]
-            if "dropout" in [name for name, _ in list(self.base_model.modules_and_names())]:
+            if "dropout" in [name for name, _ in list(self.base_model.named_modules())]:
                 sequence_output = self.base_model.dropout(sequence_output)
             logits = self.base_model.get_submodule(self.cls_layer_name)(sequence_output)

             loss = None
             if labels is not None:
-                loss = ops.cross_entropy(logits.view(-1, self.num_labels), labels.view(-1))
+                loss = F.cross_entropy(logits.view(-1, self.num_labels), labels.view(-1))

             output = (logits,) + outputs[2:]
             return ((loss,) + output) if loss is not None else output
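
The two `__init__` hunks only need submodule names, so any iterator yielding `(name, module)` pairs works; the sketch below shows the lookup pattern the patched code uses to find the classification head with `named_modules()` and fetch it with `get_submodule()`. The toy model, its layer names, and `num_labels` are assumptions for illustration, not code from this commit.

```python
# Hypothetical illustration of the lookup pattern in peft_model.py: find the
# classification head by name with named_modules(), then fetch it by path.
from mindnlp.core import nn

class ToyModel(nn.Module):
    def __init__(self, hidden=8, num_labels=3):
        super().__init__()
        self.encoder = nn.Linear(hidden, hidden)
        self.classifier = nn.Linear(hidden, num_labels)

model = ToyModel()
modules_to_save = {"classifier", "score"}

cls_layer_name = None
for name, _ in model.named_modules():
    # named_modules() walks the whole tree, so "classifier" is found by substring.
    if any(module_name in name for module_name in modules_to_save):
        cls_layer_name = name
        break

head = model.get_submodule(cls_layer_name)   # the classification head module
# The prefix-tuning forward then computes the loss with the new functional import:
#   loss = F.cross_entropy(logits.view(-1, num_labels), labels.view(-1))
```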

mindnlp/peft/tuners/lora/model.py

Lines changed: 3 additions & 3 deletions
@@ -611,7 +611,7 @@ def _unload_and_optionally_merge(
         if merge:
             self._check_merge_allowed()

-        key_list = [key for key, _ in self.model.modules_and_names() if self.prefix not in key]
+        key_list = [key for key, _ in self.model.named_modules() if self.prefix not in key]
         desc = "Unloading " + ("and merging " if merge else "") + "model"
         for key in tqdm(key_list, disable=not progressbar, desc=desc):
             try:
@@ -773,7 +773,7 @@ def add_weighted_adapter(
         # Do we really need that?
         _freeze_adapter(self.model, adapter_name)

-        key_list = [key for key, _ in self.model.modules_and_names() if self.prefix not in key]
+        key_list = [key for key, _ in self.model.named_modules() if self.prefix not in key]
         for key in key_list:
             _, target, _ = _get_submodules(self.model, key)
             if isinstance(target, LoraLayer):
@@ -1015,7 +1015,7 @@ def delete_adapter(self, adapter_name: str) -> None:
             raise ValueError(f"Adapter {adapter_name} does not exist")
         del self.peft_config[adapter_name]

-        key_list = [key for key, _ in self.model.modules_and_names() if self.prefix not in key]
+        key_list = [key for key, _ in self.model.named_modules() if self.prefix not in key]
         new_adapter = None
         for key in key_list:
             _, target, _ = _get_submodules(self.model, key)
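
All three hunks in this file build the same `key_list`. The filter on `self.prefix` matters because `named_modules()` also yields the injected LoRA submodules, which the merge/unload/delete loops must skip. The wrapper class and the `"lora_"` prefix below are assumptions for the sketch, not code from this repository.

```python
# Illustrative: named_modules() sees injected LoRA submodules too, so the
# tuner filters them out by prefix before walking base-model keys.
from mindnlp.core import nn

class ToyLoraLinear(nn.Module):
    def __init__(self, in_f, out_f, r=4):
        super().__init__()
        self.base_layer = nn.Linear(in_f, out_f)
        self.lora_A = nn.Linear(in_f, r)
        self.lora_B = nn.Linear(r, out_f)

model = nn.Sequential(ToyLoraLinear(8, 8), nn.Linear(8, 2))
prefix = "lora_"   # hypothetical value of self.prefix

key_list = [key for key, _ in model.named_modules() if prefix not in key]
# Keeps "", "0", "0.base_layer", "1" but drops "0.lora_A" / "0.lora_B", so
# merge / unload / delete_adapter never recurse into adapter weights.
```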

mindnlp/peft/tuners/tuners_utils.py

Lines changed: 2 additions & 2 deletions
@@ -47,7 +47,7 @@ def onload_layer(layer):
         layer with tuners to be merged
     """
     offloaded_modules = []
-    for name, cell in layer.modules_and_names():
+    for name, module in layer.named_modules():
         if name in ["", "base_layer"]:
             continue
         # if hasattr(cell, "_hf_hook") and isinstance(cell._hf_hook, AlignDevicesHook) and cell._hf_hook.offload:
@@ -328,7 +328,7 @@ def inject_adapter(self, model: nn.Module, adapter_name: str):
         self._check_new_adapter_config(peft_config)

         is_target_modules_in_base_model = False
-        key_list = [key for key, _ in model.modules_and_names()] # named_modules
+        key_list = [key for key, _ in model.named_modules()] # named_modules

         model_config = getattr(model, "config", {"model_type": "custom"})
         if hasattr(model_config, "to_dict"):
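
One detail the `onload_layer` hunk relies on: `named_modules()` yields the layer itself under the empty name `""`, which is why the loop explicitly skips `""` and `"base_layer"`. A small sketch with a hypothetical tuner layer:

```python
# Illustrative: the root module is reported under the empty name "", so
# onload_layer() skips it (and the wrapped "base_layer") explicitly.
from mindnlp.core import nn

class ToyTunerLayer(nn.Module):
    def __init__(self):
        super().__init__()
        self.base_layer = nn.Linear(4, 4)
        self.lora_A = nn.Linear(4, 2)

layer = ToyTunerLayer()
for name, module in layer.named_modules():
    if name in ["", "base_layer"]:
        continue              # skip the root module and the frozen base layer
    print(name)               # only "lora_A" is visited here
```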

mindnlp/peft/utils/other.py

Lines changed: 1 addition & 1 deletion
@@ -292,7 +292,7 @@ def _set_trainable(model, adapter_name):
     """
     set trainable
     """
-    key_list = [key for key, _ in model.modules_and_names()] # named_modules modules_and_names
+    key_list = [key for key, _ in model.named_modules()] # named_modules
     for key in key_list:
         target_module_found = any(key.endswith(target_key) for target_key in model.modules_to_save)
         if target_module_found:
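
`_set_trainable` matches the full dotted key paths returned by `named_modules()` against the short names in `modules_to_save`, so it uses `endswith` rather than a substring test. A sketch of that matching with made-up key paths:

```python
# Illustrative: match dotted module paths from named_modules() against short
# target names with endswith(). The paths below are made up.
modules_to_save = {"classifier", "score"}
key_list = [
    "",                                        # the root module
    "base_model.encoder.layer.0.attention",
    "base_model.classifier",                   # should match
    "base_model.score",                        # should match
]

for key in key_list:
    target_module_found = any(key.endswith(target_key) for target_key in modules_to_save)
    if target_module_found:
        print(f"marking {key} as trainable")
```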
