From 3a18fdd666a93852d9779cdccb0165b5ea209f91 Mon Sep 17 00:00:00 2001
From: whitesword <1790513390@qq.com>
Date: Mon, 22 Dec 2025 20:50:25 +0800
Subject: [PATCH] Fix: support loading DDP-saved LoRA weights for inference

---
 model/model_lora.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/model/model_lora.py b/model/model_lora.py
index b7c1d4c..8011526 100644
--- a/model/model_lora.py
+++ b/model/model_lora.py
@@ -34,6 +34,16 @@ def apply_lora(model, rank=8):
 
 def load_lora(model, path):
     state_dict = torch.load(path, map_location=model.device)
+
+    # Weights saved during DDP training carry a 'module.' prefix; strip it so keys match the current model
+    new_state_dict = {}
+    for k, v in state_dict.items():
+        if k.startswith('module.'):
+            new_state_dict[k[7:]] = v
+        else:
+            new_state_dict[k] = v
+    state_dict = new_state_dict
+
     for name, module in model.named_modules():
         if hasattr(module, 'lora'):
             lora_state = {k.replace(f'{name}.lora.', ''): v for k, v in state_dict.items() if f'{name}.lora.' in k}