From 4614ce3c3d928134d8bbcf7e270be92abc3ca80f Mon Sep 17 00:00:00 2001
From: Jintao Huang
Date: Thu, 9 Apr 2026 23:45:44 +0800
Subject: [PATCH 1/2] Fix the multi-LoRA issue in Twinkle.

---
 src/mcore_bridge/bridge/gpt_bridge.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/mcore_bridge/bridge/gpt_bridge.py b/src/mcore_bridge/bridge/gpt_bridge.py
index 7039a91..4871c39 100644
--- a/src/mcore_bridge/bridge/gpt_bridge.py
+++ b/src/mcore_bridge/bridge/gpt_bridge.py
@@ -267,7 +267,7 @@ def _set_module(self, mg_module, hf_state_dict, hf_prefix: str, to_mcore: bool):
         new_state_dict = {}
         for k, v in hf_state_dict.items():
             if self._peft_format:
-                if '.lora_A.' in k or '.lora_B.' in k or '.modules_to_save.' in k:
+                if ('.lora_A.' in k or '.lora_B.' in k or '.modules_to_save.' in k) and f'{self._adapter_name}.' in k:
                     k = k.replace(f'{self._adapter_name}.', '')
                     new_state_dict[k] = v
             else:
@@ -1703,7 +1703,7 @@ def export_weights(
         self.config = mg_models[0].config
         with torch.no_grad():
             for k, v in self._convert(mg_models, {}, hf_prefix, False, tqdm_desc=tqdm_desc):
-                if converter:
+                if converter and v is not None:
                     kv = converter(k, v)
                     if kv is None:
                         continue

From 609ebd492416df208434b54fefc01428f4ea4d3f Mon Sep 17 00:00:00 2001
From: Jintao Huang
Date: Thu, 9 Apr 2026 23:48:16 +0800
Subject: [PATCH 2/2] lint pass

---
 src/mcore_bridge/bridge/gpt_bridge.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/mcore_bridge/bridge/gpt_bridge.py b/src/mcore_bridge/bridge/gpt_bridge.py
index 4871c39..2a4ad4b 100644
--- a/src/mcore_bridge/bridge/gpt_bridge.py
+++ b/src/mcore_bridge/bridge/gpt_bridge.py
@@ -267,7 +267,8 @@ def _set_module(self, mg_module, hf_state_dict, hf_prefix: str, to_mcore: bool):
         new_state_dict = {}
         for k, v in hf_state_dict.items():
             if self._peft_format:
-                if ('.lora_A.' in k or '.lora_B.' in k or '.modules_to_save.' in k) and f'{self._adapter_name}.' in k:
+                if ('.lora_A.' in k or '.lora_B.' in k
+                    or '.modules_to_save.' in k) and f'{self._adapter_name}.' in k:
                     k = k.replace(f'{self._adapter_name}.', '')
                     new_state_dict[k] = v
             else:
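
Note (not part of the patches above): a minimal, runnable sketch of the key
normalization that PATCH 1 guards, assuming the patched block keeps
`new_state_dict[k] = v` inside the inner branch as reconstructed here. The
helper name `normalize_peft_keys` and the state-dict keys are hypothetical;
only the condition mirrors the patched code.

# Hypothetical helper mirroring the guarded condition from PATCH 1.
def normalize_peft_keys(hf_state_dict, adapter_name):
    new_state_dict = {}
    for k, v in hf_state_dict.items():
        # Only keys that carry *this* adapter's name are stripped and kept;
        # keys from other adapters in a multi-LoRA checkpoint are skipped.
        if ('.lora_A.' in k or '.lora_B.' in k
                or '.modules_to_save.' in k) and f'{adapter_name}.' in k:
            k = k.replace(f'{adapter_name}.', '')
            new_state_dict[k] = v
    return new_state_dict


# Two adapters loaded side by side (key names are made up for illustration).
state_dict = {
    'layers.0.q_proj.lora_A.default.weight': 'w_default',
    'layers.0.q_proj.lora_A.other.weight': 'w_other',
}

# Before the fix, '.lora_A.' alone matched both adapters, so the 'other'
# adapter's key passed through with its adapter name left in place. With
# the guard, only the requested adapter's weights survive:
print(normalize_peft_keys(state_dict, 'default'))
# {'layers.0.q_proj.lora_A.weight': 'w_default'}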