From ab3a4bbb89c0364291d74861c656a3e002b2fb8b Mon Sep 17 00:00:00 2001
From: tastelikefeet
Date: Thu, 9 Apr 2026 22:15:08 +0800
Subject: [PATCH 1/2] fix

---
 src/twinkle/model/multi_lora.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/src/twinkle/model/multi_lora.py b/src/twinkle/model/multi_lora.py
index df60e9f4..549955ed 100644
--- a/src/twinkle/model/multi_lora.py
+++ b/src/twinkle/model/multi_lora.py
@@ -478,12 +478,19 @@ def _loader(self):
 
     def save_lora_converter(self, name, parameter, adapter_name):
         _lora = self.find_lora(adapter_name)
+        # Negative filter: skip weights belonging to OTHER adapters
+        negative_pattern = re.compile(r'\.lora_\w+\.(?!weight$)(\w+)\.')
+        neg_match = negative_pattern.search(name)
+        if neg_match and neg_match.group(1) != adapter_name:
+            return None
         pattern = re.compile(rf'\.lora_\w+\.{adapter_name}\.')
         pattern_no_adapter = re.compile(r'\.lora_\w+\.weight')
         if (pattern.search(name) or pattern_no_adapter.search(name)) and self.match_target_modules(
                 name, _lora.tenant_config.target_modules):
             _param = torch_util.to_local_tensor(parameter)
-            if 'embedding_A' in name:
+            if _param is None:
+                pass
+            elif 'embedding_A' in name:
                 _param = _param[:, :_lora.tenant_config.r]
             elif 'embedding_B' in name:
                 _param = _param[:_lora.tenant_config.r, :]

From d5a733dfaa35637ac518ffa246d8245f55a02458 Mon Sep 17 00:00:00 2001
From: tastelikefeet
Date: Thu, 9 Apr 2026 23:23:54 +0800
Subject: [PATCH 2/2] fix

---
 src/twinkle/model/multi_lora.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/src/twinkle/model/multi_lora.py b/src/twinkle/model/multi_lora.py
index 549955ed..d8be4832 100644
--- a/src/twinkle/model/multi_lora.py
+++ b/src/twinkle/model/multi_lora.py
@@ -478,14 +478,10 @@ def _loader(self):
 
     def save_lora_converter(self, name, parameter, adapter_name):
         _lora = self.find_lora(adapter_name)
-        # Negative filter: skip weights belonging to OTHER adapters
-        negative_pattern = re.compile(r'\.lora_\w+\.(?!weight$)(\w+)\.')
-        neg_match = negative_pattern.search(name)
-        if neg_match and neg_match.group(1) != adapter_name:
+        # Skip weights belonging to OTHER adapters
+        if re.search(r'\.lora_\w+\.\w+\.', name) and not re.search(rf'\.lora_\w+\.{adapter_name}\.', name):
             return None
-        pattern = re.compile(rf'\.lora_\w+\.{adapter_name}\.')
-        pattern_no_adapter = re.compile(r'\.lora_\w+\.weight')
-        if (pattern.search(name) or pattern_no_adapter.search(name)) and self.match_target_modules(
+        if re.search(rf'\.lora_\w+\.({adapter_name}|weight)', name) and self.match_target_modules(
                 name, _lora.tenant_config.target_modules):
             _param = torch_util.to_local_tensor(parameter)
             if _param is None:
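
For reviewers, the following is a minimal standalone sketch of the name-filtering
logic that PATCH 2/2 converges on. Only the two regular expressions are taken
verbatim from the patch; the function name, the example state-dict keys, and the
'tenant1'/'tenant2' adapter names are hypothetical, invented for illustration,
and the match_target_modules check and tensor slicing are omitted.

    import re

    def save_filter(name: str, adapter_name: str) -> str:
        """Classify a state-dict key the way save_lora_converter's guards do
        after PATCH 2/2 (simplified sketch; slicing and target-module
        matching are omitted)."""
        # Negative filter: the key names SOME adapter, but not the one
        # currently being saved.
        if (re.search(r'\.lora_\w+\.\w+\.', name)
                and not re.search(rf'\.lora_\w+\.{adapter_name}\.', name)):
            return 'skip (other adapter)'
        # Positive filter: the key names this adapter, or is an adapter-less
        # '.lora_X.weight' key (covered by the 'weight' alternative).
        if re.search(rf'\.lora_\w+\.({adapter_name}|weight)', name):
            return 'save'
        return 'ignore'

    # Hypothetical keys; 'tenant1' and 'tenant2' are made-up adapter names.
    for key in ('model.layers.0.q_proj.lora_A.tenant1.weight',
                'model.layers.0.q_proj.lora_A.tenant2.weight',
                'model.layers.0.q_proj.lora_A.weight'):
        print(f'{key} -> {save_filter(key, "tenant1")}')
    # -> save / skip (other adapter) / save

Note that the adapter-less '.lora_A.weight' key escapes the negative filter
because the skip regex requires a trailing dot after the adapter segment, so
it still reaches the save branch via the 'weight' alternative.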