From 3ce0c6e71f08e0030cb1f16543687f49999b7933 Mon Sep 17 00:00:00 2001
From: Symbiomatrix
Date: Sun, 22 Jun 2025 01:00:59 +0300
Subject: [PATCH] Fix LORAFMT f-string interpolation in resize_lora.py

---
 networks/resize_lora.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/networks/resize_lora.py b/networks/resize_lora.py
index 531a7e7b..49b71481 100644
--- a/networks/resize_lora.py
+++ b/networks/resize_lora.py
@@ -240,7 +240,7 @@ def resize_lora_model(lora_sd, new_rank, new_conv_rank, save_dtype, device, dyna
     for key, value in tqdm(lora_sd.items()):
         weight_name = None
         if LORAFMT[0] in key:
-            block_down_name = key.rsplit(f".LORAFMT[0]", 1)[0]
+            block_down_name = key.rsplit(f".{LORAFMT[0]}", 1)[0]
             weight_name = key.rsplit(".", 1)[-1]
             lora_down_weight = value
         else:
@@ -248,7 +248,7 @@ def resize_lora_model(lora_sd, new_rank, new_conv_rank, save_dtype, device, dyna
 
         # find corresponding lora_up and alpha
         block_up_name = block_down_name
-        lora_up_weight = lora_sd.get(block_up_name + f".LORAFMT[1]." + weight_name, None)
+        lora_up_weight = lora_sd.get(block_up_name + f".{LORAFMT[1]}." + weight_name, None)
         lora_alpha = lora_sd.get(block_down_name + ".alpha", None)
 
         weights_loaded = lora_down_weight is not None and lora_up_weight is not None
@@ -286,8 +286,8 @@ def resize_lora_model(lora_sd, new_rank, new_conv_rank, save_dtype, device, dyna
                 verbose_str += "\n"
 
             new_alpha = param_dict["new_alpha"]
-            o_lora_sd[block_down_name + f".LORAFMT[0].weight"] = param_dict[LORAFMT[0]].to(save_dtype).contiguous()
-            o_lora_sd[block_up_name + f".LORAFMT[1].weight"] = param_dict[LORAFMT[1]].to(save_dtype).contiguous()
+            o_lora_sd[block_down_name + f".{LORAFMT[0]}.weight"] = param_dict[LORAFMT[0]].to(save_dtype).contiguous()
+            o_lora_sd[block_up_name + f".{LORAFMT[1]}.weight"] = param_dict[LORAFMT[1]].to(save_dtype).contiguous()
             o_lora_sd[block_up_name + ".alpha"] = torch.tensor(param_dict["new_alpha"]).to(save_dtype)
 
             block_down_name = None