From 88dc3213a90fffce3586e2f87fa74cb106488f5a Mon Sep 17 00:00:00 2001
From: Kohya S
Date: Sun, 13 Jul 2025 20:46:24 +0900
Subject: [PATCH] fix: support LoRA w/o TE for create_network_from_weights

---
 networks/lora_lumina.py | 29 ++++++++++++++++-------------
 1 file changed, 16 insertions(+), 13 deletions(-)

diff --git a/networks/lora_lumina.py b/networks/lora_lumina.py
index e4149b4a..0929e839 100644
--- a/networks/lora_lumina.py
+++ b/networks/lora_lumina.py
@@ -562,23 +562,26 @@ class LoRANetwork(torch.nn.Module):
 
                             # Set dim/alpha to modules dim/alpha
                             if modules_dim is not None and modules_alpha is not None:
-                                # モジュール指定あり
+                                # network from weights
                                 if lora_name in modules_dim:
                                     dim = modules_dim[lora_name]
                                     alpha = modules_alpha[lora_name]
+                                else:
+                                    dim = 0  # skip if not found
 
-                            # Set dims to type_dims
-                            if is_lumina and type_dims is not None:
-                                identifier = [
-                                    ("attention",),  # attention layers
-                                    ("mlp",),  # MLP layers
-                                    ("modulation",),  # modulation layers
-                                    ("refiner",),  # refiner blocks
-                                ]
-                                for i, d in enumerate(type_dims):
-                                    if d is not None and all([id in lora_name for id in identifier[i]]):
-                                        dim = d  # may be 0 for skip
-                                        break
+                            else:
+                                # Set dims to type_dims
+                                if is_lumina and type_dims is not None:
+                                    identifier = [
+                                        ("attention",),  # attention layers
+                                        ("mlp",),  # MLP layers
+                                        ("modulation",),  # modulation layers
+                                        ("refiner",),  # refiner blocks
+                                    ]
+                                    for i, d in enumerate(type_dims):
+                                        if d is not None and all([id in lora_name for id in identifier[i]]):
+                                            dim = d  # may be 0 for skip
+                                            break
 
                             # Drop blocks if we are only training some blocks
                             if (
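
Note (illustration only, not part of the patch): below is a minimal, self-contained sketch of the per-module dim/alpha selection after this change. The helper name resolve_dim_alpha and the sample layer names are hypothetical and do not reflect the real LoRANetwork internals; they only mirror the branch logic shown in the hunk. The idea of the fix: when the network is created from weights (modules_dim/modules_alpha given), a module that is missing from the weight file gets dim = 0 and is skipped, and the type_dims overrides are only consulted when a fresh network is being built.

from typing import Dict, Optional, Sequence, Tuple


def resolve_dim_alpha(
    lora_name: str,
    default_dim: int,
    default_alpha: float,
    modules_dim: Optional[Dict[str, int]] = None,
    modules_alpha: Optional[Dict[str, float]] = None,
    type_dims: Optional[Sequence[Optional[int]]] = None,
) -> Tuple[int, float]:
    """Hypothetical helper mirroring the patched branch logic (sketch, not the real code)."""
    dim, alpha = default_dim, default_alpha

    if modules_dim is not None and modules_alpha is not None:
        # network created from weights: keep only modules present in the file
        if lora_name in modules_dim:
            dim = modules_dim[lora_name]
            alpha = modules_alpha[lora_name]
        else:
            dim = 0  # skip modules (e.g. text encoder layers) missing from the weights
    else:
        # fresh network: per-layer-type dims may override the default
        identifiers = [("attention",), ("mlp",), ("modulation",), ("refiner",)]
        if type_dims is not None:
            for ids, d in zip(identifiers, type_dims):
                if d is not None and all(i in lora_name for i in ids):
                    dim = d  # may be 0 to skip this layer type
                    break
    return dim, alpha


# A LoRA file trained without text-encoder modules (sample names):
weights_dim = {"lora_unet_layers_0_attention_to_q": 16}
weights_alpha = {"lora_unet_layers_0_attention_to_q": 8.0}
print(resolve_dim_alpha("lora_unet_layers_0_attention_to_q", 4, 1.0, weights_dim, weights_alpha))   # (16, 8.0)
print(resolve_dim_alpha("lora_te_encoder_layers_0_mlp_fc1", 4, 1.0, weights_dim, weights_alpha))     # (0, 1.0) -> skipped

In other words, a LoRA saved without text-encoder modules can now be loaded via create_network_from_weights: the absent modules resolve to dim 0 and are dropped, instead of falling through to the type_dims defaults.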