Mirror of https://github.com/kohya-ss/sd-scripts.git (synced 2026-04-09 06:45:09 +00:00).
Commit: "fix default conv alpha to 1".
This commit is contained in:
@@ -112,7 +112,7 @@ def create_network(multiplier, network_dim, network_alpha, vae, text_encoder, un
   if conv_dim is not None:
     conv_dim = int(conv_dim)
     if conv_alpha is None:
-      conv_alpha = float(conv_dim)
+      conv_alpha = 1.0
     else:
       conv_alpha = float(conv_alpha)

@@ -169,7 +169,7 @@ def create_network_from_weights(multiplier, file, vae, text_encoder, unet, **kwa
     elif 'lora_down' in key:
       dim = value.size()[0]
       modules_dim[lora_name] = dim
-      print(lora_name, value.size(), dim)
+      # print(lora_name, value.size(), dim)

   # support old LoRA without alpha
   for key in modules_dim.keys():
|
|||||||
Reference in New Issue
Block a user