Fix default_lr not being applied when text_encoder_lr/unet_lr are unset

This commit is contained in:
rockerBOO
2024-04-03 12:46:34 -04:00
parent c7691607ea
commit 1933ab4b48
3 changed files with 64 additions and 17 deletions

View File

@@ -1035,7 +1035,14 @@ class LoRANetwork(torch.nn.Module):
return lr_weight
# 二つのText Encoderに別々の学習率を設定できるようにするといいかも
def prepare_optimizer_params(self, text_encoder_lr, unet_lr, default_lr, unet_lora_plus_ratio=None, text_encoder_lora_plus_ratio=None):
def prepare_optimizer_params(
self,
text_encoder_lr,
unet_lr,
default_lr,
unet_lora_plus_ratio=None,
text_encoder_lora_plus_ratio=None
):
self.requires_grad_(True)
all_params = []
@@ -1070,7 +1077,11 @@ class LoRANetwork(torch.nn.Module):
return params
if self.text_encoder_loras:
params = assemble_params(self.text_encoder_loras, text_encoder_lr, text_encoder_lora_plus_ratio)
params = assemble_params(
self.text_encoder_loras,
text_encoder_lr if text_encoder_lr is not None else default_lr,
text_encoder_lora_plus_ratio
)
all_params.extend(params)
if self.unet_loras:
@@ -1085,14 +1096,19 @@ class LoRANetwork(torch.nn.Module):
# blockごとにパラメータを設定する
for idx, block_loras in block_idx_to_lora.items():
if unet_lr is not None:
params = assemble_params(block_loras, unet_lr * self.get_lr_weight(block_loras[0]), unet_lora_plus_ratio)
elif default_lr is not None:
params = assemble_params(block_loras, default_lr * self.get_lr_weight(block_loras[0]), unet_lora_plus_ratio)
params = assemble_params(
block_loras,
(unet_lr if unet_lr is not None else default_lr) * self.get_lr_weight(block_loras[0]),
unet_lora_plus_ratio
)
all_params.extend(params)
else:
params = assemble_params(self.unet_loras, unet_lr, unet_lora_plus_ratio)
params = assemble_params(
self.unet_loras,
default_lr if unet_lr is None else unet_lr,
unet_lora_plus_ratio
)
all_params.extend(params)
return all_params