Compare commits

...

5 Commits

Author | SHA1 | Message | Date
Kohya S. | ef8fe92126 | Merge a437949d47 into 5f793fb0f4 | 2026-03-29 18:47:19 +09:00
Kohya S. | 5f793fb0f4 | Log d*lr for ProdigyPlusScheduleFree (#2289) | 2026-03-29 18:47:09 +09:00
Kohya S. | feb38356ea | Merge pull request #2291 from kohya-ss/fix-anima-validation-with-text-encoder-output-cache (fix: Anima validation dataset not working with Text Encoder output cache) | 2026-03-22 22:23:27 +09:00
Kohya S | cdb49f9fe7 | fix: Anima validation dataset not working with Text Encoder output caching due to caption dropout | 2026-03-22 22:19:47 +09:00
woctordho | 343c929e39 | Log d*lr for ProdigyPlusScheduleFree | 2026-03-21 11:09:56 +08:00
3 changed files with 18 additions and 28 deletions

View File

@@ -286,7 +286,9 @@ class AnimaNetworkTrainer(train_network.NetworkTrainer):
 t.requires_grad_(True)
 # Unpack text encoder conditions
-prompt_embeds, attn_mask, t5_input_ids, t5_attn_mask = text_encoder_conds
+prompt_embeds, attn_mask, t5_input_ids, t5_attn_mask = text_encoder_conds[
+    :4
+]  # ignore caption_dropout_rate which is not needed for training step
 # Move to device
 prompt_embeds = prompt_embeds.to(accelerator.device, dtype=weight_dtype)
@@ -353,7 +355,8 @@ class AnimaNetworkTrainer(train_network.NetworkTrainer):
 text_encoder_outputs_list = anima_text_encoding_strategy.drop_cached_text_encoder_outputs(
     *text_encoder_outputs_list, caption_dropout_rates=caption_dropout_rates
 )
-batch["text_encoder_outputs_list"] = text_encoder_outputs_list
+# Add the caption dropout rates back to the list for the validation dataset (whose batch items are re-used)
+batch["text_encoder_outputs_list"] = text_encoder_outputs_list + [caption_dropout_rates]
 return super().process_batch(
     batch,
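The two hunks above work together: the validation path appends caption_dropout_rates back onto the cached Text Encoder output list so a re-used validation batch still carries it, and the training step then slices off only the first four entries. A minimal sketch of that pattern, assuming placeholder tensor shapes and a simplified drop_cached_outputs stand-in (not the actual Anima strategy classes):

import torch

def drop_cached_outputs(prompt_embeds, attn_mask, t5_input_ids, t5_attn_mask, caption_dropout_rates):
    # Stand-in for drop_cached_text_encoder_outputs: zero the embeddings of
    # samples whose caption is dropped (simplified placeholder logic).
    keep = (torch.rand(len(caption_dropout_rates)) >= torch.tensor(caption_dropout_rates)).float()
    prompt_embeds = prompt_embeds * keep.view(-1, 1, 1)
    return [prompt_embeds, attn_mask, t5_input_ids, t5_attn_mask]

batch_size = 2
cached = [
    torch.randn(batch_size, 77, 768),               # prompt_embeds (placeholder shape)
    torch.ones(batch_size, 77),                     # attn_mask
    torch.zeros(batch_size, 77, dtype=torch.long),  # t5_input_ids
    torch.ones(batch_size, 77),                     # t5_attn_mask
]
caption_dropout_rates = [0.0, 0.1]

# Apply dropout, then append the rates back so a re-used validation batch
# still has them on the next pass (mirrors the second hunk above).
outputs = drop_cached_outputs(*cached, caption_dropout_rates=caption_dropout_rates)
batch_outputs = outputs + [caption_dropout_rates]

# The training step only needs the first four entries; slicing with [:4]
# avoids a "too many values to unpack" error caused by the extra fifth
# element (mirrors the first hunk above).
prompt_embeds, attn_mask, t5_input_ids, t5_attn_mask = batch_outputs[:4]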

View File

@@ -6265,10 +6265,14 @@ def append_lr_to_logs_with_names(logs, lr_scheduler, optimizer_type, names):
 name = names[lr_index]
 logs["lr/" + name] = float(lrs[lr_index])
-if optimizer_type.lower().startswith("DAdapt".lower()) or optimizer_type.lower() == "Prodigy".lower():
+if optimizer_type.lower().startswith("DAdapt".lower()) or optimizer_type.lower().startswith("Prodigy".lower()):
     logs["lr/d*lr/" + name] = (
         lr_scheduler.optimizers[-1].param_groups[lr_index]["d"] * lr_scheduler.optimizers[-1].param_groups[lr_index]["lr"]
     )
+    if "effective_lr" in lr_scheduler.optimizers[-1].param_groups[lr_index]:
+        logs["lr/d*eff_lr/" + name] = (
+            lr_scheduler.optimizers[-1].param_groups[lr_index]["d"] * lr_scheduler.optimizers[-1].param_groups[lr_index]["effective_lr"]
+        )
 # scheduler:
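The logic this hunk adds can be exercised in isolation. The sketch below assumes, as the diff does, that Prodigy-family optimizers expose "d" (and, for ProdigyPlusScheduleFree, possibly "effective_lr") in each param group; the dummy optimizer is only for illustration.

def append_d_times_lr(logs, optimizer, names):
    # Prodigy-style optimizers adapt their step size via a scalar "d", so the
    # quantity worth tracking per group is d * lr (and d * effective_lr when
    # the optimizer reports one, as ProdigyPlusScheduleFree may).
    for idx, group in enumerate(optimizer.param_groups):
        name = names[idx]
        logs[f"lr/d*lr/{name}"] = group["d"] * group["lr"]
        if "effective_lr" in group:
            logs[f"lr/d*eff_lr/{name}"] = group["d"] * group["effective_lr"]
    return logs

class DummyOptimizer:
    # Minimal stand-in exposing only the fields used above.
    param_groups = [{"d": 0.5, "lr": 1.0, "effective_lr": 0.8}]

print(append_d_times_lr({}, DummyOptimizer(), ["unet"]))
# {'lr/d*lr/unet': 0.5, 'lr/d*eff_lr/unet': 0.4}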

View File

@@ -90,40 +90,23 @@ class NetworkTrainer:
 if lr_descriptions is not None:
     lr_desc = lr_descriptions[i]
 else:
-    idx = i - (0 if args.network_train_unet_only else -1)
+    idx = i - (0 if args.network_train_unet_only else 1)
     if idx == -1:
         lr_desc = "textencoder"
     else:
         if len(lrs) > 2:
-            lr_desc = f"group{idx}"
+            lr_desc = f"group{i}"
         else:
             lr_desc = "unet"
 logs[f"lr/{lr_desc}"] = lr
-if args.optimizer_type.lower().startswith("DAdapt".lower()) or args.optimizer_type.lower() == "Prodigy".lower():
-    # tracking d*lr value
-    logs[f"lr/d*lr/{lr_desc}"] = (
-        lr_scheduler.optimizers[-1].param_groups[i]["d"] * lr_scheduler.optimizers[-1].param_groups[i]["lr"]
-    )
-if (
-    args.optimizer_type.lower().endswith("ProdigyPlusScheduleFree".lower()) and optimizer is not None
-):  # tracking d*lr value of unet.
-    logs["lr/d*lr"] = optimizer.param_groups[0]["d"] * optimizer.param_groups[0]["lr"]
-else:
-    idx = 0
-    if not args.network_train_unet_only:
-        logs["lr/textencoder"] = float(lrs[0])
-        idx = 1
-    for i in range(idx, len(lrs)):
-        logs[f"lr/group{i}"] = float(lrs[i])
-        if args.optimizer_type.lower().startswith("DAdapt".lower()) or args.optimizer_type.lower() == "Prodigy".lower():
-            logs[f"lr/d*lr/group{i}"] = (
-                lr_scheduler.optimizers[-1].param_groups[i]["d"] * lr_scheduler.optimizers[-1].param_groups[i]["lr"]
-            )
-        if args.optimizer_type.lower().endswith("ProdigyPlusScheduleFree".lower()) and optimizer is not None:
-            logs[f"lr/d*lr/group{i}"] = optimizer.param_groups[i]["d"] * optimizer.param_groups[i]["lr"]
+if args.optimizer_type.lower().startswith("DAdapt".lower()) or args.optimizer_type.lower().startswith("Prodigy".lower()):
+    opt = lr_scheduler.optimizers[-1] if hasattr(lr_scheduler, "optimizers") else optimizer
+    if opt is not None:
+        logs[f"lr/d*lr/{lr_desc}"] = opt.param_groups[i]["d"] * opt.param_groups[i]["lr"]
+        if "effective_lr" in opt.param_groups[i]:
+            logs[f"lr/d*eff_lr/{lr_desc}"] = opt.param_groups[i]["d"] * opt.param_groups[i]["effective_lr"]
 return logs
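The string predicate is the other half of this change: switching from an exact match on "Prodigy" to a prefix match lets ProdigyPlusScheduleFree take the same d*lr logging path as Prodigy and the DAdapt* family. A quick illustrative check (not part of the diff):

def wants_d_times_lr(optimizer_type: str) -> bool:
    # Prefix match, case-insensitive as in the diff above, so that Prodigy,
    # ProdigyPlusScheduleFree, and DAdapt* optimizers are all covered.
    t = optimizer_type.lower()
    return t.startswith("dadapt") or t.startswith("prodigy")

for opt_name in ["Prodigy", "ProdigyPlusScheduleFree", "DAdaptAdam", "AdamW"]:
    print(opt_name, wants_d_times_lr(opt_name))
# Prodigy True, ProdigyPlusScheduleFree True, DAdaptAdam True, AdamW False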