From 0cfcb5a49cf813547d728101cc05edf1a9b7d06c Mon Sep 17 00:00:00 2001
From: Kohya S
Date: Sat, 24 Jun 2023 08:36:09 +0900
Subject: [PATCH] fix lr/d*lr not being logged with Prodigy in fine_tune

---
 fine_tune.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/fine_tune.py b/fine_tune.py
index d0013d53..fbb9e54c 100644
--- a/fine_tune.py
+++ b/fine_tune.py
@@ -397,7 +397,7 @@ def train(args):
             current_loss = loss.detach().item()  # this is an average, so batch size should not matter
             if args.logging_dir is not None:
                 logs = {"loss": current_loss, "lr": float(lr_scheduler.get_last_lr()[0])}
-                if args.optimizer_type.lower().startswith("DAdapt".lower()) or args.optimizer_type.lower() == "Prodigy":  # tracking d*lr value
+                if args.optimizer_type.lower().startswith("DAdapt".lower()) or args.optimizer_type.lower() == "Prodigy".lower():  # tracking d*lr value
                     logs["lr/d*lr"] = (
                         lr_scheduler.optimizers[0].param_groups[0]["d"] * lr_scheduler.optimizers[0].param_groups[0]["lr"]
                     )
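
Note (commentary, not part of the patch): the one-character fix works because str.lower()
lowercases the whole string, so args.optimizer_type.lower() can never equal the mixed-case
literal "Prodigy"; the d*lr logging branch was therefore unreachable for the Prodigy
optimizer. A minimal Python sketch of the comparison, using a hypothetical optimizer_type
value for illustration:

    # Hypothetical value; any casing the user passes behaves the same way.
    optimizer_type = "Prodigy"

    # Old check: a lowercased string never equals a literal with an uppercase letter.
    print(optimizer_type.lower() == "Prodigy")          # False

    # Fixed check: lowercase both sides, matching how the DAdapt prefix is compared.
    print(optimizer_type.lower() == "Prodigy".lower())  # True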