commit: common lr checking for dadaptation and prodigy
repository: https://github.com/kohya-ss/sd-scripts.git
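This commit deduplicates the learning-rate sanity checks that previously appeared twice in get_optimizer, once in the D-Adaptation branch and once in the Prodigy branch. Both optimizer families estimate their own step size, so the user-supplied lr acts roughly as a multiplier and is recommended to be around 1.0; the merged branch now performs that check once, then dispatches to the optimizer-specific setup. Short hedged sketches after the hunks below illustrate the retained logic.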
@@ -2752,15 +2752,7 @@ def get_optimizer(args, trainable_params):
         optimizer_class = torch.optim.SGD
         optimizer = optimizer_class(trainable_params, lr=lr, nesterov=True, **optimizer_kwargs)
 
-    elif optimizer_type.startswith("DAdapt".lower()):
-        # DAdaptation family
-        # check dadaptation is installed
-        try:
-            import dadaptation
-            import dadaptation.experimental as experimental
-        except ImportError:
-            raise ImportError("No dadaptation / dadaptation がインストールされていないようです")
-
+    elif optimizer_type.startswith("DAdapt".lower()) or optimizer_type == "Prodigy".lower():
         # check lr and lr_count, and print warning
         actual_lr = lr
         lr_count = 1
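The retained check above derives actual_lr and lr_count from trainable_params, which may arrive either as a flat parameter list or as a list of optimizer param groups. A minimal, runnable sketch of that logic follows; check_lr is a hypothetical helper name for illustration (in the diff this code runs inline inside get_optimizer):

    # Standalone sketch of the shared lr check, matching the diff's logic.
    def check_lr(trainable_params, lr):
        actual_lr = lr
        lr_count = 1
        # param-group form: [{"params": [...], "lr": ...}, ...]
        if type(trainable_params) == list and type(trainable_params[0]) == dict:
            lrs = set()
            actual_lr = trainable_params[0].get("lr", actual_lr)
            for group in trainable_params:
                lrs.add(group.get("lr", actual_lr))
            lr_count = len(lrs)
        return actual_lr, lr_count

    # Adam-style lrs for Text Encoder and U-Net: returns (5e-05, 2), so both
    # the "lr too low" and the "multiple lrs" warnings would fire.
    print(check_lr([{"params": [], "lr": 5e-5}, {"params": [], "lr": 1e-4}], 1e-4))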
@@ -2773,14 +2765,23 @@ def get_optimizer(args, trainable_params):
 
         if actual_lr <= 0.1:
             print(
-                f"learning rate is too low. If using dadaptation, set learning rate around 1.0 / 学習率が低すぎるようです。1.0前後の値を指定してください: lr={actual_lr}"
+                f"learning rate is too low. If using D-Adaptation or Prodigy, set learning rate around 1.0 / 学習率が低すぎるようです。D-AdaptationまたはProdigyの使用時は1.0前後の値を指定してください: lr={actual_lr}"
             )
             print("recommend option: lr=1.0 / 推奨は1.0です")
         if lr_count > 1:
             print(
-                f"when multiple learning rates are specified with dadaptation (e.g. for Text Encoder and U-Net), only the first one will take effect / D-Adaptationで複数の学習率を指定した場合(Text EncoderとU-Netなど)、最初の学習率のみが有効になります: lr={actual_lr}"
+                f"when multiple learning rates are specified with dadaptation (e.g. for Text Encoder and U-Net), only the first one will take effect / D-AdaptationまたはProdigyで複数の学習率を指定した場合(Text EncoderとU-Netなど)、最初の学習率のみが有効になります: lr={actual_lr}"
             )
 
+        if optimizer_type.startswith("DAdapt".lower()):
+            # DAdaptation family
+            # check dadaptation is installed
+            try:
+                import dadaptation
+                import dadaptation.experimental as experimental
+            except ImportError:
+                raise ImportError("No dadaptation / dadaptation がインストールされていないようです")
+
             # set optimizer
             if optimizer_type == "DAdaptation".lower() or optimizer_type == "DAdaptAdamPreprint".lower():
                 optimizer_class = experimental.DAdaptAdamPreprint
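One detail worth noting in the merged condition: the comparisons against "DAdapt".lower() and "Prodigy".lower() imply that optimizer_type has already been lower-cased, so the startswith test routes every DAdapt* variant (DAdaptation, DAdaptAdamPreprint, DAdaptLion, ...) into this branch, while Prodigy needs an exact match. A small demo under that assumption:

    # Demo of the branch condition only; assumes optimizer_type has already
    # been lower-cased, as the comparisons in the diff imply.
    for optimizer_type in ["dadaptation", "dadaptadampreprint", "dadaptlion", "prodigy", "adamw"]:
        shared = optimizer_type.startswith("DAdapt".lower()) or optimizer_type == "Prodigy".lower()
        print(f"{optimizer_type:20} shared lr check: {shared}")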
@@ -2807,8 +2808,7 @@ def get_optimizer(args, trainable_params):
                 raise ValueError(f"Unknown optimizer type: {optimizer_type}")
 
             optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)
-
-    elif optimizer_type == "Prodigy".lower():
+        else:
             # Prodigy
             # check Prodigy is installed
             try:
@@ -2816,26 +2816,6 @@ def get_optimizer(args, trainable_params):
             except ImportError:
                 raise ImportError("No Prodigy / Prodigy がインストールされていないようです")
 
-        # check lr and lr_count, and print warning
-        actual_lr = lr
-        lr_count = 1
-        if type(trainable_params) == list and type(trainable_params[0]) == dict:
-            lrs = set()
-            actual_lr = trainable_params[0].get("lr", actual_lr)
-            for group in trainable_params:
-                lrs.add(group.get("lr", actual_lr))
-            lr_count = len(lrs)
-
-        if actual_lr <= 0.1:
-            print(
-                f"learning rate is too low. If using Prodigy, set learning rate around 1.0 / 学習率が低すぎるようです。1.0前後の値を指定してください: lr={actual_lr}"
-            )
-            print("recommend option: lr=1.0 / 推奨は1.0です")
-        if lr_count > 1:
-            print(
-                f"when multiple learning rates are specified with Prodigy (e.g. for Text Encoder and U-Net), only the first one will take effect / Prodigyで複数の学習率を指定した場合(Text EncoderとU-Netなど)、最初の学習率のみが有効になります: lr={actual_lr}"
-            )
-
             print(f"use Prodigy optimizer | {optimizer_kwargs}")
             optimizer_class = prodigyopt.Prodigy
             optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)
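For reference, the Prodigy path boils down to the construction kept in the hunk above. A minimal usage sketch, assuming the prodigyopt package is installed (pip install prodigyopt); the Prodigy(params, lr=...) call matches the one in the diff, and the Linear model is a stand-in:

    import torch
    import prodigyopt

    model = torch.nn.Linear(4, 4)  # stand-in for the real trainable network
    # lr acts as a multiplier on Prodigy's self-estimated step size, so 1.0
    # is the recommended setting (matching the warning added above).
    optimizer = prodigyopt.Prodigy(model.parameters(), lr=1.0)

    loss = model(torch.randn(2, 4)).sum()
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()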