npz check to use subset, add dadap warn close #274

Author: Kohya S
Date: 2023-03-10 21:31:59 +09:00
Parent: 75d1883da6
Commit: 618592c52b


@@ -912,7 +912,7 @@ class FineTuningDataset(BaseDataset):
       if os.path.exists(image_key):
         abs_path = image_key
       else:
-        npz_path = os.path.join(glob.escape(train_data_dir), image_key + ".npz")
+        npz_path = os.path.join(subset.image_dir, image_key + ".npz")
         if os.path.exists(npz_path):
           abs_path = npz_path
         else:
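
For reference, a minimal sketch of the changed lookup: resolve_npz_path is a hypothetical helper, not part of the commit; the actual code inlines this logic in FineTuningDataset and uses subset.image_dir where image_dir appears below, instead of the old glob.escape(train_data_dir).

import os

def resolve_npz_path(image_dir, image_key):
    # Hypothetical helper: if image_key is not already an existing path,
    # the cached latents .npz is looked up under the subset's image_dir.
    if os.path.exists(image_key):
        return image_key
    npz_path = os.path.join(image_dir, image_key + ".npz")
    return npz_path if os.path.exists(npz_path) else None

For example, resolve_npz_path(subset.image_dir, image_key) would return the cached .npz path when it exists alongside that subset's images, and None otherwise.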
@@ -1761,15 +1761,22 @@ def get_optimizer(args, trainable_params):
       raise ImportError("No dadaptation / dadaptation がインストールされていないようです")
     print(f"use D-Adaptation Adam optimizer | {optimizer_kwargs}")
 
-    min_lr = lr
+    actual_lr = lr
+    lr_count = 1
     if type(trainable_params) == list and type(trainable_params[0]) == dict:
+      lrs = set()
+      actual_lr = trainable_params[0].get("lr", actual_lr)
       for group in trainable_params:
-        min_lr = min(min_lr, group.get("lr", lr))
+        lrs.add(group.get("lr", actual_lr))
+      lr_count = len(lrs)
 
-    if min_lr <= 0.1:
+    if actual_lr <= 0.1:
       print(
-          f'learning rate is too low. If using dadaptation, set learning rate around 1.0 / 学習率が低すぎるようです。1.0前後の値を指定してください: {min_lr}')
+          f'learning rate is too low. If using dadaptation, set learning rate around 1.0 / 学習率が低すぎるようです。1.0前後の値を指定してください: lr={actual_lr}')
       print('recommend option: lr=1.0 / 推奨は1.0です')
+    if lr_count > 1:
+      print(
+          f"when multiple learning rates are specified with dadaptation (e.g. for Text Encoder and U-Net), only the first one will take effect / D-Adaptationで複数の学習率を指定した場合（Text EncoderとU-Netなど）、最初の学習率のみが有効になります: lr={actual_lr}")
 
     optimizer_class = dadaptation.DAdaptAdam
     optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)