fix placing of requires_grad_ of U-Net

This commit is contained in:
Kohya S
2023-10-01 16:41:48 +09:00
parent 81419f7f32
commit 4cc919607a

View File

@@ -12,10 +12,13 @@ import toml
from tqdm import tqdm
import torch
try:
    import intel_extension_for_pytorch as ipex
    if torch.xpu.is_available():
        from library.ipex import ipex_init
        ipex_init()
except Exception:
    pass
@@ -428,7 +431,9 @@ class NetworkTrainer:
# set top parameter requires_grad = True for gradient checkpointing works
if train_text_encoder:
    t_enc.text_model.embeddings.requires_grad_(True)

# set top parameter requires_grad = True for gradient checkpointing works
if not train_text_encoder:  # train U-Net only
    unet.parameters().__next__().requires_grad_(True)
else:
    unet.eval()