Mirror of https://github.com/kohya-ss/sd-scripts.git (synced 2026-04-08 14:34:23 +00:00)
Merge 1dc45f481a into fa53f71ec0
@@ -1439,6 +1439,24 @@ class NetworkTrainer:
                 if hasattr(network, "update_norms"):
                     network.update_norms()
 
+                if (args.debug_info_steps > 0) and (step % args.debug_info_steps == 0):
+                    params = accelerator.unwrap_model(network).get_trainable_params()
+                    grads, weights, numels = [], [], []
+                    for p in params:
+                        if p.requires_grad:
+                            p_detached = p.detach()
+                            weights.append(p_detached.norm(p=1).item())
+                            if p.grad is not None:
+                                grads.append(p.grad.detach().norm(p=1).item())
+                            else:
+                                grads.append(0.0)
+                            numels.append(p_detached.numel())
+                    total_grad = sum(grads) / sum(numels)
+                    total_weight = sum(weights) / sum(numels)
+                    accelerator.print(
+                        f"\n[Step {step}] avr_grad={total_grad:.4E}, avr_weights={total_weight:.4E}"
+                    )
+
                 optimizer.step()
                 lr_scheduler.step()
                 optimizer.zero_grad(set_to_none=True)
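For readers skimming the hunk: the added block sums the L1 norms of all trainable gradients and weights, then divides each sum by the total element count, so the printed avr_grad and avr_weights are mean absolute values per parameter element. A minimal standalone sketch of the same computation, using a toy nn.Linear as a stand-in for the network (the helper name log_param_norms is illustrative, not part of the commit):

import torch
import torch.nn as nn

def log_param_norms(params, step):
    # Mirror the diff: accumulate per-parameter L1 norms and element counts.
    grads, weights, numels = [], [], []
    for p in params:
        if p.requires_grad:
            p_detached = p.detach()
            weights.append(p_detached.norm(p=1).item())
            # A parameter that never received a gradient contributes 0.0 to the sum.
            grads.append(p.grad.detach().norm(p=1).item() if p.grad is not None else 0.0)
            numels.append(p_detached.numel())
    # Total L1 norm divided by total element count = mean absolute value.
    avr_grad = sum(grads) / sum(numels)
    avr_weights = sum(weights) / sum(numels)
    print(f"[Step {step}] avr_grad={avr_grad:.4E}, avr_weights={avr_weights:.4E}")

model = nn.Linear(4, 2)  # toy stand-in for the trainable network
model(torch.randn(1, 4)).sum().backward()
log_param_norms(model.parameters(), step=100)

Note that sum(numels) is zero when no parameter is trainable, so this sketch, like the diff, assumes at least one trainable parameter exists when the logging branch runs.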
@@ -1734,6 +1752,9 @@ def setup_parser() -> argparse.ArgumentParser:
         help="[EXPERIMENTAL] enable offloading of tensors to CPU during checkpointing for U-Net or DiT, if supported"
         " / 勾配チェックポイント時にテンソルをCPUにオフロードする(U-NetまたはDiTのみ、サポートされている場合)",
     )
+    parser.add_argument(
+        "--debug_info_steps", type=int, default=0, help="Log gradient/weight norms every N steps"
+    )
     parser.add_argument(
         "--no_metadata", action="store_true", help="do not save metadata in output model / メタデータを出力先モデルに保存しない"
     )
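The new flag defaults to 0, and the training-loop guard (args.debug_info_steps > 0) short-circuits in that case, so existing invocations are unaffected and the step % args.debug_info_steps test never divides by zero. A self-contained sketch of how the default interacts with the guard (the bare step loop is illustrative, not the trainer's real loop):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--debug_info_steps", type=int, default=0, help="Log gradient/weight norms every N steps"
)
args = parser.parse_args(["--debug_info_steps", "100"])

for step in range(301):
    # With the default of 0 the first test is False, nothing is logged, and the
    # modulo is never evaluated; here the norms would print at steps 0, 100, 200, 300.
    if (args.debug_info_steps > 0) and (step % args.debug_info_steps == 0):
        print(f"step {step}: would log avr_grad / avr_weights here")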