Mirror of https://github.com/kohya-ss/sd-scripts.git, synced 2026-04-15 16:39:42 +00:00
simplify code by updating accelerate to 0.30.0
@@ -271,8 +271,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
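Why the removed guard existed: `accelerator.prepare()` wraps the optimizer in accelerate's `AcceleratedOptimizer`, which keeps the real optimizer in its `.optimizer` attribute, so the old code reached through the wrapper to call the schedule-free `train()`/`eval()` methods. The premise of this commit is that accelerate 0.30.0 forwards those calls through the wrapper itself. A minimal sketch of before vs. after, assuming a default CPU `Accelerator`, a toy model, and the schedulefree package pinned in the requirements below:

import torch
import schedulefree
from accelerate import Accelerator

model = torch.nn.Linear(4, 1)
optimizer = schedulefree.AdamWScheduleFree(model.parameters(), lr=1e-3)
model, optimizer = Accelerator().prepare(model, optimizer)

# Removed pattern: unwrap the AcceleratedOptimizer if present, then call train().
(optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()

# With accelerate>=0.30.0 the wrapper forwards train()/eval(), so this suffices:
optimizer.train()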
@@ -1,4 +1,4 @@
|
||||
accelerate==0.29.2
|
||||
accelerate==0.30.0
|
||||
transformers==4.36.2
|
||||
diffusers[torch]==0.25.0
|
||||
ftfy==6.1.1
|
||||
@@ -9,6 +9,7 @@ pytorch-lightning==1.9.0
|
||||
bitsandbytes==0.43.0
|
||||
prodigyopt==1.0
|
||||
lion-pytorch==0.0.6
|
||||
schedulefree==1.2.5
|
||||
tensorboard
|
||||
safetensors==0.4.2
|
||||
# gradio==3.16.2
|
||||
|
||||
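A quick post-upgrade sanity check one might run; this is a sketch, not part of the commit, and it assumes the `packaging` library is available (it usually is as a transitive dependency):

from importlib.metadata import version
from packaging.version import Version

# accelerate>=0.30.0 is what allows optimizer.train()/eval() to be called
# directly on the prepared optimizer; schedulefree provides the optimizers.
assert Version(version("accelerate")) >= Version("0.30.0")
assert Version(version("schedulefree")) >= Version("1.2.5")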
@@ -435,8 +435,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
@@ -294,8 +294,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
@@ -262,8 +262,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
@@ -284,8 +284,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
@@ -247,8 +247,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
    if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
@@ -342,8 +342,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
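For reference, how the two helpers are meant to bracket the loop: schedule-free optimizers require an explicit `optimizer.train()` before stepping and `optimizer.eval()` (which switches to the averaged weights) before evaluation or checkpointing, while the `lambda: None` branch makes the calls harmless for ordinary optimizers. A self-contained toy sketch reusing the names from the diff; the model, data, and loop are illustrative, not code from the repository:

import torch
import schedulefree

model = torch.nn.Linear(4, 1)
optimizer = schedulefree.AdamWScheduleFree(model.parameters(), lr=1e-3)
use_schedule_free_optimizer = True

optimizer_train_if_needed = (lambda: optimizer.train()) if use_schedule_free_optimizer else (lambda: None)
optimizer_eval_if_needed = (lambda: optimizer.eval()) if use_schedule_free_optimizer else (lambda: None)

optimizer_train_if_needed()  # must precede optimizer.step() for schedule-free optimizers
for _ in range(100):
    loss = (model(torch.randn(8, 4)) - 1.0).pow(2).mean()
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()

optimizer_eval_if_needed()  # switch to averaged weights before evaluating or saving
print(loss.item())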