diff --git a/fine_tune.py b/fine_tune.py
index 2c4d3685..b82a67ae 100644
--- a/fine_tune.py
+++ b/fine_tune.py
@@ -271,8 +271,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
diff --git a/requirements.txt b/requirements.txt
index e5fee6cf..f4d84dec 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-accelerate==0.29.2
+accelerate==0.30.0
 transformers==4.36.2
 diffusers[torch]==0.25.0
 ftfy==6.1.1
@@ -9,6 +9,7 @@ pytorch-lightning==1.9.0
 bitsandbytes==0.43.0
 prodigyopt==1.0
 lion-pytorch==0.0.6
+schedulefree==1.2.5
 tensorboard
 safetensors==0.4.2
 # gradio==3.16.2
diff --git a/sdxl_train.py b/sdxl_train.py
index ed5a6493..3f36d439 100644
--- a/sdxl_train.py
+++ b/sdxl_train.py
@@ -435,8 +435,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
diff --git a/sdxl_train_control_net_lllite.py b/sdxl_train_control_net_lllite.py
index 54b6d0b0..6e0c2c8a 100644
--- a/sdxl_train_control_net_lllite.py
+++ b/sdxl_train_control_net_lllite.py
@@ -294,8 +294,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
diff --git a/sdxl_train_control_net_lllite_old.py b/sdxl_train_control_net_lllite_old.py
index babaa026..669994ce 100644
--- a/sdxl_train_control_net_lllite_old.py
+++ b/sdxl_train_control_net_lllite_old.py
@@ -262,8 +262,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
diff --git a/train_controlnet.py b/train_controlnet.py
index bc9da356..1785607b 100644
--- a/train_controlnet.py
+++ b/train_controlnet.py
@@ -284,8 +284,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
diff --git a/train_db.py b/train_db.py
index c56630da..c69d48d4 100644
--- a/train_db.py
+++ b/train_db.py
@@ -247,8 +247,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
diff --git a/train_textual_inversion_XTI.py b/train_textual_inversion_XTI.py
index a9d10d6e..180c64f6 100644
--- a/train_textual_inversion_XTI.py
+++ b/train_textual_inversion_XTI.py
@@ -342,8 +342,8 @@ def train(args):
 
     # make lambda function for calling optimizer.train() and optimizer.eval() if schedule-free optimizer is used
     if use_schedule_free_optimizer:
-        optimizer_train_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).train()
-        optimizer_eval_if_needed = lambda: (optimizer.optimizer if hasattr(optimizer, "optimizer") else optimizer).eval()
+        optimizer_train_if_needed = lambda: optimizer.train()
+        optimizer_eval_if_needed = lambda: optimizer.eval()
     else:
         optimizer_train_if_needed = lambda: None
         optimizer_eval_if_needed = lambda: None
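
Note for reviewers (not part of the patch): the lambdas above can now call optimizer.train()/optimizer.eval() directly because they rely on the optimizer wrapper forwarding those calls to the underlying schedule-free optimizer, which is presumably why accelerate is bumped to 0.30.0 in the same change. As a minimal sketch of the contract these lambdas wrap — assuming schedulefree==1.2.5's AdamWScheduleFree; the model, data, and learning rate below are placeholders:

    import torch
    import schedulefree

    model = torch.nn.Linear(4, 1)
    # Schedule-free optimizers keep two parameter states and expose
    # train()/eval() to switch between them.
    optimizer = schedulefree.AdamWScheduleFree(model.parameters(), lr=1e-3)

    for step in range(100):
        optimizer.train()  # must precede forward/backward/step
        loss = model(torch.randn(8, 4)).pow(2).mean()
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

    optimizer.eval()  # must precede evaluation, sampling, or checkpointing
    with torch.no_grad():
        val_loss = model(torch.randn(8, 4)).pow(2).mean()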