Merge pull request #1640 from sdbds/ademamix8bit

New optimizers: AdEMAMix8bit and PagedAdEMAMix8bit
This commit is contained in:
Kohya S.
2024-09-26 21:20:19 +09:00
committed by GitHub
2 changed files with 3 additions and 3 deletions

View File

@@ -3014,7 +3014,7 @@ def add_optimizer_arguments(parser: argparse.ArgumentParser):
"--optimizer_type", "--optimizer_type",
type=str, type=str,
default="", default="",
help="Optimizer to use / オプティマイザの種類: AdamW (default), AdamW8bit, PagedAdamW, PagedAdamW8bit, PagedAdamW32bit, Lion8bit, PagedLion8bit, Lion, SGDNesterov, SGDNesterov8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, AdaFactor", help="Optimizer to use / オプティマイザの種類: AdamW (default), AdamW8bit, PagedAdamW, PagedAdamW8bit, PagedAdamW32bit, Lion8bit, PagedLion8bit, Lion, AdEMAMix8bit, PagedAdEMAMix8bit, SGDNesterov, SGDNesterov8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, AdaFactor",
) )
# backward compatibility # backward compatibility
@@ -4052,7 +4052,7 @@ def resume_from_local_or_hf_if_specified(accelerator, args):
def get_optimizer(args, trainable_params): def get_optimizer(args, trainable_params):
# "Optimizer to use: AdamW, AdamW8bit, Lion, SGDNesterov, SGDNesterov8bit, PagedAdamW, PagedAdamW8bit, PagedAdamW32bit, Lion8bit, PagedLion8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, Adafactor" # "Optimizer to use: AdamW, AdamW8bit, Lion, SGDNesterov, SGDNesterov8bit, PagedAdamW, PagedAdamW8bit, PagedAdamW32bit, Lion8bit, PagedLion8bit, AdEMAMix8bit, PagedAdEMAMix8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, Adafactor"
optimizer_type = args.optimizer_type optimizer_type = args.optimizer_type
if args.use_8bit_adam: if args.use_8bit_adam:

View File

@@ -6,7 +6,7 @@ ftfy==6.1.1
opencv-python==4.8.1.78 opencv-python==4.8.1.78
einops==0.7.0 einops==0.7.0
pytorch-lightning==1.9.0 pytorch-lightning==1.9.0
bitsandbytes==0.43.0 bitsandbytes==0.44.0
prodigyopt==1.0 prodigyopt==1.0
lion-pytorch==0.0.6 lion-pytorch==0.0.6
tensorboard tensorboard