Merge pull request #900 from xzuyn/paged_adamw_32bit
Add PagedAdamW32bit
@@ -2657,7 +2657,7 @@ def add_optimizer_arguments(parser: argparse.ArgumentParser):
         "--optimizer_type",
         type=str,
         default="",
-        help="Optimizer to use / オプティマイザの種類: AdamW (default), AdamW8bit, PagedAdamW8bit, Lion8bit, PagedLion8bit, Lion, SGDNesterov, SGDNesterov8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, AdaFactor",
+        help="Optimizer to use / オプティマイザの種類: AdamW (default), AdamW8bit, PagedAdamW8bit, PagedAdamW32bit, Lion8bit, PagedLion8bit, Lion, SGDNesterov, SGDNesterov8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, AdaFactor",
     )
 
     # backward compatibility
@@ -3370,7 +3370,7 @@ def resume_from_local_or_hf_if_specified(accelerator, args):
 
 
 def get_optimizer(args, trainable_params):
-    # "Optimizer to use: AdamW, AdamW8bit, Lion, SGDNesterov, SGDNesterov8bit, PagedAdamW8bit, Lion8bit, PagedLion8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, Adafactor"
+    # "Optimizer to use: AdamW, AdamW8bit, Lion, SGDNesterov, SGDNesterov8bit, PagedAdamW8bit, PagedAdamW32bit, Lion8bit, PagedLion8bit, DAdaptation(DAdaptAdamPreprint), DAdaptAdaGrad, DAdaptAdam, DAdaptAdan, DAdaptAdanIP, DAdaptLion, DAdaptSGD, Adafactor"
 
     optimizer_type = args.optimizer_type
     if args.use_8bit_adam:
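For context, get_optimizer dispatches on a case-insensitive string match, which is why the new branch in the next hunk compares against "PagedAdamW32bit".lower(). A minimal standalone sketch of that dispatch pattern (simplified; the real function handles many more optimizer types and keyword arguments, and pick_optimizer_class is a hypothetical helper, not a function in sd-scripts):

    def pick_optimizer_class(optimizer_type: str):
        # The comparison is effectively case-insensitive: both sides are lowered.
        if optimizer_type.lower() == "PagedAdamW32bit".lower():
            import bitsandbytes as bnb  # raises ImportError if not installed
            return bnb.optim.PagedAdamW32bit
        raise ValueError(f"unknown optimizer type: {optimizer_type}")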
@@ -3474,6 +3474,20 @@ def get_optimizer(args, trainable_params):
 
         optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)
 
+    elif optimizer_type == "PagedAdamW32bit".lower():
+        print(f"use 32-bit PagedAdamW optimizer | {optimizer_kwargs}")
+        try:
+            import bitsandbytes as bnb
+        except ImportError:
+            raise ImportError("No bitsandbytes / bitsandbytesがインストールされていないようです")
+        try:
+            optimizer_class = bnb.optim.PagedAdamW32bit
+        except AttributeError:
+            raise AttributeError(
+                "No PagedAdamW32bit. The version of bitsandbytes installed seems to be old. Please install 0.39.0 or later. / PagedAdamW32bitが定義されていません。インストールされているbitsandbytesのバージョンが古いようです。0.39.0以上をインストールしてください"
+            )
+        optimizer = optimizer_class(trainable_params, lr=lr, **optimizer_kwargs)
+
     elif optimizer_type == "SGDNesterov".lower():
         print(f"use SGD with Nesterov optimizer | {optimizer_kwargs}")
         if "momentum" not in optimizer_kwargs:
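For a quick sanity check of the new optimizer outside the training scripts, here is a hedged, self-contained usage sketch (assumes a CUDA GPU and bitsandbytes >= 0.39.0; the model and learning rate are illustrative, not from the PR):

    import torch
    import bitsandbytes as bnb  # PagedAdamW32bit requires bitsandbytes >= 0.39.0

    # PagedAdamW32bit keeps full 32-bit optimizer state, but allocates it as
    # paged memory that can spill to CPU RAM under GPU memory pressure.
    model = torch.nn.Linear(128, 128).cuda()
    optimizer = bnb.optim.PagedAdamW32bit(model.parameters(), lr=1e-4)

    loss = model(torch.randn(4, 128, device="cuda")).sum()
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()

In the training scripts themselves, the same optimizer is selected by passing --optimizer_type PagedAdamW32bit.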