diff --git a/fine_tune.py b/fine_tune.py
index a906b238..f89e897a 100644
--- a/fine_tune.py
+++ b/fine_tune.py
@@ -6,6 +6,7 @@ import gc
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -275,7 +276,10 @@ def train(args):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     for epoch in range(num_train_epochs):
         accelerator.print(f"\nepoch {epoch+1}/{num_train_epochs}")
diff --git a/library/train_util.py b/library/train_util.py
index 5f6e7d48..ce4b5959 100644
--- a/library/train_util.py
+++ b/library/train_util.py
@@ -2716,6 +2716,12 @@ def add_training_arguments(parser: argparse.ArgumentParser, support_dreambooth:
         default=None,
         help="name of tracker to use for logging, default is script-specific default name / ログ出力に使用するtrackerの名前、省略時はスクリプトごとのデフォルト名",
     )
+    parser.add_argument(
+        "--log_tracker_config",
+        type=str,
+        default=None,
+        help="path to tracker config file to use for logging / ログ出力に使用するtrackerの設定ファイルのパス",
+    )
     parser.add_argument(
         "--wandb_api_key",
         type=str,
diff --git a/sdxl_train.py b/sdxl_train.py
index 7e3a8416..d47720ac 100644
--- a/sdxl_train.py
+++ b/sdxl_train.py
@@ -5,6 +5,7 @@ import gc
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -355,7 +356,10 @@ def train(args):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     for epoch in range(num_train_epochs):
         accelerator.print(f"\nepoch {epoch+1}/{num_train_epochs}")
diff --git a/train_controlnet.py b/train_controlnet.py
index 39ac43e9..988304f6 100644
--- a/train_controlnet.py
+++ b/train_controlnet.py
@@ -7,6 +7,7 @@ import random
 import time
 from multiprocessing import Value
 from types import SimpleNamespace
+import toml
 
 from tqdm import tqdm
 import torch
@@ -324,7 +325,10 @@ def train(args):
         clip_sample=False,
     )
     if accelerator.is_main_process:
-        accelerator.init_trackers("controlnet_train" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("controlnet_train" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     loss_list = []
     loss_total = 0.0
diff --git a/train_db.py b/train_db.py
index 7571efc3..72d634b8 100644
--- a/train_db.py
+++ b/train_db.py
@@ -7,6 +7,7 @@ import itertools
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -250,7 +251,10 @@ def train(args):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("dreambooth" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("dreambooth" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     loss_list = []
     loss_total = 0.0
diff --git a/train_network.py b/train_network.py
index 310f7506..b482c80a 100644
--- a/train_network.py
+++ b/train_network.py
@@ -8,6 +8,7 @@ import random
 import time
 import json
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -682,7 +683,10 @@ class NetworkTrainer:
             custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
         if accelerator.is_main_process:
-            accelerator.init_trackers("network_train" if args.log_tracker_name is None else args.log_tracker_name)
+            init_kwargs = {}
+            if args.log_tracker_config is not None:
+                init_kwargs = toml.load(args.log_tracker_config)
+            accelerator.init_trackers("network_train" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
         loss_list = []
         loss_total = 0.0
diff --git a/train_textual_inversion.py b/train_textual_inversion.py
index 265b244b..d3ff6456 100644
--- a/train_textual_inversion.py
+++ b/train_textual_inversion.py
@@ -3,6 +3,7 @@ import gc
 import math
 import os
 from multiprocessing import Value
+import toml
 
 from tqdm import tqdm
 import torch
@@ -492,7 +493,10 @@ class TextualInversionTrainer:
            custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
        if accelerator.is_main_process:
-            accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name)
+            init_kwargs = {}
+            if args.log_tracker_config is not None:
+                init_kwargs = toml.load(args.log_tracker_config)
+            accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
        # function for saving/removing
        def save_model(ckpt_name, embs_list, steps, epoch_no, force_sync_upload=False):
diff --git a/train_textual_inversion_XTI.py b/train_textual_inversion_XTI.py
index ba5c7d03..79c64cbe 100644
--- a/train_textual_inversion_XTI.py
+++ b/train_textual_inversion_XTI.py
@@ -388,7 +388,10 @@ def train(args):
         custom_train_functions.fix_noise_scheduler_betas_for_zero_terminal_snr(noise_scheduler)
 
     if accelerator.is_main_process:
-        accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name)
+        init_kwargs = {}
+        if args.log_tracker_config is not None:
+            init_kwargs = toml.load(args.log_tracker_config)
+        accelerator.init_trackers("textual_inversion" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
 
     # function for saving/removing
     def save_model(ckpt_name, embs, steps, epoch_no, force_sync_upload=False):
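
For reference, the TOML file given via `--log_tracker_config` is loaded verbatim and passed as `init_kwargs` to `accelerator.init_trackers`. Accelerate expects this argument to be a mapping keyed by tracker name, whose values are forwarded to that tracker's init call (for the wandb tracker, `wandb.init`). A minimal sketch of such a config, assuming the wandb tracker; the run name, entity, and tags below are hypothetical values, not part of this patch:

```toml
# Hypothetical tracker config for --log_tracker_config.
# The [wandb] table name must match the tracker's name in Accelerate;
# its keys are forwarded to wandb.init().
[wandb]
name = "my-run"        # hypothetical run name
entity = "my-team"     # hypothetical W&B entity
tags = ["lora", "sd-scripts"]
```

Saved as e.g. `tracker.toml`, this would be passed with `--log_tracker_config=tracker.toml` alongside the existing `--log_tracker_name` and `--wandb_api_key` options. When the flag is omitted, `init_kwargs` stays an empty dict, so existing behavior is unchanged.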