Make tracker init_kwargs configurable

This commit is contained in:
ddPn08
2023-07-09 16:31:38 +09:00
parent f54b784d88
commit b841dd78fe
8 changed files with 40 additions and 7 deletions

View File

@@ -6,6 +6,7 @@ import gc
import math
import os
from multiprocessing import Value
import toml
from tqdm import tqdm
import torch
@@ -275,7 +276,10 @@ def train(args):
prepare_scheduler_for_custom_training(noise_scheduler, accelerator.device)
if accelerator.is_main_process:
accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name)
init_kwargs = {}
if args.log_tracker_config is not None:
init_kwargs = toml.load(args.log_tracker_config)
accelerator.init_trackers("finetuning" if args.log_tracker_name is None else args.log_tracker_name, init_kwargs=init_kwargs)
for epoch in range(num_train_epochs):
accelerator.print(f"\nepoch {epoch+1}/{num_train_epochs}")