Support scripts other than LoRA as well

u-haru
2023-03-26 02:19:55 +09:00
parent 9c80da6ac5
commit 4dc1124f93
5 changed files with 37 additions and 33 deletions


@@ -4,6 +4,7 @@ import gc
 import math
 import os
 import toml
+from multiprocessing import Value
 from tqdm import tqdm
 import torch
@@ -71,10 +72,6 @@ imagenet_style_templates_small = [
 ]
-def collate_fn(examples):
-    return examples[0]
 def train(args):
     if args.output_name is None:
         args.output_name = args.token_string
@@ -186,6 +183,10 @@ def train(args):
     config_util.blueprint_args_conflict(args,blueprint)
     train_dataset_group = config_util.generate_dataset_group_by_blueprint(blueprint.dataset_group)
+    current_epoch = Value('i',0)
+    current_step = Value('i',0)
+    collater = train_util.collater_class(current_epoch,current_step)
     # make captions: a very crude implementation that rewrites every caption into the string "tokenstring tokenstring1 tokenstring2 ... tokenstringn"
     if use_template:
         print(f"use template for training captions. is object: {args.use_object_template}")
@@ -251,7 +252,7 @@ def train(args):
         train_dataset_group,
         batch_size=1,
         shuffle=True,
-        collate_fn=collate_fn,
+        collate_fn=collater,
         num_workers=n_workers,
         persistent_workers=args.persistent_data_loader_workers,
     )
@@ -335,13 +336,14 @@ def train(args):
     for epoch in range(num_train_epochs):
         print(f"epoch {epoch+1}/{num_train_epochs}")
-        train_dataset_group.set_current_epoch(epoch + 1)
-        train_dataset_group.set_current_step(global_step)
+        current_epoch.value = epoch+1
         text_encoder.train()
         loss_total = 0
         for step, batch in enumerate(train_dataloader):
+            current_step.value = global_step
             with accelerator.accumulate(text_encoder):
                 with torch.no_grad():
                     if "latents" in batch and batch["latents"] is not None: