Add vae_batch_size option for faster latent caching

This commit is contained in:
Kohya S
2023-03-21 18:15:57 +09:00
parent cca3804503
commit 1816ac3271
5 changed files with 32 additions and 19 deletions

View File

@@ -139,7 +139,7 @@ def train(args):
vae.requires_grad_(False)
vae.eval()
with torch.no_grad():
train_dataset_group.cache_latents(vae)
train_dataset_group.cache_latents(vae, args.vae_batch_size)
vae.to("cpu")
if torch.cuda.is_available():
torch.cuda.empty_cache()