feat: keep caption dropout rate out of cache and handle it in training script

kohya-ss
2026-02-08 15:35:53 +09:00
parent c3556d455f
commit 4f6511bf28
4 changed files with 74 additions and 134 deletions


@@ -255,10 +255,8 @@ def train(args):
     )
     # Pre-cache unconditional embeddings for caption dropout before text encoder is deleted
-    caption_dropout_rate = getattr(args, "caption_dropout_rate", 0.0)
-    if caption_dropout_rate > 0.0:
-        with accelerator.autocast():
-            text_encoding_strategy.cache_uncond_embeddings(tokenize_strategy, [qwen3_text_encoder])
+    with accelerator.autocast():
+        text_encoding_strategy.cache_uncond_embeddings(tokenize_strategy, [qwen3_text_encoder])
     accelerator.wait_for_everyone()
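With the guard removed, the unconditional embeddings are always cached, and the dropout decision can move into the training loop. Below is a minimal sketch of how that per-sample handling might look, assuming the cached embeddings are plain tensors; apply_caption_dropout, the tensor shapes, and the call site are illustrative assumptions, not the repository's actual code.

import torch

def apply_caption_dropout(
    text_embeddings: torch.Tensor,    # assumed shape: (batch, seq_len, dim)
    uncond_embeddings: torch.Tensor,  # assumed shape: (1, seq_len, dim) or (seq_len, dim)
    caption_dropout_rate: float,
) -> torch.Tensor:
    # Hypothetical training-time caption dropout using pre-cached
    # unconditional embeddings (as cached in the diff above).
    if caption_dropout_rate <= 0.0:
        return text_embeddings
    batch_size = text_embeddings.shape[0]
    # One Bernoulli draw per batch element; True means "drop this caption".
    drop_mask = torch.rand(batch_size, device=text_embeddings.device) < caption_dropout_rate
    # Broadcast the mask over the sequence and embedding dimensions.
    drop_mask = drop_mask.view(batch_size, 1, 1)
    # Swap dropped captions for the cached unconditional embedding.
    return torch.where(drop_mask, uncond_embeddings.to(text_embeddings.dtype), text_embeddings)

# Illustrative call site inside the training loop:
# embeds = apply_caption_dropout(embeds, cached_uncond, args.caption_dropout_rate)

Caching unconditionally also means the cache contents no longer depend on --caption_dropout_rate, so the rate can presumably be changed between runs without regenerating the cache.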