Fix additional LoRA to work

This commit is contained in:
kohya-ss
2024-10-29 22:29:24 +09:00
parent 80bb3f4ecf
commit ce5b532582

View File

@@ -428,7 +428,7 @@ class LoRANetwork(torch.nn.Module):
     for filter, in_dim in zip(
         [
             "context_embedder",
-            "t_embedder",
+            "_t_embedder",  # don't use "t_embedder" because it's used in "context_embedder"
             "x_embedder",
             "y_embedder",
             "final_layer_adaLN_modulation",