From 58bdf85ab4b5d40be017f99d1fb9a23d8e4a67f8 Mon Sep 17 00:00:00 2001
From: rockerBOO
Date: Mon, 24 Mar 2025 04:22:12 -0400
Subject: [PATCH] Remove rank stabilization

---
 networks/lora_flux.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/networks/lora_flux.py b/networks/lora_flux.py
index 3c405c7b..ddbef195 100644
--- a/networks/lora_flux.py
+++ b/networks/lora_flux.py
@@ -66,10 +66,7 @@ class LoRAModule(torch.nn.Module):
         if type(alpha) == torch.Tensor:
             alpha = alpha.detach().float().numpy()  # without casting, bf16 causes error
         alpha = self.lora_dim if alpha is None or alpha == 0 else alpha
-        rank_factor = self.lora_dim
-        if rank_stabilized:
-            rank_factor = math.sqrt(rank_factor)
-        self.scale = alpha / rank_factor
+        self.scale = alpha / self.lora_dim
         self.register_buffer("alpha", torch.tensor(alpha))  # can be treated as a constant

         self.split_dims = split_dims
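
Note: for reference, the scaling this patch removes is the rank-stabilized LoRA
(rsLoRA) variant, which divides alpha by sqrt(r) rather than by r so the
effective update magnitude does not shrink as the rank grows. Below is a
minimal standalone sketch of the two scaling rules, reusing the alpha,
lora_dim, and rank_stabilized names from the patch; the lora_scale function
itself is hypothetical and not part of lora_flux.py.

    import math

    def lora_scale(alpha: float, lora_dim: int, rank_stabilized: bool = False) -> float:
        # Standard LoRA scaling (what the patch keeps):  alpha / r
        # Rank-stabilized scaling (what the patch drops): alpha / sqrt(r)
        rank_factor = math.sqrt(lora_dim) if rank_stabilized else lora_dim
        return alpha / rank_factor

    # e.g. alpha = 16, rank = 64:
    print(lora_scale(16, 64))        # 0.25 (standard, as in the patched code)
    print(lora_scale(16, 64, True))  # 2.0  (rank-stabilized, as in the removed code)

After this change, self.scale is always alpha / self.lora_dim, matching the
original LoRA formulation.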