diff --git a/networks/lora.py b/networks/lora.py
index 9f2f5094..27f59344 100644
--- a/networks/lora.py
+++ b/networks/lora.py
@@ -400,7 +400,7 @@ def parse_block_lr_kwargs(nw_kwargs):
     return down_lr_weight, mid_lr_weight, up_lr_weight
 
 
-def create_network(multiplier, network_dim, network_alpha, vae, text_encoder, unet, dropout=None, **kwargs):
+def create_network(multiplier, network_dim, network_alpha, vae, text_encoder, unet, neuron_dropout=None, **kwargs):
     if network_dim is None:
         network_dim = 4  # default
     if network_alpha is None:
@@ -455,7 +455,7 @@ def create_network(multiplier, network_dim, network_alpha, vae, text_encoder, un
         multiplier=multiplier,
         lora_dim=network_dim,
         alpha=network_alpha,
-        dropout=dropout,
+        dropout=neuron_dropout,
         rank_dropout=rank_dropout,
         module_dropout=module_dropout,
         conv_lora_dim=conv_dim,
diff --git a/train_network.py b/train_network.py
index c6ea7e4e..b62aef7e 100644
--- a/train_network.py
+++ b/train_network.py
@@ -212,7 +212,7 @@ def train(args):
     else:
         # LyCORIS will work with this...
         network = network_module.create_network(
-            1.0, args.network_dim, args.network_alpha, vae, text_encoder, unet, dropout=args.network_dropout, **net_kwargs
+            1.0, args.network_dim, args.network_alpha, vae, text_encoder, unet, neuron_dropout=args.network_dropout, **net_kwargs
         )
         if network is None:
             return
@@ -724,7 +724,7 @@ def train(args):
             progress_bar.set_postfix(**logs)
 
             if args.scale_weight_norms:
-                progress_bar.set_postfix(**{**max_mean_logs,**logs})
+                progress_bar.set_postfix(**{**max_mean_logs, **logs})
 
             if args.logging_dir is not None:
                 logs = generate_step_logs(args, current_loss, avr_loss, lr_scheduler, keys_scaled, mean_norm, maximum_norm)
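
For anyone maintaining their own `--network_module`, a minimal sketch of the entry-point signature after this rename is shown below. Only the keyword name `neuron_dropout` and the defaults for `network_dim`/`network_alpha` come from this diff; the body is a placeholder, not the real `networks/lora.py` implementation.

```python
# Hypothetical sketch of a custom network module compatible with the renamed keyword.
# train_network.py now passes the value of --network_dropout as `neuron_dropout`,
# so any module loaded via --network_module must accept that keyword (or absorb it
# through **kwargs).
def create_network(multiplier, network_dim, network_alpha, vae, text_encoder, unet, neuron_dropout=None, **kwargs):
    if network_dim is None:
        network_dim = 4  # same default as networks/lora.py
    if network_alpha is None:
        network_alpha = 1.0
    # Internally the value may keep its old name, mirroring the networks/lora.py hunk
    # where LoRANetwork is still constructed with dropout=neuron_dropout.
    config = {
        "multiplier": multiplier,
        "lora_dim": network_dim,
        "alpha": network_alpha,
        "dropout": neuron_dropout,
    }
    return config  # placeholder: a real module returns a network object here
```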