From c7b62f7474c198cb34ec046dab9f1bc12384159f Mon Sep 17 00:00:00 2001
From: araleza <70412719+araleza@users.noreply.github.com>
Date: Sun, 24 Aug 2025 16:27:15 +0100
Subject: [PATCH] Fixed fused_backward_pass error message as it was not
 accessing the correct member variable

---
 flux_train.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flux_train.py b/flux_train.py
index 99ca4641..9bb58c0f 100644
--- a/flux_train.py
+++ b/flux_train.py
@@ -502,7 +502,7 @@ def train(args):
         elif args.optimizer_type.lower() == "nadamoffload" or args.optimizer_type.lower() == "nadamwoffload":
             library.adamw_fused.patch_adamw_offload_fused(optimizer, True) # Nesterov
         else:
-            logger.error(f"Optimizer '{args.optimizer}' does not have a --fused_backward_pass implementation available")
+            logger.error(f"Optimizer '{args.optimizer_type}' does not have a --fused_backward_pass implementation available")
 
         for param_group, param_name_group in zip(optimizer.param_groups, param_names):
             for parameter, param_name in zip(param_group["params"], param_name_group):
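
Reviewer note (a minimal sketch, not part of the patch): the old log line read
args.optimizer, but the flag this script parses is --optimizer_type and, as the
commit message implies, no 'optimizer' attribute exists on the parsed namespace.
The error path would therefore raise AttributeError instead of logging the
intended message. A standalone reproduction, assuming a plain argparse setup in
which only --optimizer_type is defined:

    import argparse

    parser = argparse.ArgumentParser()
    # Assumption: the script defines --optimizer_type and has no --optimizer flag.
    parser.add_argument("--optimizer_type", type=str, default="AdamW")
    args = parser.parse_args(["--optimizer_type", "Lion"])

    print(args.optimizer_type)  # "Lion" -- what the fixed message interpolates
    print(args.optimizer)       # AttributeError: 'Namespace' object has no attribute 'optimizer'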