Fixed fused_backward_pass error message: it referenced args.optimizer instead of the correct member variable, args.optimizer_type

araleza
2025-08-24 16:27:15 +01:00
parent f583e35673
commit c7b62f7474

@@ -502,7 +502,7 @@ def train(args):
     elif args.optimizer_type.lower() == "nadamoffload" or args.optimizer_type.lower() == "nadamwoffload":
         library.adamw_fused.patch_adamw_offload_fused(optimizer, True) # Nesterov
     else:
-        logger.error(f"Optimizer '{args.optimizer}' does not have a --fused_backward_pass implementation available")
+        logger.error(f"Optimizer '{args.optimizer_type}' does not have a --fused_backward_pass implementation available")
     for param_group, param_name_group in zip(optimizer.param_groups, param_names):
         for parameter, param_name in zip(param_group["params"], param_name_group):
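
A note on why the old line was a problem: the training arguments expose optimizer_type, so interpolating args.optimizer into the f-string fails before anything can be logged. A minimal sketch of the failure, using a hypothetical argparse setup that mirrors the script's flag (not the repository's actual parser):

import argparse

# Hypothetical stand-in for the training script's parser:
# it defines --optimizer_type but no --optimizer.
parser = argparse.ArgumentParser()
parser.add_argument("--optimizer_type", default="adamw")
args = parser.parse_args([])

try:
    # Old message: args.optimizer does not exist on this namespace,
    # so building the f-string raises instead of logging.
    f"Optimizer '{args.optimizer}' does not have a --fused_backward_pass implementation available"
except AttributeError as exc:
    print(exc)  # 'Namespace' object has no attribute 'optimizer'

# Fixed message: reads the attribute that actually exists.
print(f"Optimizer '{args.optimizer_type}' does not have a --fused_backward_pass implementation available")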
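
Since the surrounding block dispatches optimizers to a fused-backward-pass patch, here is a rough illustration of the general technique, using plain SGD and PyTorch's public register_post_accumulate_grad_hook rather than the repository's library.adamw_fused module: each parameter is updated the moment its gradient is accumulated, and the gradient is then freed, so the full set of gradients never has to coexist in memory.

import torch

def patch_sgd_fused(optimizer):
    # Step each parameter as soon as backward() produces its gradient,
    # then drop the gradient so it is never held for the whole pass.
    for group in optimizer.param_groups:
        lr = group["lr"]
        for p in group["params"]:
            if not p.requires_grad:
                continue

            def hook(param, lr=lr):
                with torch.no_grad():
                    param.add_(param.grad, alpha=-lr)
                param.grad = None  # free gradient memory immediately

            p.register_post_accumulate_grad_hook(hook)

model = torch.nn.Linear(4, 1)
opt = torch.optim.SGD(model.parameters(), lr=0.1)
patch_sgd_fused(opt)
loss = model(torch.randn(2, 4)).sum()
loss.backward()  # parameter updates happen here; no separate opt.step()

The patched AdamW-offload variant in the diff presumably applies the same idea with optimizer state kept on the CPU; per the inline comment, the True argument selects the Nesterov flavor.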