From da6416a2fc9b9f0e09754babb48ab44116f1212b Mon Sep 17 00:00:00 2001
From: araleza <70412719+araleza@users.noreply.github.com>
Date: Wed, 23 Jul 2025 15:08:24 +0100
Subject: [PATCH] Restoring the deleted __main__ function and fixing a warning
 typo

---
 flux_train.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/flux_train.py b/flux_train.py
index a631546b..d60fbb0e 100644
--- a/flux_train.py
+++ b/flux_train.py
@@ -394,7 +394,7 @@ def train(args):
         if args.optimizer_type != "adafactor":
             logger.warning("Kahan summation has been requested, but currently this is only supported by the supplied Adafactor optimizer.")
         if not args.full_bf16:
-            logger.warning("Kahan summation require --full_bf16")
+            logger.warning("Kahan summation requires --full_bf16")
         if args.blockwise_fused_optimizers:
             logger.warning("Kahan summation has been requested, but it is incompatible with --blockwise_fused_optimizer. "\
                 "Perhaps try --fused_backward_pass instead.")
@@ -859,3 +859,13 @@ def setup_parser() -> argparse.ArgumentParser:
         help="[EXPERIMENTAL] enable offloading of tensors to CPU during checkpointing / チェックポイント時にテンソルをCPUにオフロードする",
     )
     return parser
+
+
+if __name__ == "__main__":
+    parser = setup_parser()
+
+    args = parser.parse_args()
+    train_util.verify_command_line_training_args(args)
+    args = train_util.read_config_from_file(args, parser)
+
+    train(args)