From 66d4a62792f94ff259dbcc6981aff24a0a0bdcf2 Mon Sep 17 00:00:00 2001 From: DKnight54 <126916963+DKnight54@users.noreply.github.com> Date: Sat, 25 Jan 2025 00:22:07 +0800 Subject: [PATCH] Update accel_sdxl_gen_img.py --- accel_sdxl_gen_img.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/accel_sdxl_gen_img.py b/accel_sdxl_gen_img.py index 5de9b282..31b34eac 100644 --- a/accel_sdxl_gen_img.py +++ b/accel_sdxl_gen_img.py @@ -2822,7 +2822,7 @@ def main(args): with distributed_state.split_between_processes(batch_data_split) as batch_list: logger.info(f"Loading batch of {len(batch_list)} prompts onto device {distributed_state.device}:") - for i in range(batch_list): + for i in range(len(batch_list)): logger.info(f"Prompt {i}: {batch_list[i].base.prompt}") prev_image = process_batch(batch_list, highres_fix)[0] accelerator.wait_for_everyone() @@ -2835,7 +2835,7 @@ def main(args): with torch.no_grad(): with distributed_state.split_between_processes(batch_data_split) as batch_list: logger.info(f"Loading batch of {len(batch_list)} prompts onto device {distributed_state.device}:") - for i in range(batch_list): + for i in range(len(batch_list)): logger.info(f"Prompt {i}: {batch_list[i].base.prompt}") prev_image = process_batch(batch_list, highres_fix)[0] accelerator.wait_for_everyone() @@ -2850,7 +2850,7 @@ def main(args): with torch.no_grad(): with distributed_state.split_between_processes(batch_data_split) as batch_list: logger.info(f"Loading batch of {len(batch_list)} prompts onto device {distributed_state.device}:") - for i in range(batch_list): + for i in range(len(batch_list)): logger.info(f"Prompt {i}: {batch_list[i].base.prompt}") prev_image = process_batch(batch_list, highres_fix)[0] accelerator.wait_for_everyone()