From 9550106016be71112a28d9bd33ffb0e0da2c61e8 Mon Sep 17 00:00:00 2001
From: DKnight54 <126916963+DKnight54@users.noreply.github.com>
Date: Sat, 25 Jan 2025 03:25:38 +0800
Subject: [PATCH] Update accel_sdxl_gen_img.py

---
 accel_sdxl_gen_img.py | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/accel_sdxl_gen_img.py b/accel_sdxl_gen_img.py
index 2de568ce..9273ec53 100644
--- a/accel_sdxl_gen_img.py
+++ b/accel_sdxl_gen_img.py
@@ -2836,11 +2836,11 @@ def main(args):
                     logger.info(f"test_batch_data_split: {test_batch_data_split}")
                 with torch.no_grad():
                     with distributed_state.split_between_processes(batch_data_split) as batch_list:
-
-                        logger.info(f"Loading batch of {len(batch_list)} prompts onto device {distributed_state.device}:")
-                        for i in range(len(batch_list)):
+                        logger.info(f"Loading batch of {len(batch_list[0])} prompts onto device {distributed_state.device}:")
+                        logger.info(f"batch_list: {batch_list}")
+                        for i in range(len(batch_list[0])):
                             logger.info(f"Prompt {i}: {batch_list[0][i].base.prompt}")
-                        prev_image = process_batch(batch_list, highres_fix)[0]
+                        prev_image = process_batch(batch_list[0], highres_fix)[0]
                 accelerator.wait_for_everyone()
                 batch_data.clear()

@@ -2863,10 +2863,11 @@

                 with torch.no_grad():
                     with distributed_state.split_between_processes(batch_data_split) as batch_list:
-                        logger.info(f"Loading batch of {len(batch_list)} prompts onto device {distributed_state.device}:")
-                        for i in range(len(batch_list)):
+                        logger.info(f"Loading batch of {len(batch_list[0])} prompts onto device {distributed_state.device}:")
+                        logger.info(f"batch_list: {batch_list}")
+                        for i in range(len(batch_list[0])):
                             logger.info(f"Prompt {i}: {batch_list[0][i].base.prompt}")
-                        prev_image = process_batch(batch_list, highres_fix)[0]
+                        prev_image = process_batch(batch_list[0], highres_fix)[0]
                 accelerator.wait_for_everyone()
                 batch_data.clear()

@@ -2889,10 +2890,11 @@
                     logger.info(f"{test_batch_data_split}")
                 with torch.no_grad():
                     with distributed_state.split_between_processes(batch_data_split) as batch_list:
-                        logger.info(f"Loading batch of {len(batch_list)} prompts onto device {distributed_state.device}:")
-                        for i in range(len(batch_list)):
+                        logger.info(f"Loading batch of {len(batch_list[0])} prompts onto device {distributed_state.device}:")
+                        logger.info(f"batch_list: {batch_list}")
+                        for i in range(len(batch_list[0])):
                             logger.info(f"Prompt {i}: {batch_list[0][i].base.prompt}")
-                        prev_image = process_batch(batch_list, highres_fix)[0]
+                        prev_image = process_batch(batch_list[0], highres_fix)[0]
                 accelerator.wait_for_everyone()
                 batch_data.clear()