Fix weights checking script to use float32

This commit is contained in:
Kohya S
2023-01-22 10:44:29 +09:00
parent cae42728ab
commit 6e279730cf

View File

@@ -15,12 +15,13 @@ def main(file):
   keys = list(sd.keys())
   for key in keys:
-    if 'lora_up' in key:
+    if 'lora_up' in key or 'lora_down' in key:
       values.append((key, sd[key]))
-  print(f"number of LoRA-up modules: {len(values)}")
+  print(f"number of LoRA modules: {len(values)}")
   for key, value in values:
-    print(f"{key},{torch.mean(torch.abs(value))}")
+    value = value.to(torch.float32)
+    print(f"{key},{torch.mean(torch.abs(value))},{torch.min(torch.abs(value))}")
 if __name__ == '__main__':