How to convert a LoRA trained by DiffSynth-Studio to be compatible with ComfyUI?

#2
by shuzhi - opened

I trained a LoRA for Wan using DiffSynth-Studio, but I can't load it in ComfyUI. Could you provide a conversion script for it? I’d really appreciate it!

I'm not sure if this is the right one; let me know if it's correct.
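For context, the mismatch is in key naming: the script below assumes DiffSynth-Studio saved keys with a lora_unet__ prefix, while ComfyUI expects diffusion_model.-style keys. You can check what your file actually contains first (a minimal sketch; "my_lora.safetensors" is a placeholder path):

import safetensors.torch

# Placeholder path: point this at the LoRA file DiffSynth-Studio produced
sd = safetensors.torch.load_file("my_lora.safetensors")
for k in sorted(sd.keys())[:10]:  # the first few keys show the naming scheme
    print(k, tuple(sd[k].shape))

If your keys look different, the mapping below will need adjusting. Here is the conversion script: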

import safetensors.torch
import torch

input_path = r"C:\ComfyDG\ComfyUI\models\loras\wan_1_3b\Wan2.1-lora.safetensors"
output_path = r"C:\ComfyDG\ComfyUI\models\loras\wan_1_3b\Wan2_1-final_lora.safetensors"
sd = safetensors.torch.load_file(input_path)

new_sd = {}

default_alpha = 1.0  # May be 16.0 or 32.0 depending on the original LoRA

print("Original keys:")
for k in sorted(sd.keys()):
    print(k)

for k in sd.keys():
    new_key = k

    if "lora_unet__blocks_" in new_key:
        parts = new_key.split("lora_unet__blocks_")
        block_num = parts[1].split("_")[0]  # Block number (e.g. 0, 1, 2, etc.)
        rest = "_".join(parts[1].split("_")[1:])  # Rest of the key
        new_key = f"diffusion_model.blocks.{block_num}.{rest.replace('_', '.')}"
    elif "lora_unet__" in new_key:
        new_key = new_key.replace("lora_unet__", "diffusion_model.").replace("_", ".")

    new_sd[new_key] = sd[k]

    # Add a default alpha tensor for each LoRA pair that lacks one
    if new_key.endswith(".lora.down.weight"):
        alpha_key = new_key.replace(".lora.down.weight", ".alpha")
        if alpha_key not in new_sd:  # Avoid overwriting if already present
            new_sd[alpha_key] = torch.tensor(default_alpha, dtype=torch.float32)

print("\nTransformed keys:")
for k in sorted(new_sd.keys()):
    print(k)

safetensors.torch.save_file(new_sd, output_path)
print(f"Saved to {output_path}")
