Update app.py

app.py CHANGED
@@ -7,7 +7,7 @@ import json
 import os
 from PIL import Image
 from diffusers import FluxKontextPipeline
-from diffusers.utils import load_image
+from diffusers.utils import load_image, peft_utils
 from huggingface_hub import hf_hub_download, HfFileSystem, ModelCard
 from safetensors.torch import load_file
 import requests
@@ -18,6 +18,18 @@ MAX_SEED = np.iinfo(np.int32).max
 
 pipe = FluxKontextPipeline.from_pretrained("LPX55/FLUX.1_Kontext-Lightning", torch_dtype=torch.bfloat16).to("cuda")
 
+try:  # Temporary hack for a LoRA-loading problem in some diffusers versions
+    from diffusers.utils.peft_utils import _derive_exclude_modules
+
+    def new_derive_exclude_modules(*args, **kwargs):
+        exclude_modules = _derive_exclude_modules(*args, **kwargs)
+        if exclude_modules is not None:
+            exclude_modules = [n for n in exclude_modules if "proj_out" not in n]
+        return exclude_modules
+    peft_utils._derive_exclude_modules = new_derive_exclude_modules
+except Exception:
+    pass
+
 # Load LoRA data (you'll need to create this JSON file or modify to load your LoRAs)
 
 with open("flux_loras.json", "r") as file:
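For context, the second hunk monkey-patches diffusers' private `_derive_exclude_modules` helper so that modules whose names contain "proj_out" are never placed on the exclusion list during LoRA loading; on affected diffusers versions, excluding those modules causes LoRA weights targeting `proj_out` to be skipped. Below is a minimal sketch of how the patched pipeline might then load and apply a LoRA. The LoRA repo id, weight filename, adapter name, image URL, and prompt are illustrative placeholders, not values from this commit; `load_lora_weights`, `load_image`, and the pipeline call are standard diffusers APIs already imported in app.py.

```python
# Minimal usage sketch (assumes the monkey-patch above has already run).
import torch
from diffusers import FluxKontextPipeline
from diffusers.utils import load_image

pipe = FluxKontextPipeline.from_pretrained(
    "LPX55/FLUX.1_Kontext-Lightning", torch_dtype=torch.bfloat16
).to("cuda")

# With _derive_exclude_modules patched, "proj_out" layers are no longer
# excluded, so LoRAs that target them load without dropping weights.
pipe.load_lora_weights(
    "some-user/some-kontext-lora",    # hypothetical LoRA repo id
    weight_name="lora.safetensors",   # hypothetical weight filename
    adapter_name="style",
)

# Kontext is an instruction-based image-editing pipeline: it takes an
# input image plus an edit prompt.
input_image = load_image("https://example.com/input.png")  # placeholder URL
result = pipe(
    prompt="turn the scene into a watercolor painting",
    image=input_image,
    num_inference_steps=8,  # Lightning-distilled checkpoints typically use few steps
).images[0]
result.save("edited.png")
```

Note the design choice in the commit itself: wrapping the patch in try/except makes it a no-op on diffusers versions where `_derive_exclude_modules` does not exist, so the hack degrades gracefully instead of crashing the Space.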