# --- Previous version: the pipeline was rebuilt on every call ---
# from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
# import torch
#
# def generate_animagine_xl(prompt: str):
#     model_id = "Linaqruf/animagine-xl"
#     pipe = StableDiffusionXLPipeline.from_pretrained(
#         model_id,
#         torch_dtype=torch.float16,
#         use_safetensors=True,
#         variant="fp16"
#     )
#     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
#     pipe = pipe.to("cuda")
#     image = pipe(prompt=prompt, width=1024, height=1024).images[0]
#     image.save("output_animagine_xl.png")
#     print("✅ Saved: output_animagine_xl.png")
#     return image
#
# if __name__ == "__main__":
#     prompt = "A man gazing at her"
#     generate_animagine_xl(prompt)
from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
import torch

# (1) Initialize the model and scheduler once, at module level
model_id = "Linaqruf/animagine-xl"
pipe = StableDiffusionXLPipeline.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    use_safetensors=True,
    variant="fp16"
)
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
pipe = pipe.to("cpu")  # or "cuda" if a GPU is available
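# Note (assumption, not in the original file): float16 weights can be slow or
# unsupported for some operators on CPU; when targeting CPU it may be safer to
# load the pipeline with torch_dtype=torch.float32 instead.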
# (2) Image generation function: reuses the globally loaded pipeline
def generate_animagine_xl(prompt: str):
    image = pipe(prompt=prompt, width=1024, height=1024).images[0]
    image.save("output_animagine_xl.png")
    print("✅ Saved: output_animagine_xl.png")
    return image

if __name__ == "__main__":
    prompt = "A man gazing at her"
    generate_animagine_xl(prompt)
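
# Hedged sketch (not part of the original file): because the pipeline is created
# once at module level, the same `pipe` can be called repeatedly with the standard
# diffusers sampling arguments to tune quality vs. speed without reloading the
# model. The prompt and values below are illustrative assumptions only.
# image = pipe(
#     prompt="A man gazing at her, anime style",
#     width=1024,
#     height=1024,
#     num_inference_steps=28,  # Euler Ancestral typically works well around 25-30 steps
#     guidance_scale=7.0,      # classifier-free guidance strength
# ).images[0]
# image.save("output_animagine_xl_tuned.png")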