# (removed page-scrape residue: "Spaces / Sleeping" Hugging Face status banner)
# portfolio/npc_social_network/models/openkollm_setup.py
import os

from dotenv import load_dotenv
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
def load_openkollm():
    """Load KoAlpaca-Polyglot-12.8B and return a text-generation pipeline.

    Reads the Hugging Face access token from the ``HF_token`` environment
    variable (populated from a ``.env`` file via python-dotenv) and uses it
    for both the tokenizer and the model download.

    Returns:
        transformers.Pipeline: a ready-to-use "text-generation" pipeline.

    Note:
        The 12.8B checkpoint requires a GPU; loading on CPU-only machines
        will be impractically slow or fail (per the original author's note).
    """
    load_dotenv()  # load environment variables from the .env file
    model_id = "beomi/KoAlpaca-Polyglot-12.8B"
    access_token = os.getenv("HF_token")  # may be None for public checkpoints
    tokenizer = AutoTokenizer.from_pretrained(model_id, token=access_token)
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        # bug fix: the token was previously passed only to the tokenizer,
        # so gated/private checkpoints would fail at the model download step
        token=access_token,
        device_map="auto",   # automatic device placement (GPU when available)
        torch_dtype="auto",  # pick the checkpoint's native dtype
    )
    return pipeline("text-generation", model=model, tokenizer=tokenizer)