EdmundYi's picture
Create model_wrappers/model_a.py
994b482 verified
raw
history blame
253 Bytes
# model_wrappers/model_a.py
from transformers import pipeline
# Loaded once at import time so every call to run_model_a reuses the same
# weights. NOTE(review): this makes importing the module slow and memory-heavy;
# consider lazy-loading if startup time matters.
model_a = pipeline("text-generation", model="gpt2")


def run_model_a(prompt: str) -> str:
    """Return a deterministic (greedy) GPT-2 continuation of *prompt*.

    Args:
        prompt: The text to continue.

    Returns:
        The full generated string — the prompt followed by up to 100
        newly generated tokens (the pipeline's ``generated_text`` field
        includes the prompt).

    Note:
        Uses ``max_new_tokens`` instead of the deprecated ``max_length``:
        ``max_length`` counts the prompt's own tokens against the budget,
        so a prompt longer than 100 tokens would leave no room to
        generate anything at all.
    """
    # do_sample=False selects greedy decoding, so output is deterministic
    # for a given prompt and model version.
    output = model_a(prompt, max_new_tokens=100, do_sample=False)
    return output[0]["generated_text"]