from transformers import pipeline

# Text-generation pipeline backed by the lightweight distilgpt2 checkpoint.
model_b = pipeline("text-generation", model="distilgpt2")

def run_model_b(prompt: str) -> str:
    # Sample a completion with top-k / nucleus sampling at temperature 0.8.
    # Note: max_length counts the prompt tokens as well as the generated ones.
    output = model_b(prompt,
                     max_length=100,
                     do_sample=True,
                     temperature=0.8,
                     top_k=50,
                     top_p=0.95)
    # The pipeline returns a list of dicts, one per sample; take the first.
    return output[0]["generated_text"]
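
# A minimal usage sketch (assumed entry point, not part of the original file):
# running the module directly prints one sampled continuation of an
# illustrative prompt chosen here for demonstration only.
if __name__ == "__main__":
    sample_prompt = "The quick brown fox"  # hypothetical example prompt
    print(run_model_b(sample_prompt))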