# model_wrappers/model_a.py
from transformers import pipeline

# Load the GPT-2 text-generation pipeline once at import time so repeated
# calls to run_model_a reuse the same model instance.
model_a = pipeline("text-generation", model="gpt2")


def run_model_a(prompt: str) -> str:
    # Greedy decoding (do_sample=False); max_length=100 caps the total
    # sequence length, prompt tokens included.
    output = model_a(prompt, max_length=100, do_sample=False)
    return output[0]["generated_text"]
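

# Minimal usage sketch, as an assumption rather than part of the original
# module: the prompt string below is illustrative only.
if __name__ == "__main__":
    sample_prompt = "Once upon a time"
    print(run_model_a(sample_prompt))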