from transformers import pipeline

# Chinese GPT-2 model (compatible with the Hugging Face pipeline API).
# Loaded once at import time so repeated calls reuse the same pipeline.
model_b = pipeline(
    "text-generation",
    model="uer/gpt2-chinese-cluecorpussmall",
    tokenizer="uer/gpt2-chinese-cluecorpussmall",
)


def run_model_b(prompt: str) -> str:
    """Generate a sampled Chinese continuation of *prompt* using model B.

    Uses sampling (temperature 0.8, top-k 50, top-p 0.95) with the total
    sequence length capped at 100 tokens, and returns the generated text
    of the first (and only) candidate.
    """
    results = model_b(
        prompt,
        max_length=100,
        do_sample=True,
        temperature=0.8,
        top_k=50,
        top_p=0.95,
    )
    return results[0]["generated_text"]