import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM

# Load the FinGPT model and tokenizer from the Hugging Face Hub.
model_name = "AI4FinanceFoundation/FinGPT-Foundation"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")

# Wrap the loaded model and tokenizer in a text-generation pipeline.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

def answer(query):
    # Sample up to 300 new tokens and return the generated text.
    response = pipe(query, max_new_tokens=300, do_sample=True)[0]["generated_text"]
    return response

# Expose the model through a simple text-in/text-out Gradio interface.
gr.Interface(fn=answer, inputs="text", outputs="text", title="FinGPT QA").launch()