# fingpt-qa / app.py
# Uploaded by xkakashi via huggingface_hub (commit 6b88819, verified) — 555 bytes.
import gradio as gr
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
# Model id on the Hugging Face Hub.
# NOTE(review): the org is usually spelled "AI4Finance-Foundation" (with a
# hyphen) on the Hub — confirm this repo id resolves before deploying.
model_name = "AI4FinanceFoundation/FinGPT-Foundation"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# device_map="auto" lets accelerate place the weights on whatever
# hardware is available (GPU if present, otherwise CPU).
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")
# Shared text-generation pipeline used by answer() below.
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
def answer(query):
    """Generate a free-form answer to a financial question.

    Args:
        query: The user's question, passed verbatim as the generation prompt.

    Returns:
        The model's generated continuation as a string.
    """
    # return_full_text=False strips the echoed prompt from the output;
    # without it the pipeline returns prompt + continuation, so the QA
    # box would repeat the user's question back at them.
    result = pipe(
        query,
        max_new_tokens=300,
        do_sample=True,
        return_full_text=False,
    )
    return result[0]["generated_text"]
# Wire the answer() function into a simple text-in / text-out web UI.
demo = gr.Interface(
    fn=answer,
    inputs="text",
    outputs="text",
    title="FinGPT QA",
)
demo.launch()