Upload app.py with huggingface_hub
app.py
CHANGED
@@ -1,10 +1,11 @@
 
-import gradio as gr
 from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
 
-
-
+base_model = "meta-llama/Llama-2-7b-chat-hf"  # base tokenizer
+adapter = "FinGPT/fingpt-mt_llama2-7b_lora"
 
+tokenizer = AutoTokenizer.from_pretrained(base_model)
+model = AutoModelForCausalLM.from_pretrained(adapter, device_map="auto")
 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
 
 chat_history = []
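
Note that the new loader points AutoModelForCausalLM.from_pretrained at a LoRA adapter repository; this relies on transformers' PEFT integration and generally requires the peft package to be installed, in which case the base model named in the adapter's adapter_config.json is fetched and the adapter weights are applied automatically. Below is a minimal sketch of the equivalent explicit loading path. It assumes peft is available on the Space and that a float16 copy of the base model fits on the hardware; neither assumption is stated in this commit.

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from peft import PeftModel  # assumption: peft is a declared dependency

base_model = "meta-llama/Llama-2-7b-chat-hf"
adapter = "FinGPT/fingpt-mt_llama2-7b_lora"

tokenizer = AutoTokenizer.from_pretrained(base_model)

# Load the base Llama-2 chat model, then wrap it with the FinGPT LoRA adapter.
base = AutoModelForCausalLM.from_pretrained(
    base_model, torch_dtype=torch.float16, device_map="auto"
)
model = PeftModel.from_pretrained(base, adapter)  # applies adapter weights without merging

pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

Either path yields the same pipeline object the rest of app.py builds on; the one-line form in the commit is simply the shorthand for the explicit base-plus-adapter sequence shown here.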