Spaces:
Runtime error
Runtime error
tomas.helmfridsson committed on
Commit Β·
1e5b35e
1
Parent(s): f9a8906
update 43 42 worked but short answer
Browse files
app.py
CHANGED
|
@@ -1,148 +1,132 @@
|
|
| 1 |
-
# ββ app.py βββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
|
| 2 |
-
import os, logging,
|
| 3 |
import gradio as gr
|
| 4 |
-
from transformers import pipeline
|
| 5 |
from langchain_community.document_loaders import PyPDFLoader
|
| 6 |
from langchain_community.vectorstores import FAISS
|
| 7 |
from langchain_huggingface.embeddings import HuggingFaceEmbeddings
|
| 8 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
| 9 |
|
| 10 |
-
# ββ
|
| 11 |
DOCS_DIR = "document"
|
| 12 |
INDEX_DIR = "faiss_index"
|
| 13 |
EMB_MODEL = "KBLab/sentence-bert-swedish-cased"
|
| 14 |
LLM_MODEL = "tiiuae/falcon-rw-1b"
|
| 15 |
|
| 16 |
-
CHUNK_SIZE =
|
| 17 |
-
CHUNK_OVERLAP =
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
K = 10
|
| 21 |
DEFAULT_TEMP = 0.3
|
| 22 |
|
| 23 |
-
# ββ LOGGING ββββββββββββββββββββββββββββββββββββββββββββββ
|
| 24 |
-
logging.basicConfig(level=logging.INFO, format="%(asctime)s
|
| 25 |
-
|
| 26 |
|
| 27 |
-
# ββ 1) Index (bygg eller ladda) βββββββββββββββββββββββββ
|
| 28 |
emb = HuggingFaceEmbeddings(model_name=EMB_MODEL)
|
| 29 |
-
|
| 30 |
if os.path.isdir(INDEX_DIR):
|
| 31 |
-
|
| 32 |
vs = FAISS.load_local(INDEX_DIR, emb)
|
| 33 |
else:
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
chunk_overlap=CHUNK_OVERLAP)
|
| 37 |
-
docs, files = [], []
|
| 38 |
for fn in os.listdir(DOCS_DIR):
|
| 39 |
if fn.lower().endswith(".pdf"):
|
| 40 |
-
|
| 41 |
-
chunks = splitter.split_documents(pages)
|
| 42 |
for c in chunks:
|
| 43 |
c.metadata["source"] = fn
|
| 44 |
-
docs.extend(chunks)
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
vs.save_local(INDEX_DIR)
|
| 48 |
-
logger.info(f"β
Sparade index ({len(files)}β―PDF, {len(docs)}β―chunkar)")
|
| 49 |
|
| 50 |
retriever = vs.as_retriever(search_kwargs={"k": K})
|
| 51 |
|
| 52 |
-
# ββ 2) LLMβpipeline βββββββββββββββββββββββββββββββββββββ
|
| 53 |
-
|
| 54 |
-
gen_pipe
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
out = []
|
| 75 |
-
for i, d in enumerate(docs, 1):
|
| 76 |
-
src = d.metadata.get("source", "okΓ€nd")
|
| 77 |
-
snippet = d.page_content.replace("\n", " ")[:160]
|
| 78 |
-
out.append(f"{i}. ({src}) β¦{snippet}β¦")
|
| 79 |
-
return "\n\n".join(out)
|
| 80 |
-
|
| 81 |
-
# ββ 4) Chatβfunktionen (exakt 3β―param, 2β―retur) βββββββββ
|
| 82 |
-
def chat_fn(query: str, temperature: float, history: list[dict]):
|
| 83 |
-
history = history or []
|
| 84 |
-
history.append({"role": "user", "content": query})
|
| 85 |
-
|
| 86 |
-
# HΓ€mtar K chunkar
|
| 87 |
-
docs = retriever.get_relevant_documents(query)
|
| 88 |
-
if not docs:
|
| 89 |
-
ans = "π« Hittade inget relevant innehΓ₯ll i dokumenten."
|
| 90 |
-
history.append({"role": "assistant", "content": ans})
|
| 91 |
-
return history, history
|
| 92 |
-
|
| 93 |
-
# Bygg kontext och trimma
|
| 94 |
-
context = "\n\n---\n\n".join(d.page_content for d in docs)
|
| 95 |
-
context = truncate_tokens(context, CTX_TOKEN_MAX)
|
| 96 |
-
|
| 97 |
-
prompt = textwrap.dedent(f"""
|
| 98 |
Du Γ€r en hjΓ€lpsam assistent som svarar pΓ₯ svenska.
|
| 99 |
-
Kontext (ur PDFβdokument):
|
| 100 |
|
| 101 |
{context}
|
| 102 |
|
| 103 |
FrΓ₯ga: {query}
|
| 104 |
Svar (svenska):""").strip()
|
| 105 |
|
| 106 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 107 |
|
| 108 |
try:
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
|
| 114 |
-
|
| 115 |
-
|
|
|
|
|
|
|
| 116 |
except Exception as e:
|
| 117 |
-
|
| 118 |
-
|
| 119 |
|
| 120 |
-
src_hint = docs[0].metadata
|
| 121 |
-
history.append({"role": "assistant",
|
| 122 |
-
"content": f"**(KΓ€lla: {src_hint})**\n\n{resp}"})
|
| 123 |
return history, history
|
| 124 |
|
| 125 |
-
# ββ
|
| 126 |
with gr.Blocks() as demo:
|
| 127 |
-
gr.Markdown("# π
|
| 128 |
-
gr.Markdown(f"**PDFβfiler
|
| 129 |
|
| 130 |
with gr.Row():
|
| 131 |
-
|
| 132 |
-
|
| 133 |
-
|
| 134 |
|
| 135 |
with gr.Row():
|
| 136 |
-
q_in = gr.Textbox(placeholder="Ex: Vad
|
| 137 |
temp = gr.Slider(0, 1, value=DEFAULT_TEMP, step=0.05, label="Temperatur")
|
| 138 |
-
|
| 139 |
|
| 140 |
chat = gr.Chatbot(type="messages", label="Chat")
|
| 141 |
chat_hist = gr.State([])
|
| 142 |
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
send.click(chat_fn, inputs=[q_in, temp, chat_hist], outputs=[chat, chat_hist])
|
| 146 |
|
| 147 |
if __name__ == "__main__":
|
| 148 |
-
demo.launch(share=True)
|
|
|
|
# ── app.py ───────────────────────────────────────────────────────────
# Swedish RAG chat: FAISS retrieval over local PDF files feeding a
# small causal LM, served through a Gradio UI.
import logging
import os
import textwrap

import gradio as gr
from transformers import pipeline, AutoTokenizer
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain_huggingface.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter

# ── CONFIG ───────────────────────────────────────────────────────────
DOCS_DIR = "document"        # folder scanned for *.pdf source documents
INDEX_DIR = "faiss_index"    # where the FAISS index is persisted between runs
EMB_MODEL = "KBLab/sentence-bert-swedish-cased"  # Swedish sentence embeddings
LLM_MODEL = "tiiuae/falcon-rw-1b"                # generator model

CHUNK_SIZE = 400         # characters per text chunk
CHUNK_OVERLAP = 40       # character overlap between adjacent chunks
CTX_TOK_MAX = 750        # context token budget; leaves margin for question + answer
MAX_NEW_TOKENS = 128     # cap on generated answer length
K = 10                   # number of chunks retrieved per query
DEFAULT_TEMP = 0.3       # default sampling temperature shown in the UI

# ── LOGGING ──────────────────────────────────────────────────────────
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
log = logging.getLogger(__name__)
# ── 1) Index (build or load) ─────────────────────────────────────────
emb = HuggingFaceEmbeddings(model_name=EMB_MODEL)

if os.path.isdir(INDEX_DIR):
    log.info(f"🔄 Laddar index från {INDEX_DIR}")
    # allow_dangerous_deserialization is required by recent langchain
    # releases to unpickle a locally built index; without it load_local
    # raises a ValueError at runtime. Safe here: the index is our own.
    vs = FAISS.load_local(INDEX_DIR, emb, allow_dangerous_deserialization=True)
else:
    splitter = RecursiveCharacterTextSplitter(chunk_size=CHUNK_SIZE, chunk_overlap=CHUNK_OVERLAP)
    docs, pdfs = [], []
    for fn in os.listdir(DOCS_DIR):
        if fn.lower().endswith(".pdf"):
            chunks = splitter.split_documents(PyPDFLoader(os.path.join(DOCS_DIR, fn)).load())
            for c in chunks:
                c.metadata["source"] = fn  # remember originating file for citation
            docs.extend(chunks)
            pdfs.append(fn)
    if not docs:
        # FAISS.from_documents([]) fails with an opaque error; fail loudly instead.
        raise RuntimeError(f"Inga PDF-chunkar hittades i '{DOCS_DIR}' – kan inte bygga index.")
    vs = FAISS.from_documents(docs, emb)
    vs.save_local(INDEX_DIR)
    log.info(f"✅ Byggt index – {len(pdfs)} PDF / {len(docs)} chunkar")

retriever = vs.as_retriever(search_kwargs={"k": K})
# ── 2) LLM pipeline & tokenizer ──────────────────────────────────────
log.info("🚀 Initierar LLM …")
# device=-1 forces CPU inference; max_new_tokens bounds every generation call.
gen_pipe = pipeline("text-generation", model=LLM_MODEL, device=-1, max_new_tokens=MAX_NEW_TOKENS)
# Separate tokenizer instance used for token counting in build_prompt/chat_fn.
tokenizer = AutoTokenizer.from_pretrained(LLM_MODEL)
log.info("✅ LLM klar")
# ── 3) Helper functions ──────────────────────────────────────────────
def build_prompt(query: str, docs) -> str:
    """Build the Swedish RAG prompt from as many chunks as fit.

    Greedily packs ``docs`` (in retrieval order) into the context until
    adding the next chunk would exceed ``CTX_TOK_MAX`` tokens.  If even
    the first chunk is larger than the budget, it is truncated to the
    budget rather than dropped, so the model never receives an empty
    context when relevant chunks exist.

    Args:
        query: the user's question, interpolated verbatim.
        docs:  retrieved langchain Documents (most relevant first).

    Returns:
        The full prompt string, stripped of surrounding whitespace.
    """
    context_parts = []
    total_ctx_tok = 0
    for d in docs:
        ids = tokenizer.encode(d.page_content)
        if total_ctx_tok + len(ids) > CTX_TOK_MAX:
            if not context_parts:
                # First chunk alone is over budget: keep a truncated slice
                # instead of producing an empty context.
                context_parts.append(
                    tokenizer.decode(ids[:CTX_TOK_MAX], skip_special_tokens=True)
                )
            break
        context_parts.append(d.page_content)
        total_ctx_tok += len(ids)

    context = "\n\n---\n\n".join(context_parts)
    # Content lines are deliberately unindented: dedent() would not strip
    # indentation once a multi-line {context} is interpolated.
    return textwrap.dedent(f"""\
Du är en hjälpsam assistent som svarar på svenska.
Kontext (hämtat ur PDF-dokument):

{context}

Fråga: {query}
Svar (svenska):""").strip()
def test_retrieval(q):
    """Quick retrieval sanity check (no LLM): list the top-K chunks for *q*.

    Returns one numbered line per retrieved chunk, showing the source
    file and the first 160 characters, or a no-hits marker.
    """
    docs = retriever.invoke(q)
    # .get() instead of ['source']: a chunk missing the key must not crash the UI.
    lines = [
        f"{i + 1}. ({d.metadata.get('source', 'okänd')}) {d.page_content[:160]}…"
        for i, d in enumerate(docs)
    ]
    return "\n\n".join(lines) or "🚫 Inga träffar"
def chat_fn(q, temp, history):
    """Answer *q* via RAG and append the exchange to *history*.

    Args:
        q:       the user's question.
        temp:    sampling temperature from the UI slider (0–1).
        history: running list of Chatbot message dicts (or None).

    Returns:
        The updated history twice — once for the Chatbot component and
        once for the gr.State holding the conversation.
    """
    history = history or []
    history.append({"role": "user", "content": q})

    docs = retriever.invoke(q)
    if not docs:
        history.append({"role": "assistant", "content": "🚫 Hittade inget relevant."})
        return history, history

    prompt = build_prompt(q, docs)
    log.info(f"Prompt tokens={len(tokenizer.encode(prompt))} temp={temp}")

    # transformers requires temperature > 0 when sampling; the slider allows 0,
    # so fall back to greedy decoding instead of letting generation raise.
    do_sample = float(temp) > 0.0
    gen_kwargs = {
        "max_new_tokens": MAX_NEW_TOKENS,
        "pad_token_id": tokenizer.eos_token_id,
        "eos_token_id": tokenizer.eos_token_id,
        "do_sample": do_sample,
        "return_full_text": False,
    }
    if do_sample:
        gen_kwargs["temperature"] = float(temp)

    try:
        ans = gen_pipe(prompt, **gen_kwargs)[0]["generated_text"]
    except Exception as e:
        log.exception("Genererings-fel")
        ans = f"❌ Fel: {e}"

    # Cite the top-ranked chunk's source file; .get() so a missing key can't crash.
    src_hint = docs[0].metadata.get("source", "okänd")
    history.append({"role": "assistant", "content": f"**(Källa: {src_hint})**\n\n{ans}"})
    return history, history
# ── 4) Gradio UI ─────────────────────────────────────────────────────
with gr.Blocks() as demo:
    gr.Markdown("# 📚 Svensk RAG-chat")
    # List the PDFs found at startup so the user can see what is indexed.
    gr.Markdown(f"**PDF-filer:** {', '.join(os.listdir(DOCS_DIR)) or '–'}")

    # Row 1: retrieval-only debug tool (no LLM call).
    with gr.Row():
        q_test = gr.Textbox(label="🔍 Test-Retrieval")
        b_test = gr.Button("Testa")
        o_test = gr.Textbox(label="Chunkar")

    # Row 2: the actual chat input plus sampling temperature.
    with gr.Row():
        q_in = gr.Textbox(label="Fråga", placeholder="Ex: Vad handlar dokumenten om?")
        temp = gr.Slider(0, 1, value=DEFAULT_TEMP, step=0.05, label="Temperatur")
        b_send = gr.Button("Skicka")

    chat = gr.Chatbot(type="messages", label="Chat")
    chat_hist = gr.State([])  # conversation history persisted across clicks

    b_test.click(test_retrieval, inputs=[q_test], outputs=[o_test])
    b_send.click(chat_fn, inputs=[q_in, temp, chat_hist], outputs=[chat, chat_hist])

if __name__ == "__main__":
    demo.launch(share=True)  # drop share=True to keep the app private