Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -8,7 +8,7 @@ from langchain_google_genai import ChatGoogleGenerativeAI
|
|
8 |
from langchain.chains.question_answering import load_qa_chain
|
9 |
from langchain.prompts import PromptTemplate
|
10 |
|
11 |
-
# Hardcoded Gemini API keys
|
12 |
API_KEYS = [
|
13 |
"REDACTED-GEMINI-API-KEY-1",  # real key removed from rendering — this credential is exposed in git history and must be rotated
|
14 |
"REDACTED-GEMINI-API-KEY-2"  # real key removed from rendering — this credential is exposed in git history and must be rotated
|
@@ -37,7 +37,6 @@ def get_text_chunks(text):
|
|
37 |
def get_vector_store(text_chunks, api_key):
|
38 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=api_key)
|
39 |
vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
|
40 |
-
# Use /tmp for Hugging Face Spaces
|
41 |
vector_store.save_local("/tmp/faiss_index")
|
42 |
|
43 |
def get_conversational_chain(api_key):
|
@@ -57,7 +56,6 @@ def get_conversational_chain(api_key):
|
|
57 |
|
58 |
def user_input(user_question, api_key):
|
59 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=api_key)
|
60 |
-
# Use /tmp for Hugging Face Spaces
|
61 |
new_db = FAISS.load_local("/tmp/faiss_index", embeddings, allow_dangerous_deserialization=True)
|
62 |
docs = new_db.similarity_search(user_question)
|
63 |
chain = get_conversational_chain(api_key)
|
@@ -66,27 +64,47 @@ def user_input(user_question, api_key):
|
|
66 |
|
67 |
def main():
|
68 |
st.set_page_config(page_title="Chat PDF")
|
69 |
-
st.header("Retrieval-Augmented Generation-Gemini")
|
70 |
st.markdown("---")
|
71 |
|
72 |
-
|
73 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
74 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
75 |
if user_question:
|
76 |
-
api_key = user_api_key if user_api_key else switch_api_key()
|
77 |
user_input(user_question, api_key)
|
78 |
|
79 |
-
pdf_docs = st.file_uploader("Upload PDF files", accept_multiple_files=True, type=['pdf'])
|
80 |
-
if st.button("Submit & Process"):
|
81 |
-
if pdf_docs:
|
82 |
-
with st.spinner("Processing..."):
|
83 |
-
api_key = user_api_key if user_api_key else switch_api_key()
|
84 |
-
raw_text = get_pdf_text(pdf_docs)
|
85 |
-
text_chunks = get_text_chunks(raw_text)
|
86 |
-
get_vector_store(text_chunks, api_key)
|
87 |
-
st.success("Done")
|
88 |
-
else:
|
89 |
-
st.error("Please upload at least one PDF file.")
|
90 |
-
|
91 |
if __name__ == "__main__":
|
92 |
-
main()
|
|
|
8 |
from langchain.chains.question_answering import load_qa_chain
|
9 |
from langchain.prompts import PromptTemplate
|
10 |
|
11 |
+
# Hardcoded Gemini API keys (for fallback)
|
12 |
API_KEYS = [
|
13 |
"REDACTED-GEMINI-API-KEY-1",  # real key removed from rendering — this credential is exposed in git history and must be rotated
|
14 |
"REDACTED-GEMINI-API-KEY-2"  # real key removed from rendering — this credential is exposed in git history and must be rotated
|
|
|
37 |
def get_vector_store(text_chunks, api_key):
|
38 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=api_key)
|
39 |
vector_store = FAISS.from_texts(text_chunks, embedding=embeddings)
|
|
|
40 |
vector_store.save_local("/tmp/faiss_index")
|
41 |
|
42 |
def get_conversational_chain(api_key):
|
|
|
56 |
|
57 |
def user_input(user_question, api_key):
|
58 |
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=api_key)
|
|
|
59 |
new_db = FAISS.load_local("/tmp/faiss_index", embeddings, allow_dangerous_deserialization=True)
|
60 |
docs = new_db.similarity_search(user_question)
|
61 |
chain = get_conversational_chain(api_key)
|
|
|
64 |
|
65 |
def main():
|
66 |
st.set_page_config(page_title="Chat PDF")
|
67 |
+
st.header("Retrieval-Augmented Generation - Gemini 2.0 Flash")
|
68 |
st.markdown("---")
|
69 |
|
70 |
+
# STEP 1: Enter API key
|
71 |
+
if "api_entered" not in st.session_state:
|
72 |
+
st.session_state["api_entered"] = False
|
73 |
+
if "pdf_processed" not in st.session_state:
|
74 |
+
st.session_state["pdf_processed"] = False
|
75 |
+
|
76 |
+
if not st.session_state["api_entered"]:
|
77 |
+
user_api_key = st.text_input("Enter your Gemini API key", type="password")
|
78 |
+
if st.button("Continue") and user_api_key:
|
79 |
+
st.session_state["user_api_key"] = user_api_key
|
80 |
+
st.session_state["api_entered"] = True
|
81 |
+
st.experimental_rerun()
|
82 |
+
st.stop()
|
83 |
+
|
84 |
+
api_key = st.session_state.get("user_api_key", switch_api_key())
|
85 |
|
86 |
+
# STEP 2: Upload PDF(s)
|
87 |
+
if not st.session_state["pdf_processed"]:
|
88 |
+
st.subheader("Step 2: Upload your PDF file(s)")
|
89 |
+
pdf_docs = st.file_uploader("Upload PDF files", accept_multiple_files=True, type=['pdf'])
|
90 |
+
if st.button("Submit & Process PDFs"):
|
91 |
+
if pdf_docs:
|
92 |
+
with st.spinner("Processing..."):
|
93 |
+
raw_text = get_pdf_text(pdf_docs)
|
94 |
+
text_chunks = get_text_chunks(raw_text)
|
95 |
+
get_vector_store(text_chunks, api_key)
|
96 |
+
st.session_state["pdf_processed"] = True
|
97 |
+
st.success("PDFs processed! You can now ask questions.")
|
98 |
+
st.experimental_rerun()
|
99 |
+
else:
|
100 |
+
st.error("Please upload at least one PDF file.")
|
101 |
+
st.stop()
|
102 |
+
|
103 |
+
# STEP 3: Ask questions
|
104 |
+
st.subheader("Step 3: Ask a question about your PDFs")
|
105 |
+
user_question = st.text_input("Ask a question")
|
106 |
if user_question:
|
|
|
107 |
user_input(user_question, api_key)
|
108 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
109 |
if __name__ == "__main__":
|
110 |
+
main()
|