Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -18,12 +18,20 @@ def set_api_key(api_key):
|
|
| 18 |
# --- Chat function ---
|
| 19 |
def chat(user_input, history):
|
| 20 |
global knowledge_base, global_api_key
|
|
|
|
|
|
|
| 21 |
if not global_api_key:
|
| 22 |
return history, "⚠️ Please set your Gemini API key first."
|
| 23 |
|
| 24 |
if not knowledge_base:
|
| 25 |
return history, "⚠️ Knowledge base is empty."
|
| 26 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
# Check if user input exists in knowledge base
|
| 28 |
if user_input.lower() in knowledge_base.lower():
|
| 29 |
reply = "📖 Based on knowledge base: " + user_input
|
|
@@ -32,12 +40,12 @@ def chat(user_input, history):
|
|
| 32 |
try:
|
| 33 |
client = genai.Client(api_key=global_api_key)
|
| 34 |
chat_session = client.chats.create(model="gemini-2.5-flash")
|
| 35 |
-
response = chat_session.send_message(
|
| 36 |
reply = response.text
|
| 37 |
except Exception as e:
|
| 38 |
reply = f"❌ Error calling Gemini model: {e}"
|
| 39 |
|
| 40 |
-
history
|
| 41 |
return history, ""
|
| 42 |
|
| 43 |
# --- Gradio UI ---
|
|
|
|
# --- Chat function ---
def chat(user_input, history):
    """Answer *user_input* from the knowledge base, falling back to Gemini.

    Parameters
    ----------
    user_input : str
        The user's current message.
    history : list[tuple[str, str]] | None
        Prior (user, bot) message pairs; replaced by a fresh list when falsy.

    Returns
    -------
    tuple[list, str]
        (updated history, "") on success — the empty string clears the input
        box — or (unchanged history, warning message) when a precondition
        fails.
    """
    global knowledge_base, global_api_key
    history = history or []

    # Preconditions: an API key and a non-empty knowledge base are required.
    if not global_api_key:
        return history, "⚠️ Please set your Gemini API key first."

    if not knowledge_base:
        return history, "⚠️ Knowledge base is empty."

    # Combine knowledge base + chat history + current input into one prompt.
    # str.join instead of repeated += avoids quadratic concatenation as the
    # history grows.
    parts = [f"Knowledge base:\n{knowledge_base}\n\nChat history:\n"]
    for user_msg, bot_msg in history:
        parts.append(f"User: {user_msg}\nBot: {bot_msg}\n")
    parts.append(f"User: {user_input}")
    context_text = "".join(parts)

    # Check if user input exists in knowledge base
    if user_input.lower() in knowledge_base.lower():
        reply = "📖 Based on knowledge base: " + user_input
    else:
        # NOTE(review): two context lines are elided in the source diff here;
        # an `else:` is assumed so the Gemini call does not clobber the
        # knowledge-base reply above — confirm against the full file.
        try:
            client = genai.Client(api_key=global_api_key)
            chat_session = client.chats.create(model="gemini-2.5-flash")
            # BUG FIX: send_message() accepts generation settings only via
            # `config=`; the original bare `max_output_tokens=300` kwarg
            # raises TypeError, which the except below masks as a ❌ reply.
            response = chat_session.send_message(
                context_text,
                config=genai.types.GenerateContentConfig(max_output_tokens=300),
            )
            reply = response.text
        except Exception as e:
            reply = f"❌ Error calling Gemini model: {e}"

    history.append((user_input, reply))  # store chat history
    return history, ""
|
| 50 |
|
| 51 |
# --- Gradio UI ---
|