import gradio as gr
import os
import time
# ------------------------------------------------------------------
# 1. Load Environment Variables
# ------------------------------------------------------------------
api_key = os.environ.get("GEMINI_API_KEY")
if not api_key:
    print(
        "Warning: GEMINI_API_KEY not found. Please set it in your Hugging Face Spaces Secrets."
    )
# ------------------------------------------------------------------
# 2. Initialize Backend
# ------------------------------------------------------------------
print("--- Starting FeiMatrix Synapse System ---")
try:
    from database.setup import initialize_system
    from core.agent import SmartAIAgent
    print("Core modules imported successfully.")
    registered_tools, tool_recommender = initialize_system()
    print("System database and tool recommender initialized successfully.")
    agent = SmartAIAgent(
        tool_recommender=tool_recommender,
        registered_tools=registered_tools,
        api_key=api_key,
    )
    print("AI Agent Core created successfully.")
except Exception as e:
    print(f"A critical error occurred during system initialization: {e}")
    agent = None
print("--- FeiMatrix Synapse is ready ---")
# ------------------------------------------------------------------
# 3. Gradio Event Handler Functions
# ------------------------------------------------------------------
def handle_user_message(user_input, history):
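    """Echo the user's message into the chat history and clear the textbox.

    The bot slot of the new history entry is left as None; generate_bot_response
    fills it in afterwards.
    """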
    if not user_input.strip():
        return "", history
    history.append([user_input, None])
    return "", history
def generate_bot_response(history):
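    """Stream the agent's reply into the last history entry, yielding the updated history each time."""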
    if agent is None:
        history[-1][1] = "Sorry, the AI Assistant system failed to initialize and is currently unavailable."
        yield history
        return
    user_question = history[-1][0]
    bot_message = ""
    history[-1][1] = bot_message
    try:
        for chunk in agent.stream_run(user_question):
            bot_message += chunk
            history[-1][1] = bot_message
            yield history
            time.sleep(0.01)
    except Exception as e:
        error_message = f"\n\n**Sorry, an error occurred while processing your request:**\n```\n{type(e).__name__}: {str(e)}\n```"
        history[-1][1] += error_message
        import traceback
        traceback.print_exc()
        yield history
# ------------------------------------------------------------------
# 4. Create Gradio Interface
# ------------------------------------------------------------------
custom_css = """
#chatbot .message-bubble-content { color: #000000 !important; }
#chatbot code { background-color: #f0f0f0; border-radius: 4px; padding: 2px 4px; color: #c7254e; }
#chatbot pre { background-color: #f5f5f5; padding: 10px; border-radius: 5px; }
.footer { text-align: center; color: #777; font-size: 0.9em; padding-top: 20px; }
"""
with gr.Blocks(
    theme=gr.themes.Soft(primary_hue="teal", secondary_hue="lime"),
    css=custom_css,
    title="FeiMatrix Synapse",
) as demo:
    gr.Markdown(
        """
# 🚀 FeiMatrix Synapse - Intelligent AI Assistant
---
### Core Concept & Philosophy
**FeiMatrix Synapse** is a **Proof of Concept** for an advanced AI assistant.
Its core objective is to demonstrate an AI workflow designed to **autonomously understand, plan, and utilize external tools** to solve problems.
### The Demo Flow Explained
When you ask a question, you can clearly observe each step of the AI's "thought process":
1. **🤔 Analyzing the Problem**: The AI first understands your natural language command.
2. **🔍 Recommending Tools**: The system searches its internal "Tool Library" using vector similarity to recommend the most relevant tools.
3. **🧠 AI Decision-Making**: The AI Brain (Gemini Model) makes the final choice from the recommended tools and extracts the necessary parameters from your query.
4. **⚙️ Executing the Tool**: The system invokes the corresponding Python function (e.g., an API call or web scraper) to fetch external, real-time information.
5. **✍️ Generating the Answer**: The AI synthesizes the data returned by the tool with your original question to generate a final, natural-language response.
---
### ⚠️ Important Disclaimer
- **This is a technical demonstration, not a production-ready application.**
- **All data returned by the tools (e.g., stock prices, news) is simulated or non-real-time and intended for demonstration purposes only. Do not use it for any real-world decisions.**
- **The primary focus of this project is to showcase the AI's "chain of thought" and "tool-using" capabilities.**
"""
    )
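
    # A compressed illustration of the five-step flow described above. This is not the
    # actual core.agent implementation (which lives outside this file), and the helper
    # names are hypothetical:
    #
    #     candidates = tool_recommender.recommend(user_question)            # step 2 (assumed method)
    #     tool_name, params = gemini_pick_tool(user_question, candidates)   # step 3 (hypothetical)
    #     observation = registered_tools[tool_name](**params)               # step 4 (assumes a name -> callable mapping)
    #     answer = gemini_answer(user_question, observation)                # step 5 (hypothetical)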
    chatbot = gr.Chatbot(
        [],
        elem_id="chatbot",
        label="Chat Window",
        height=650,
        avatar_images=(None, "assets/web-app-manifest-192x192.png"),
    )
    with gr.Row():
        text_input = gr.Textbox(
            scale=4,
            show_label=False,
            placeholder="Ask a question, e.g., 'What is the stock price of Apple (AAPL)?' or 'What's the latest news on AI-driven drug discovery?'",
            container=False,
        )
        submit_button = gr.Button("Send", variant="primary", scale=1, min_width=150)
    gr.Examples(
        examples=[
            "What is the stock price of Apple (AAPL)?",
            "What is the latest news about AI-driven drug discovery?",
            "Hello, what can you do?",
            "Write a quicksort algorithm in Python",
        ],
        inputs=text_input,
        label="Examples",
    )
    gr.Markdown(
        """
---
<div class="footer">
<p><strong>FeiMatrix Synapse v1.0</strong></p>
<p>This project was conceived and directed by <strong>FeiMatrix</strong>, who also led the debugging and deployment.</p>
<p>The system architecture, core program, and interface were co-developed with <strong>Gemini 2.5 Pro (AI)</strong>.</p>
</div>
""",
        elem_classes="footer",
    )
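
    # Wiring: each submission runs in two stages. handle_user_message echoes the user's
    # text into the chat immediately (queue=False bypasses the queue), then
    # generate_bot_response streams the agent's answer into that same history entry.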
    submit_event = text_input.submit(
        fn=handle_user_message,
        inputs=[text_input, chatbot],
        outputs=[text_input, chatbot],
        queue=False,
    ).then(fn=generate_bot_response, inputs=[chatbot], outputs=[chatbot])
    submit_button.click(
        fn=handle_user_message,
        inputs=[text_input, chatbot],
        outputs=[text_input, chatbot],
        queue=False,
    ).then(fn=generate_bot_response, inputs=[chatbot], outputs=[chatbot])
# ------------------------------------------------------------------
# 5. Launch the Application
# ------------------------------------------------------------------
if __name__ == "__main__":
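    # Gradio streams generator outputs (generate_bot_response) through the queue,
    # so enable it before launching.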
    demo.queue()
    demo.launch(debug=True, favicon_path="assets/favicon.ico")