# career_chatbot / app.py
from dotenv import load_dotenv
import google.generativeai as genai
import json
import os
import requests
from pypdf import PdfReader
import gradio as gr
import re
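
# Packages assumed by these imports (pip install names, which may vary by version):
#   python-dotenv, google-generativeai, pypdf, gradio, requests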
load_dotenv(override=True)
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
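
# Secrets are read from a .env file (via load_dotenv above). Illustrative .env --
# the values below are placeholders, not real credentials:
#   GOOGLE_API_KEY=your-gemini-api-key
#   PUSHOVER_TOKEN=your-pushover-app-token
#   PUSHOVER_USER=your-pushover-user-key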

def push(text):
    """Send a push notification via the Pushover API."""
    requests.post(
        "https://api.pushover.net/1/messages.json",
        data={
            "token": os.getenv("PUSHOVER_TOKEN"),
            "user": os.getenv("PUSHOVER_USER"),
            "message": text,
        },
        timeout=10,
    )

def record_user_details(email, name="Name not provided", notes="not provided"):
    """Notify me (via Pushover) that a visitor wants to get in touch."""
    push(f"Recording interest from {name} with email {email} and notes {notes}")
    return {"recorded": "ok"}


def record_unknown_question(question):
    """Notify me (via Pushover) about a question the chatbot couldn't answer."""
    push(f"Recording {question} asked that I couldn't answer")
    return {"recorded": "ok"}
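
# Illustrative calls, mirroring what Me.handle_tool_call does once Gemini picks a
# tool (the argument values here are hypothetical):
#   record_user_details(email="jane@example.com", name="Jane", notes="met via the site")
#   record_unknown_question(question="What is your notice period?")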

# Gemini tool declarations. Each function needs an OpenAPI-style `parameters`
# schema so the model knows which arguments to supply when it calls the tool.
tools = {
    "function_declarations": [
        {
            "name": "record_user_details",
            "description": "Record that a user is interested in being in touch and provided an email address.",
            "parameters": {
                "type": "object",
                "properties": {
                    "email": {"type": "string", "description": "The user's email address"},
                    "name": {"type": "string", "description": "The user's name, if provided"},
                    "notes": {"type": "string", "description": "Any additional notes, if provided"},
                },
                "required": ["email"],
            },
        },
        {
            "name": "record_unknown_question",
            "description": "Record any question that couldn't be answered as you didn't know the answer.",
            "parameters": {
                "type": "object",
                "properties": {
                    "question": {"type": "string", "description": "The question that couldn't be answered"},
                },
                "required": ["question"],
            },
        },
    ]
}
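
# When Gemini decides to use one of these tools, the candidate's first part
# carries a function_call roughly of this shape (illustrative, not an exact
# proto repr):
#   function_call { name: "record_user_details", args: {"email": "jane@example.com"} }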

class Me:
    """Chatbot persona: the Gemini model plus personal context loaded from disk."""

    def __init__(self):
        self.model = genai.GenerativeModel("gemini-2.0-flash")
        self.name = "Aritra Roy"
        # Pull the profile text out of the exported LinkedIn PDF.
        reader = PdfReader("me/linkedin.pdf")
        self.linkedin = ""
        for page in reader.pages:
            text = page.extract_text()
            if text:
                self.linkedin += text
        # Short written summary used alongside the LinkedIn text.
        with open("me/summary.txt", "r", encoding="utf-8") as f:
            self.summary = f.read()

    def handle_tool_call(self, tool_call):
        tool_name = tool_call.name
        arguments = dict(tool_call.args)
        print(f"Tool called: {tool_name} with args {arguments}", flush=True)
        if tool_name == "record_user_details":
            result = record_user_details(**arguments)
        elif tool_name == "record_unknown_question":
            result = record_unknown_question(**arguments)
        else:
            result = {}
        # Hand the result back to Gemini as a conversation turn whose part carries
        # a function_response (rather than an OpenAI-style tool message).
        return {
            "role": "user",
            "parts": [genai.protos.Part(
                function_response=genai.protos.FunctionResponse(name=tool_name, response=result)
            )],
        }
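
    # handle_tool_call returns a complete turn: the FunctionResponse's `response`
    # field must be a JSON-serialisable mapping, which the {"recorded": "ok"}
    # dicts returned by the tool functions above satisfy.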

    def system_prompt(self):
        prompt = (
            f"You are acting as {self.name}, answering questions on {self.name}'s website, "
            f"particularly questions about {self.name}'s career, background, skills and experience. "
            f"At no point may you say you are an AI assistant. YOU ARE {self.name}, "
            f"ANSWERING POTENTIAL CLIENTS OR HIRERS. Be professional and engaging. "
            f"If you don't know the answer to a question, do not make one up under any circumstances; "
            f"use your record_unknown_question tool instead. If the user shares their email or wants "
            f"to connect, use record_user_details to record it."
        )
        prompt += f"\n\n## Summary:\n{self.summary}\n\n## LinkedIn Profile:\n{self.linkedin}\n\n"
        prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
        return prompt

    def chat(self, message, history):
        messages = [{"role": "user", "parts": [self.system_prompt()]}]
        for msg in history:
            # Gradio's "messages" history uses "assistant"; Gemini expects "model".
            role = "model" if msg["role"] == "assistant" else "user"
            messages.append({"role": role, "parts": [msg["content"]]})
        messages.append({"role": "user", "parts": [message]})
        # Hardcoded fallback: if the user shares an email address (or announces
        # their name), record the details directly rather than relying on a tool call.
        email_match = re.search(r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b", message)
        if email_match or "my name" in message.lower():
            print("User provided email or name, forcing tool fallback...", flush=True)
            email = email_match.group(0) if email_match else "Email not provided"
            record_user_details(email=email, notes=message)
            return "I've recorded your details for review."
        done = False
        while not done:
            response = self.model.generate_content(messages, tools=tools)
            candidate = response.candidates[0]
            part = candidate.content.parts[0]
            # A tool request arrives as a part carrying a function_call, so check
            # the part itself rather than candidate.finish_reason.
            if part.function_call.name:
                tool_response = self.handle_tool_call(part.function_call)
                messages.append({"role": "model", "parts": [part]})
                messages.append(tool_response)
            else:
                done = True
        return part.text
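
# Minimal manual test outside Gradio (hypothetical inputs; history uses the
# "messages" format that gr.ChatInterface(type="messages") passes to Me.chat):
#   me = Me()
#   history = [{"role": "user", "content": "Hi"},
#              {"role": "assistant", "content": "Hello, I'm Aritra Roy."}]
#   print(me.chat("What are your main skills?", history))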

if __name__ == "__main__":
    me = Me()

    # 1. Define the custom theme to match your portfolio
    theme = gr.themes.Default(
        primary_hue="teal",
        font=gr.themes.GoogleFont("Inter")
    ).set(
        body_background_fill="#1a202c",
        body_background_fill_dark="#1a202c",
        button_primary_background_fill="#38b2ac",
        button_primary_background_fill_dark="#38b2ac",
    )

    # 2. Define the CSS to fix the layout and remove the external scrollbar
    custom_css = """
    /* Force the main container to have no padding and fill the iframe */
    .gradio-container {
        padding: 0 !important;
        height: 100vh !important;
    }
    /* Target the ChatInterface container and make it a flex column */
    #component-0 {
        height: 100% !important;
        display: flex !important;
        flex-direction: column !important;
    }
    /* Make the chatbot history grow to fill space and have its own internal scrollbar */
    #component-0 .chatbot {
        flex-grow: 1 !important;
        overflow-y: auto !important;
    }
    """

    # 3. Use gr.Blocks to apply the theme and CSS
    with gr.Blocks(theme=theme, css=custom_css) as demo:
        # 4. Place the ChatInterface inside, ensuring the chatbot's height is dynamic.
        #    type="messages" keeps history in the {"role": ..., "content": ...} format
        #    that Me.chat expects.
        gr.ChatInterface(me.chat, type="messages", chatbot=gr.Chatbot(type="messages"))

    # 5. Launch the final demo
    demo.launch()