from dotenv import load_dotenv
import google.generativeai as genai
import json
import os
import requests
from pypdf import PdfReader
import gradio as gr
import re
load_dotenv(override=True)
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
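

# Send a push notification via the Pushover API.
# Assumes PUSHOVER_TOKEN and PUSHOVER_USER are set in the environment (e.g. in .env).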
def push(text):
    requests.post(
        "https://api.pushover.net/1/messages.json",
        data={
            "token": os.getenv("PUSHOVER_TOKEN"),
            "user": os.getenv("PUSHOVER_USER"),
            "message": text,
        }
    )
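

# Tool implementations: each one fires a Pushover notification and returns a small
# acknowledgement dict for the model.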
def record_user_details(email, name="Name not provided", notes="not provided"):
push(f"Recording interest from {name} with email {email} and notes {notes}")
return {"recorded": "ok"}
def record_unknown_question(question):
push(f"Recording {question} asked that I couldn't answer")
return {"recorded": "ok"}
record_user_details_json = {
"name": "record_user_details",
"description": "Record that a user is interested in being in touch and provided an email address. Expects a JSON object: { 'email': string, 'name': string (optional), 'notes': string (optional) }"
}
record_unknown_question_json = {
"name": "record_unknown_question",
"description": "Record any question that couldn't be answered. Expects a JSON object: { 'question': string }"
}
tools = {
"function_declarations": [
{
"name": "record_user_details",
"description": "Record that a user is interested in being in touch and provided an email address.",
},
{
"name": "record_unknown_question",
"description": "Record any question that couldn't be answered as you didn't know the answer.",
}
]
}
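
# A minimal sketch (not wired into `tools` above): Gemini function declarations can also
# carry an OpenAPI-style "parameters" schema so the model knows exactly which arguments
# to supply, e.g.:
#
#   {
#       "name": "record_user_details",
#       "description": "Record that a user is interested in being in touch.",
#       "parameters": {
#           "type": "OBJECT",
#           "properties": {
#               "email": {"type": "STRING"},
#               "name": {"type": "STRING"},
#               "notes": {"type": "STRING"},
#           },
#           "required": ["email"],
#       },
#   }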
class Me:
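    """Chat persona that answers as the site owner, grounded in me/summary.txt and me/linkedin.pdf."""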
    def __init__(self):
        self.model = genai.GenerativeModel("gemini-2.0-flash")
        self.name = "Aritra Roy"
        reader = PdfReader("me/linkedin.pdf")
        self.linkedin = ""
        for page in reader.pages:
            text = page.extract_text()
            if text:
                self.linkedin += text
        with open("me/summary.txt", "r", encoding="utf-8") as f:
            self.summary = f.read()
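
    # Dispatch a model-requested function call to the matching Python function
    # and package the result so it can be appended to the conversation.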
    def handle_tool_call(self, tool_call):
        tool_name = tool_call.name
        arguments = dict(tool_call.args)
        print(f"Tool called: {tool_name} with args {arguments}", flush=True)
        if tool_name == "record_user_details":
            result = record_user_details(**arguments)
        elif tool_name == "record_unknown_question":
            result = record_unknown_question(**arguments)
        else:
            result = {}
        # Return the result as a Gemini function_response part; the API has no
        # OpenAI-style "tool" role or tool_call_id.
        return {
            "role": "user",
            "parts": [genai.protos.Part(
                function_response=genai.protos.FunctionResponse(name=tool_name, response=result)
            )],
        }
    def system_prompt(self):
        prompt = (
            f"You are acting as {self.name}. You are answering questions on {self.name}'s website, "
            f"particularly questions related to {self.name}'s career, background, skills and experience. "
            f"At no point can you say you are an AI assistant. YOU ARE {self.name}, ANSWERING TO POTENTIAL CLIENTS OR HIRERS. "
            f"Be professional and engaging. If you don't know the answer to a question, under no circumstances should you make up an answer; use your "
            f"record_unknown_question tool. If the user shares their email or wants to connect, use "
            f"record_user_details to record it."
        )
        prompt += f"\n\n## Summary:\n{self.summary}\n\n## LinkedIn Profile:\n{self.linkedin}\n\n"
        prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
        return prompt
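
    # Gradio chat handler: rebuild the Gemini message list from the history each turn,
    # short-circuit when the user appears to share contact details, and otherwise loop
    # until the model stops requesting tool calls.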
    def chat(self, message, history):
        messages = [{"role": "user", "parts": [self.system_prompt()]}]
        for msg in history:
            # Gradio "messages"-style history uses "assistant" for model turns; Gemini expects "model".
            role = "model" if msg["role"] == "assistant" else "user"
            messages.append({"role": role, "parts": [msg["content"]]})
        messages.append({"role": "user", "parts": [message]})
        # Hardcoded fallback: if the user shares an email address (or mentions their name),
        # record the details directly instead of relying on the model to call the tool.
        email_match = re.search(r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b", message)
        if email_match or "my name" in message.lower():
            print("User provided email or name, forcing tool fallback...")
            email = email_match.group(0) if email_match else "not provided"
            record_user_details(email=email, notes=message)
            push(f"User details recorded: {message}")
            return "I've recorded your details for review."
        done = False
        while not done:
            response = self.model.generate_content(
                messages,
                tools=tools,
            )
            candidate = response.candidates[0]
            part = candidate.content.parts[0]
            # A tool request arrives as a function_call part; there is no "TOOL_USE" finish reason.
            if part.function_call.name:
                tool_result = self.handle_tool_call(part.function_call)
                messages.append({"role": "model", "parts": [part]})
                messages.append(tool_result)
            else:
                done = True
        return candidate.content.parts[0].text
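

# Running this script expects a .env with GOOGLE_API_KEY, PUSHOVER_TOKEN and PUSHOVER_USER,
# plus me/summary.txt and me/linkedin.pdf alongside the app.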
if __name__ == "__main__":
    me = Me()

    # 1. Define the custom theme to match your portfolio
    theme = gr.themes.Default(
        primary_hue="teal",
        font=gr.themes.GoogleFont("Inter")
    ).set(
        body_background_fill="#1a202c",
        body_background_fill_dark="#1a202c",
        button_primary_background_fill="#38b2ac",
        button_primary_background_fill_dark="#38b2ac",
    )
    # 2. Define the CSS to fix the layout and remove the external scrollbar
    custom_css = """
    /* Force the main container to have no padding and fill the iframe */
    .gradio-container {
        padding: 0 !important;
        height: 100vh !important;
    }
    /* Target the ChatInterface container and make it a flex column */
    #component-0 {
        height: 100% !important;
        display: flex !important;
        flex-direction: column !important;
    }
    /* Make the chatbot history grow to fill space and have its own internal scrollbar */
    #component-0 .chatbot {
        flex-grow: 1 !important;
        overflow-y: auto !important;
    }
    """
    # 3. Use gr.Blocks to apply the theme and CSS
    with gr.Blocks(theme=theme, css=custom_css) as demo:
        # 4. Place the ChatInterface inside, ensuring the chatbot's height is dynamic.
        # type="messages" makes the history a list of role/content dicts, which chat() expects.
        gr.ChatInterface(me.chat, chatbot=gr.Chatbot(type="messages"), type="messages")

    # 5. Launch the final demo
    demo.launch()