#!/usr/bin/env python3
import os
import yaml
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from openai import OpenAI
from tavily import TavilyClient
from dotenv import load_dotenv
load_dotenv()
# Initialize clients
tavily_client = TavilyClient(api_key=os.getenv("TAVILY_API_KEY"))
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
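# Both keys are expected in the environment (e.g. via the .env file loaded
# above); depending on the client library version, a missing key may only
# surface as an auth error, so validating them at startup can help.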
# FastAPI app
app = FastAPI()
# Configure CORS
origins = ["*"]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Simple wrapper class for Tavily web search
class TavilySearch:
    def run(self, search_query: str) -> str:
        """
        Searches the web for the given query and returns the results as a
        single formatted string.
        """
        try:
            response = tavily_client.search(
                query=search_query, search_depth="advanced", max_results=3
            )
            # Format the results into a single string
            results = "\n\n".join(
                f"Title: {res['title']}\nURL: {res['url']}\nContent: {res['content']}"
                for res in response.get("results", [])
            )
            return results
        except Exception as e:
            print(f"Error during Tavily search: {e}")
            return "Could not retrieve web search results."
# Load a named prompt from a YAML file
def load_prompt_from_yaml(file_path: str, prompt_name: str) -> str:
    with open(file_path, "r") as file:
        prompts = yaml.safe_load(file)
    return prompts[prompt_name]

prompt_text = load_prompt_from_yaml("prompts.yaml", "titi_prompt")
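# prompts.yaml is expected to contain a top-level mapping with a
# "titi_prompt" key. A minimal sketch of that file (the persona text below
# is illustrative, not the actual prompt shipped with this repo):
#
#   titi_prompt: |
#     You are Titi, a witty assistant that roasts Ebuka using the latest
#     web gossip. Keep replies short and playful.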
tavily_tool = TavilySearch()
# Data models for the API
class MessageInput(BaseModel):
    message: str

class MessageResponse(BaseModel):
    reply: str
# Conversation history in the OpenAI chat format. Note that this is
# module-level state, so it is shared across all clients and grows without
# bound for the lifetime of the process.
conversation_history = [{"role": "system", "content": prompt_text}]
@app.post("/chat", response_model=MessageResponse)
async def chat_with_titi(message_input: MessageInput):
    global conversation_history
    try:
        # User input
        user_input = message_input.message
        # Use the Tavily search tool to fetch relevant web data
        web_data = tavily_tool.run(user_input)
        # Combine the web data and the user input into one prompt for the model
        combined_input = (
            f"Based on this web data: {web_data}\n\n"
            f"Respond to this user message: {user_input}"
        )
        # Append the combined message to the conversation history
        conversation_history.append({"role": "user", "content": combined_input})
        # Generate the response using the OpenAI model
        response = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=conversation_history,
            temperature=0.7,
        )
        model_reply = response.choices[0].message.content
        # Append the assistant's response to the conversation history
        conversation_history.append({"role": "assistant", "content": model_reply})
        # Return the model's reply
        return MessageResponse(reply=model_reply)
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
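
# A minimal way to run the server locally, assuming uvicorn is installed
# (the Space itself may launch the app differently):
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)

# Example request once the server is up (hypothetical message):
#   curl -X POST http://localhost:8000/chat \
#     -H "Content-Type: application/json" \
#     -d '{"message": "What has Ebuka been up to lately?"}'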