Commit: more troubleshooting...

app/main.py  CHANGED  (+74 -112)
@@ -3,15 +3,13 @@ from langserve import add_routes
 from langchain_core.runnables import RunnableLambda
 from pydantic import BaseModel
 from typing import List, Literal, Optional, Dict, Any
-import gradio as gr
-from datetime import datetime
 import logging

 # Set up logging
 logger = logging.getLogger(__name__)

 # ─────────────────────────────────────────
-# …
+# Keep existing schemas for backwards compatibility
 # ─────────────────────────────────────────

 class ChatMessage(BaseModel):
@@ -22,7 +20,7 @@ class ChatUIInput(BaseModel):
     messages: List[ChatMessage]

 class ChatFedInput(BaseModel):
-    query: str
+    query: str  # Keep original strict validation
     reports_filter: Optional[str] = ""
     sources_filter: Optional[str] = ""
     subtype_filter: Optional[str] = ""
@@ -30,12 +28,50 @@ class ChatFedInput(BaseModel):
     session_id: Optional[str] = None
     user_id: Optional[str] = None

+# ─────────────────────────────────────────
+# Add new flexible schema for ChatUI compatibility
+# ─────────────────────────────────────────
+
+class FlexibleChatInput(BaseModel):
+    """Flexible input that accepts multiple field names"""
+    query: Optional[str] = None
+    text: Optional[str] = None
+    input: Optional[str] = None
+    prompt: Optional[str] = None
+    content: Optional[str] = None
+    messages: Optional[List[Dict[str, Any]]] = None
+
+    # Keep RAG filters
+    reports_filter: Optional[str] = ""
+    sources_filter: Optional[str] = ""
+    subtype_filter: Optional[str] = ""
+    year_filter: Optional[str] = ""
+    session_id: Optional[str] = None
+    user_id: Optional[str] = None
+
+    def extract_query(self) -> str:
+        """Extract query from any available field"""
+        # Try direct fields
+        for field in [self.query, self.text, self.input, self.prompt, self.content]:
+            if field and field.strip():
+                return field.strip()
+
+        # Try messages
+        if self.messages:
+            for msg in reversed(self.messages):
+                if isinstance(msg, dict) and msg.get('role') == 'user':
+                    content = msg.get('content') or msg.get('text')
+                    if content:
+                        return str(content).strip()
+
+        return ""
+
 class ChatFedOutput(BaseModel):
     result: str
     metadata: Dict[str, Any]

 # ─────────────────────────────────────────
-# …
+# Updated handlers
 # ─────────────────────────────────────────

 def process_chatfed_query_core(query: str) -> ChatFedOutput:
@@ -47,139 +83,65 @@ def process_chatfed_query_core(query: str) -> ChatFedOutput:
         }
     )

-def …
-    """Handler for …
+def flexible_handler(data: FlexibleChatInput) -> ChatFedOutput:
+    """Handler for flexible input that accepts multiple formats"""
     try:
-
-        if isinstance(data, dict):
-            messages = data.get("messages", [])
-
-            # NEW: Handle case where ChatUI sends 'text' instead of 'messages'
-            if not messages and "text" in data:
-                # Convert text to messages format
-                messages = [{"role": "user", "content": data["text"]}]
-
-        else:
-            messages = data.messages if hasattr(data, 'messages') else []
-
-        # Find the last user message
-        last_user_msg = ""
-        for msg in reversed(messages):
-            if isinstance(msg, dict):
-                if msg.get("role") == "user":
-                    last_user_msg = msg.get("content", "")
-                    break
-            else:
-                if hasattr(msg, 'role') and msg.role == "user":
-                    last_user_msg = msg.content
-                    break
+        query = data.extract_query()

-        result = process_chatfed_query_core(query=last_user_msg)
-        result.metadata["source"] = "chatui"
-        return result
-
-    except Exception as e:
-        return ChatFedOutput(
-            result=f"Error processing chat UI request: {str(e)}",
-            metadata={"error": True, "source": "chatui"}
-        )
-
-def langserve_handler(data) -> ChatFedOutput:
-    """Handler for LangServe input - accepts raw dict and returns ChatFedOutput"""
-    try:
-        # Handle both dict and Pydantic model input
-        if isinstance(data, dict):
-            # Try multiple possible field names that ChatUI might send
-            query = (data.get("query") or
-                     data.get("text") or
-                     data.get("input") or
-                     data.get("prompt") or
-                     data.get("content") or "")
-
-            # Also check if it's in messages format
-            if not query and "messages" in data:
-                messages = data["messages"]
-                for msg in reversed(messages):
-                    if isinstance(msg, dict) and msg.get("role") == "user":
-                        query = msg.get("content", "")
-                        break
-        else:
-            # Handle Pydantic model
-            query = data.query if hasattr(data, 'query') else ""
-
         if not query:
-            logger.warning(f"No query found in langserve input: {data}")
             return ChatFedOutput(
-                result="No query …
-                metadata={"error": True, "source": "…
+                result="No query found. Please provide text in one of these fields: query, text, prompt, content, or messages array.",
+                metadata={"error": True, "source": "flexible", "provided_fields": list(data.__dict__.keys())}
             )

         result = process_chatfed_query_core(query)
-        result.metadata…
-
+        result.metadata.update({
+            "source": "flexible",
+            "extracted_query": query,
+            "input_method": "flexible_handler"
+        })
         return result

     except Exception as e:
-        logger.error(f"Error in …
+        logger.error(f"Error in flexible_handler: {str(e)}")
         return ChatFedOutput(
-            result=f"Error …
-            metadata={"error": True, "source": "…
+            result=f"Error: {str(e)}",
+            metadata={"error": True, "source": "flexible"}
         )

[… old lines 129-139, the start of the removed Gradio UI, are not visible in the capture …]
-            with gr.Column(scale=1):
-                btn = gr.Button("Send", variant="primary")
-
-        out = gr.Textbox(label="Response", lines=10)
-
-        def process_query(q: str) -> str:
-            if not q.strip():
-                return "Please enter a query."
-            try:
-                result = process_chatfed_query_core(q)
-                return result.result
-            except Exception as e:
-                return f"Error processing query: {str(e)}"
-
-        btn.click(fn=process_query, inputs=query, outputs=out)
-        query.submit(fn=process_query, inputs=query, outputs=out)  # Allow Enter key
-
-        return demo
+def legacy_langserve_handler(data: ChatFedInput) -> ChatFedOutput:
+    """Original handler for backwards compatibility"""
+    try:
+        result = process_chatfed_query_core(data.query)
+        result.metadata["source"] = "legacy_langserve"
+        return result
+    except Exception as e:
+        return ChatFedOutput(
+            result=f"Error: {str(e)}",
+            metadata={"error": True, "source": "legacy_langserve"}
+        )

 # ─────────────────────────────────────────
-# App …
+# App setup with multiple endpoints
 # ─────────────────────────────────────────

-app = FastAPI(
-    title="ChatFed API",
-    description="API for ChatFed query processing",
-    version="1.0.0"
-)
+app = FastAPI(title="ChatFed API", version="1.0.0")

-# …
+# Legacy endpoint (strict validation)
 add_routes(
     app,
-    RunnableLambda(…
-    path="/chatfed-…
-    input_type=…
+    RunnableLambda(legacy_langserve_handler),
+    path="/chatfed-strict",
+    input_type=ChatFedInput,
     output_type=ChatFedOutput
 )

+# New flexible endpoint for ChatUI (THIS IS WHAT YOU SHOULD USE)
 add_routes(
     app,
-    RunnableLambda(…
+    RunnableLambda(flexible_handler),
     path="/chatfed",
-    input_type=…
+    input_type=FlexibleChatInput,
     output_type=ChatFedOutput
 )
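
For reference, a minimal sketch of how ChatUI-style traffic could exercise the new flexible endpoint. It assumes the Space is reachable at BASE_URL (the host and port are placeholders, adjust them for your deployment) and relies on LangServe's standard convention that add_routes serves the runnable at POST {path}/invoke with the payload wrapped in an "input" key; the requests library is used purely for illustration.

import requests

BASE_URL = "http://localhost:7860"  # assumption: replace with your Space URL

# ChatUI-style payload: only "messages" is set, no "query" field.
payload = {
    "input": {
        "messages": [
            {"role": "user", "content": "What does the latest report say about adaptation funding?"}
        ]
    }
}

resp = requests.post(f"{BASE_URL}/chatfed/invoke", json=payload, timeout=60)
resp.raise_for_status()
body = resp.json()

# LangServe wraps the handler's return value in "output";
# here that is the serialized ChatFedOutput.
print(body["output"]["result"])
print(body["output"]["metadata"])

The legacy route keeps the original strict validation, so the equivalent call against /chatfed-strict/invoke would need {"input": {"query": "..."}}; a messages-only payload should be rejected there with a validation error (HTTP 422).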
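
The extraction precedence in FlexibleChatInput.extract_query() can also be sanity-checked locally without starting the server. A small sketch, assuming the module is importable as app.main from the repository root with the same dependencies installed:

from app.main import FlexibleChatInput

# A direct field (query/text/input/prompt/content) wins even when messages are present.
a = FlexibleChatInput(text="  What is ChatFed?  ",
                      messages=[{"role": "user", "content": "ignored"}])
assert a.extract_query() == "What is ChatFed?"

# With no direct field, the most recent user message is used.
b = FlexibleChatInput(messages=[
    {"role": "user", "content": "first question"},
    {"role": "assistant", "content": "an answer"},
    {"role": "user", "content": "follow-up question"},
])
assert b.extract_query() == "follow-up question"

# Nothing usable: the empty string is what flexible_handler treats as "no query".
assert FlexibleChatInput().extract_query() == ""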