mtyrrell committed on
Commit
e583f25
·
1 Parent(s): 2ade6fb

more troubleshooting

Browse files
Files changed (1) hide show
  1. app/main.py +289 -40
app/main.py CHANGED
@@ -1,15 +1,21 @@
 
 
 
1
  from fastapi import FastAPI, Request
2
  from langserve import add_routes
3
  from langchain_core.runnables import RunnableLambda
4
  from pydantic import BaseModel
5
  from typing import List, Literal, Optional, Dict, Any
 
 
6
  import logging
7
 
8
  # Set up logging
 
9
  logger = logging.getLogger(__name__)
10
 
11
  # ─────────────────────────────────────────
12
- # Keep existing schemas for backwards compatibility
13
  # ─────────────────────────────────────────
14
 
15
  class ChatMessage(BaseModel):
@@ -19,8 +25,9 @@ class ChatMessage(BaseModel):
19
  class ChatUIInput(BaseModel):
20
  messages: List[ChatMessage]
21
 
 
22
  class ChatFedInput(BaseModel):
23
- query: str # Keep original strict validation
24
  reports_filter: Optional[str] = ""
25
  sources_filter: Optional[str] = ""
26
  subtype_filter: Optional[str] = ""
@@ -28,20 +35,20 @@ class ChatFedInput(BaseModel):
28
  session_id: Optional[str] = None
29
  user_id: Optional[str] = None
30
 
31
- # ─────────────────────────────────────────
32
- # Add new flexible schema for ChatUI compatibility
33
- # ─────────────────────────────────────────
34
-
35
  class FlexibleChatInput(BaseModel):
36
- """Flexible input that accepts multiple field names"""
 
37
  query: Optional[str] = None
38
  text: Optional[str] = None
39
  input: Optional[str] = None
40
  prompt: Optional[str] = None
41
  content: Optional[str] = None
 
 
42
  messages: Optional[List[Dict[str, Any]]] = None
43
 
44
- # Keep RAG filters
45
  reports_filter: Optional[str] = ""
46
  sources_filter: Optional[str] = ""
47
  subtype_filter: Optional[str] = ""
@@ -50,13 +57,13 @@ class FlexibleChatInput(BaseModel):
50
  user_id: Optional[str] = None
51
 
52
  def extract_query(self) -> str:
53
- """Extract query from any available field"""
54
- # Try direct fields
55
  for field in [self.query, self.text, self.input, self.prompt, self.content]:
56
  if field and field.strip():
57
  return field.strip()
58
 
59
- # Try messages
60
  if self.messages:
61
  for msg in reversed(self.messages):
62
  if isinstance(msg, dict) and msg.get('role') == 'user':
@@ -71,7 +78,7 @@ class ChatFedOutput(BaseModel):
71
  metadata: Dict[str, Any]
72
 
73
  # ─────────────────────────────────────────
74
- # Updated handlers
75
  # ─────────────────────────────────────────
76
 
77
  def process_chatfed_query_core(query: str) -> ChatFedOutput:
@@ -83,60 +90,226 @@ def process_chatfed_query_core(query: str) -> ChatFedOutput:
83
  }
84
  )
85
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
86
  def flexible_handler(data: FlexibleChatInput) -> ChatFedOutput:
87
- """Handler for flexible input that accepts multiple formats"""
88
  try:
 
 
89
  query = data.extract_query()
90
 
91
  if not query:
92
  return ChatFedOutput(
93
  result="No query found. Please provide text in one of these fields: query, text, prompt, content, or messages array.",
94
- metadata={"error": True, "source": "flexible", "provided_fields": list(data.__dict__.keys())}
 
 
 
 
 
95
  )
96
 
97
  result = process_chatfed_query_core(query)
98
  result.metadata.update({
99
  "source": "flexible",
100
  "extracted_query": query,
101
- "input_method": "flexible_handler"
 
102
  })
103
  return result
104
 
105
  except Exception as e:
106
  logger.error(f"Error in flexible_handler: {str(e)}")
107
  return ChatFedOutput(
108
- result=f"Error: {str(e)}",
109
- metadata={"error": True, "source": "flexible"}
110
  )
111
 
112
  def legacy_langserve_handler(data: ChatFedInput) -> ChatFedOutput:
113
- """Original handler for backwards compatibility"""
114
  try:
 
 
115
  result = process_chatfed_query_core(data.query)
116
- result.metadata["source"] = "legacy_langserve"
 
 
 
117
  return result
 
118
  except Exception as e:
 
119
  return ChatFedOutput(
120
- result=f"Error: {str(e)}",
121
- metadata={"error": True, "source": "legacy_langserve"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
122
  )
123
 
124
  # ─────────────────────────────────────────
125
- # App setup with multiple endpoints
126
  # ─────────────────────────────────────────
127
 
128
- app = FastAPI(title="ChatFed API", version="1.0.0")
 
 
 
 
 
 
 
 
 
 
129
 
130
- # Legacy endpoint (strict validation)
131
- add_routes(
132
- app,
133
- RunnableLambda(legacy_langserve_handler),
134
- path="/chatfed-strict",
135
- input_type=ChatFedInput,
136
- output_type=ChatFedOutput
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
137
  )
138
 
139
- # New flexible endpoint for ChatUI (THIS IS WHAT YOU SHOULD USE)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
140
  add_routes(
141
  app,
142
  RunnableLambda(flexible_handler),
@@ -145,27 +318,59 @@ add_routes(
145
  output_type=ChatFedOutput
146
  )
147
 
148
- # Add health check endpoint
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
149
  @app.get("/health")
150
  async def health_check():
151
  return {"status": "healthy", "timestamp": datetime.now().isoformat()}
152
 
153
- # Add a simple root endpoint
154
  @app.get("/")
155
  async def root():
156
  return {
157
  "message": "ChatFed API is running",
 
158
  "endpoints": {
159
- "chatfed": "/chatfed",
160
- "chatfed_chatui": "/chatfed-chatui",
 
 
 
161
  "gradio_ui": "/ui",
162
  "docs": "/docs",
163
- "health": "/health"
 
 
 
 
 
 
 
164
  }
165
  }
166
 
167
- # Add this to your FastAPI app after the existing routes
168
-
169
  @app.post("/v1/chat/completions")
170
  async def openai_compatible_endpoint(request: Request):
171
  """OpenAI-compatible endpoint that ChatUI might prefer"""
@@ -204,7 +409,7 @@ async def openai_compatible_endpoint(request: Request):
204
  logger.error(f"Error in OpenAI endpoint: {str(e)}")
205
  return {"error": str(e)}
206
 
207
- # Also add a simple text endpoint
208
  @app.post("/simple-chat")
209
  async def simple_chat_endpoint(request: Request):
210
  """Simple endpoint that accepts any text input"""
@@ -230,7 +435,51 @@ async def simple_chat_endpoint(request: Request):
230
  logger.error(f"Error in simple chat: {str(e)}")
231
  return {"error": str(e)}
232
 
233
- # Mount Gradio at a specific path instead of root to avoid conflicts
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
234
  demo = create_gradio_interface()
235
  app.mount("/ui", demo)
236
 
 
1
+ """
2
+ Complete ChatFed Orchestrator with flexible input handling for ChatUI compatibility
3
+ """
4
  from fastapi import FastAPI, Request
5
  from langserve import add_routes
6
  from langchain_core.runnables import RunnableLambda
7
  from pydantic import BaseModel
8
  from typing import List, Literal, Optional, Dict, Any
9
+ import gradio as gr
10
+ from datetime import datetime
11
  import logging
12
 
13
  # Set up logging
14
+ logging.basicConfig(level=logging.INFO)
15
  logger = logging.getLogger(__name__)
16
 
17
  # ─────────────────────────────────────────
18
+ # Schemas - Both Legacy and Flexible
19
  # ─────────────────────────────────────────
20
 
21
  class ChatMessage(BaseModel):
 
25
  class ChatUIInput(BaseModel):
26
  messages: List[ChatMessage]
27
 
28
+ # Legacy schema for backwards compatibility
29
  class ChatFedInput(BaseModel):
30
+ query: str
31
  reports_filter: Optional[str] = ""
32
  sources_filter: Optional[str] = ""
33
  subtype_filter: Optional[str] = ""
 
35
  session_id: Optional[str] = None
36
  user_id: Optional[str] = None
37
 
38
+ # New flexible schema for ChatUI compatibility
 
 
 
39
  class FlexibleChatInput(BaseModel):
40
+ """Flexible input that accepts multiple field names for the query"""
41
+ # Accept any of these fields for the main query
42
  query: Optional[str] = None
43
  text: Optional[str] = None
44
  input: Optional[str] = None
45
  prompt: Optional[str] = None
46
  content: Optional[str] = None
47
+
48
+ # ChatUI messages format
49
  messages: Optional[List[Dict[str, Any]]] = None
50
 
51
+ # RAG filters (optional)
52
  reports_filter: Optional[str] = ""
53
  sources_filter: Optional[str] = ""
54
  subtype_filter: Optional[str] = ""
 
57
  user_id: Optional[str] = None
58
 
59
  def extract_query(self) -> str:
60
+ """Extract query text from any of the available fields"""
61
+ # Try direct text fields first
62
  for field in [self.query, self.text, self.input, self.prompt, self.content]:
63
  if field and field.strip():
64
  return field.strip()
65
 
66
+ # Try messages format
67
  if self.messages:
68
  for msg in reversed(self.messages):
69
  if isinstance(msg, dict) and msg.get('role') == 'user':
 
78
  metadata: Dict[str, Any]
79
 
80
  # ─────────────────────────────────────────
81
+ # Core Logic
82
  # ─────────────────────────────────────────
83
 
84
  def process_chatfed_query_core(query: str) -> ChatFedOutput:
 
90
  }
91
  )
92
 
93
+ def extract_user_query_fallback(data: Any) -> str:
94
+ """Fallback function to extract user query from any input format"""
95
+ try:
96
+ if not data:
97
+ return ""
98
+
99
+ # Handle Pydantic models
100
+ if hasattr(data, '__dict__'):
101
+ data_dict = data.__dict__
102
+ elif isinstance(data, dict):
103
+ data_dict = data
104
+ else:
105
+ return str(data)
106
+
107
+ # Method 1: Direct query fields
108
+ for field in ["query", "text", "input", "prompt", "content", "question"]:
109
+ if field in data_dict and data_dict[field]:
110
+ return str(data_dict[field])
111
+
112
+ # Method 2: Messages format
113
+ if "messages" in data_dict:
114
+ messages = data_dict["messages"]
115
+ if isinstance(messages, list):
116
+ for msg in reversed(messages):
117
+ if isinstance(msg, dict) and msg.get("role") == "user":
118
+ content = msg.get("content") or msg.get("text")
119
+ if content:
120
+ return str(content)
121
+
122
+ # Method 3: If the whole thing looks like a message
123
+ if isinstance(data_dict, dict) and "role" in data_dict and data_dict.get("role") == "user":
124
+ return str(data_dict.get("content", data_dict.get("text", "")))
125
+
126
+ return ""
127
+
128
+ except Exception as e:
129
+ logger.error(f"Error extracting query from {data}: {e}")
130
+ return ""
131
+
132
+ # ─────────────────────────────────────────
133
+ # Handlers - Multiple for Different Use Cases
134
+ # ─────────────────────────────────────────
135
+
136
  def flexible_handler(data: FlexibleChatInput) -> ChatFedOutput:
137
+ """Primary handler for flexible input that accepts multiple formats"""
138
  try:
139
+ logger.info(f"flexible_handler received: {data}")
140
+
141
  query = data.extract_query()
142
 
143
  if not query:
144
  return ChatFedOutput(
145
  result="No query found. Please provide text in one of these fields: query, text, prompt, content, or messages array.",
146
+ metadata={
147
+ "error": True,
148
+ "source": "flexible",
149
+ "provided_fields": [k for k, v in data.__dict__.items() if v],
150
+ "help": "Supported formats: {'query': 'text'}, {'text': 'text'}, {'messages': [{'role': 'user', 'content': 'text'}]}"
151
+ }
152
  )
153
 
154
  result = process_chatfed_query_core(query)
155
  result.metadata.update({
156
  "source": "flexible",
157
  "extracted_query": query,
158
+ "input_method": "flexible_handler",
159
+ "extracted_from": next((k for k, v in data.__dict__.items() if v and k in ['query', 'text', 'input', 'prompt', 'content']), "messages")
160
  })
161
  return result
162
 
163
  except Exception as e:
164
  logger.error(f"Error in flexible_handler: {str(e)}")
165
  return ChatFedOutput(
166
+ result=f"Error processing request: {str(e)}",
167
+ metadata={"error": True, "source": "flexible", "exception": str(e)}
168
  )
169
 
170
  def legacy_langserve_handler(data: ChatFedInput) -> ChatFedOutput:
171
+ """Legacy handler for backwards compatibility - strict validation"""
172
  try:
173
+ logger.info(f"legacy_langserve_handler received: {data}")
174
+
175
  result = process_chatfed_query_core(data.query)
176
+ result.metadata.update({
177
+ "source": "legacy_langserve",
178
+ "input_method": "legacy_handler"
179
+ })
180
  return result
181
+
182
  except Exception as e:
183
+ logger.error(f"Error in legacy_langserve_handler: {str(e)}")
184
  return ChatFedOutput(
185
+ result=f"Error processing request: {str(e)}",
186
+ metadata={"error": True, "source": "legacy_langserve", "exception": str(e)}
187
+ )
188
+
189
+ def chatui_handler(data) -> ChatFedOutput:
190
+ """Handler for chat UI input - accepts raw dict and returns ChatFedOutput"""
191
+ try:
192
+ logger.info(f"chatui_handler received: {data}")
193
+
194
+ # Handle both dict and Pydantic model input
195
+ if isinstance(data, dict):
196
+ messages = data.get("messages", [])
197
+
198
+ # Handle case where ChatUI sends 'text' instead of 'messages'
199
+ if not messages and "text" in data:
200
+ # Convert text to messages format
201
+ messages = [{"role": "user", "content": data["text"]}]
202
+
203
+ else:
204
+ messages = data.messages if hasattr(data, 'messages') else []
205
+
206
+ # Find the last user message
207
+ last_user_msg = ""
208
+ for msg in reversed(messages):
209
+ if isinstance(msg, dict):
210
+ if msg.get("role") == "user":
211
+ last_user_msg = msg.get("content", "")
212
+ break
213
+ else:
214
+ if hasattr(msg, 'role') and msg.role == "user":
215
+ last_user_msg = msg.content
216
+ break
217
+
218
+ # Fallback to extract from any format
219
+ if not last_user_msg:
220
+ last_user_msg = extract_user_query_fallback(data)
221
+
222
+ if not last_user_msg.strip():
223
+ return ChatFedOutput(
224
+ result="No user message found in chat UI input",
225
+ metadata={"error": True, "source": "chatui", "raw_input": str(data)}
226
+ )
227
+
228
+ result = process_chatfed_query_core(last_user_msg.strip())
229
+ result.metadata.update({
230
+ "source": "chatui",
231
+ "extracted_query": last_user_msg.strip(),
232
+ "input_method": "chatui_handler"
233
+ })
234
+ return result
235
+
236
+ except Exception as e:
237
+ logger.error(f"Error in chatui_handler: {str(e)}")
238
+ return ChatFedOutput(
239
+ result=f"Error processing chat UI request: {str(e)}",
240
+ metadata={"error": True, "source": "chatui", "exception": str(e)}
241
  )
242
 
243
  # ─────────────────────────────────────────
244
+ # Gradio Interface
245
  # ─────────────────────────────────────────
246
 
247
+ def create_gradio_interface():
248
+ with gr.Blocks(title="ChatFed UI") as demo:
249
+ gr.Markdown("## ChatFed Dev UI")
250
+
251
+ with gr.Row():
252
+ with gr.Column(scale=4):
253
+ query = gr.Textbox(label="Query", placeholder="Enter your query here...")
254
+ with gr.Column(scale=1):
255
+ btn = gr.Button("Send", variant="primary")
256
+
257
+ out = gr.Textbox(label="Response", lines=10)
258
 
259
+ def process_query(q: str) -> str:
260
+ if not q.strip():
261
+ return "Please enter a query."
262
+ try:
263
+ result = process_chatfed_query_core(q)
264
+ return result.result
265
+ except Exception as e:
266
+ return f"Error processing query: {str(e)}"
267
+
268
+ btn.click(fn=process_query, inputs=query, outputs=out)
269
+ query.submit(fn=process_query, inputs=query, outputs=out) # Allow Enter key
270
+
271
+ return demo
272
+
273
+ # ─────────────────────────────────────────
274
+ # App Startup
275
+ # ─────────────────────────────────────────
276
+
277
+ app = FastAPI(
278
+ title="ChatFed API",
279
+ description="API for ChatFed query processing with flexible input handling",
280
+ version="1.0.0"
281
  )
282
 
283
+ # Add request logging middleware for debugging
284
+ @app.middleware("http")
285
+ async def log_requests(request: Request, call_next):
286
+ """Log incoming requests for debugging"""
287
+ if request.url.path.startswith("/chatfed") or request.url.path.startswith("/debug"):
288
+ try:
289
+ body = await request.body()
290
+ logger.info(f"=== REQUEST DEBUG ===")
291
+ logger.info(f"Path: {request.url.path}")
292
+ logger.info(f"Method: {request.method}")
293
+ logger.info(f"Headers: {dict(request.headers)}")
294
+ logger.info(f"Body: {body.decode('utf-8') if body else 'Empty'}")
295
+
296
+ # Recreate request for next handler
297
+ async def receive():
298
+ return {"type": "http.request", "body": body}
299
+
300
+ request._receive = receive
301
+
302
+ except Exception as e:
303
+ logger.error(f"Error logging request: {e}")
304
+
305
+ response = await call_next(request)
306
+ return response
307
+
308
+ # ─────────────────────────────────────────
309
+ # LangServe Routes
310
+ # ─────────────────────────────────────────
311
+
312
+ # Primary flexible endpoint for ChatUI (THIS IS THE MAIN ONE)
313
  add_routes(
314
  app,
315
  RunnableLambda(flexible_handler),
 
318
  output_type=ChatFedOutput
319
  )
320
 
321
+ # ChatUI specific endpoint with flexible handling
322
+ add_routes(
323
+ app,
324
+ RunnableLambda(chatui_handler),
325
+ path="/chatfed-chatui",
326
+ input_type=ChatUIInput,
327
+ output_type=ChatFedOutput
328
+ )
329
+
330
+ # Legacy endpoint for backwards compatibility (strict validation)
331
+ add_routes(
332
+ app,
333
+ RunnableLambda(legacy_langserve_handler),
334
+ path="/chatfed-strict",
335
+ input_type=ChatFedInput,
336
+ output_type=ChatFedOutput
337
+ )
338
+
339
+ # ─────────────────────────────────────────
340
+ # Additional Endpoints
341
+ # ─────────────────────────────────────────
342
+
343
+ # Health check endpoint
344
  @app.get("/health")
345
  async def health_check():
346
  return {"status": "healthy", "timestamp": datetime.now().isoformat()}
347
 
348
+ # Root endpoint
349
  @app.get("/")
350
  async def root():
351
  return {
352
  "message": "ChatFed API is running",
353
+ "status": "healthy",
354
  "endpoints": {
355
+ "primary": "/chatfed (flexible input - use this for ChatUI)",
356
+ "chatui": "/chatfed-chatui",
357
+ "legacy": "/chatfed-strict (requires 'query' field)",
358
+ "openai": "/v1/chat/completions",
359
+ "simple": "/simple-chat",
360
  "gradio_ui": "/ui",
361
  "docs": "/docs",
362
+ "health": "/health",
363
+ "debug": "/debug-input"
364
+ },
365
+ "supported_input_formats": {
366
+ "query_field": '{"query": "your text"}',
367
+ "text_field": '{"text": "your text"}',
368
+ "messages": '{"messages": [{"role": "user", "content": "your text"}]}',
369
+ "other_fields": "prompt, input, content are also supported"
370
  }
371
  }
372
 
373
+ # OpenAI-compatible endpoint
 
374
  @app.post("/v1/chat/completions")
375
  async def openai_compatible_endpoint(request: Request):
376
  """OpenAI-compatible endpoint that ChatUI might prefer"""
 
409
  logger.error(f"Error in OpenAI endpoint: {str(e)}")
410
  return {"error": str(e)}
411
 
412
+ # Simple chat endpoint
413
  @app.post("/simple-chat")
414
  async def simple_chat_endpoint(request: Request):
415
  """Simple endpoint that accepts any text input"""
 
435
  logger.error(f"Error in simple chat: {str(e)}")
436
  return {"error": str(e)}
437
 
438
+ # Debug endpoint for testing
439
+ @app.post("/debug-input")
440
+ async def debug_input_endpoint(request: Request):
441
+ """Debug endpoint to see exactly what's being sent and how it's processed"""
442
+ try:
443
+ # Get raw body
444
+ raw_body = await request.body()
445
+
446
+ # Parse JSON
447
+ data = await request.json()
448
+
449
+ # Create FlexibleChatInput to test extraction
450
+ try:
451
+ flexible_input = FlexibleChatInput(**data)
452
+ extracted_query = flexible_input.extract_query()
453
+ except Exception as e:
454
+ flexible_input = None
455
+ extracted_query = f"Error creating FlexibleChatInput: {str(e)}"
456
+
457
+ # Try fallback extraction
458
+ fallback_query = extract_user_query_fallback(data)
459
+
460
+ return {
461
+ "debug_info": {
462
+ "raw_body": raw_body.decode('utf-8'),
463
+ "parsed_data": data,
464
+ "data_keys": list(data.keys()) if isinstance(data, dict) else "not_dict",
465
+ "data_type": str(type(data)),
466
+ "flexible_input_created": flexible_input is not None,
467
+ "extracted_query_flexible": extracted_query,
468
+ "extracted_query_fallback": fallback_query,
469
+ },
470
+ "test_results": {
471
+ "would_process": extracted_query or fallback_query,
472
+ "final_query": extracted_query or fallback_query or "NO_QUERY_FOUND"
473
+ }
474
+ }
475
+
476
+ except Exception as e:
477
+ return {
478
+ "error": str(e),
479
+ "raw_body": raw_body.decode('utf-8') if 'raw_body' in locals() else "failed_to_read"
480
+ }
481
+
482
+ # Mount Gradio at a specific path
483
  demo = create_gradio_interface()
484
  app.mount("/ui", demo)
485