yangdx committed on
Commit
c1cfa92
·
1 Parent(s): ee17743

Directly pass the session title or keyword generation request from Open WebUI to the underlying LLM

Browse files
Files changed (1) hide show
  1. lightrag/api/lightrag_server.py +20 -1
lightrag/api/lightrag_server.py CHANGED
@@ -467,6 +467,7 @@ class OllamaChatRequest(BaseModel):
467
  messages: List[OllamaMessage]
468
  stream: bool = True # Default to streaming mode
469
  options: Optional[Dict[str, Any]] = None
 
470
 
471
 
472
  class OllamaChatResponse(BaseModel):
@@ -1536,7 +1537,25 @@ def create_app(args):
1536
  )
1537
  else:
1538
  first_chunk_time = time.time_ns()
1539
- response_text = await rag.aquery(cleaned_query, param=query_param)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1540
  last_chunk_time = time.time_ns()
1541
 
1542
  if not response_text:
 
467
  messages: List[OllamaMessage]
468
  stream: bool = True # Default to streaming mode
469
  options: Optional[Dict[str, Any]] = None
470
+ system: Optional[str] = None
471
 
472
 
473
  class OllamaChatResponse(BaseModel):
 
1537
  )
1538
  else:
1539
  first_chunk_time = time.time_ns()
1540
+
1541
+ # 判断是否包含特定字符串,使用正则表达式进行匹配
1542
+ logging.info(f"Cleaned query content: {cleaned_query}")
1543
+ match_result = re.search(r'\\n<chat_history>\\nUSER:', cleaned_query)
1544
+ logging.info(f"Regex match result: {bool(match_result)}")
1545
+
1546
+ if match_result:
1547
+
1548
+ if request.system:
1549
+ rag.llm_model_kwargs["system_prompt"] = request.system
1550
+
1551
+ response_text = await rag.llm_model_func(
1552
+ cleaned_query,
1553
+ stream=False,
1554
+ **rag.llm_model_kwargs
1555
+ )
1556
+ else:
1557
+ response_text = await rag.aquery(cleaned_query, param=query_param)
1558
+
1559
  last_chunk_time = time.time_ns()
1560
 
1561
  if not response_text: