Merge pull request #675 from danielaskdd/Fix-get_conversation_turns
Files changed:
- lightrag/utils.py +1 -1
- test_lightrag_ollama_chat.py +27 -49
lightrag/utils.py
CHANGED
@@ -655,7 +655,7 @@ def get_conversation_turns(conversation_history: list[dict], num_turns: int) ->
|
|
655 |
else:
|
656 |
turn = [msg1, msg2] # user, assistant
|
657 |
turns.append(turn)
|
658 |
-
i += 1
|
659 |
|
660 |
# Keep only the most recent num_turns
|
661 |
if len(turns) > num_turns:
|
|
|
655 |
else:
|
656 |
turn = [msg1, msg2] # user, assistant
|
657 |
turns.append(turn)
|
658 |
+
i += 2
|
659 |
|
660 |
# Keep only the most recent num_turns
|
661 |
if len(turns) > num_turns:
|
test_lightrag_ollama_chat.py
CHANGED
@@ -18,6 +18,29 @@ from dataclasses import dataclass, asdict
|
|
18 |
from datetime import datetime
|
19 |
from pathlib import Path
|
20 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
21 |
|
22 |
class OutputControl:
|
23 |
"""Output control class, manages the verbosity of test output"""
|
@@ -99,22 +122,6 @@ class TestStats:
|
|
99 |
print(f"- {result.name}: {result.error}")
|
100 |
|
101 |
|
102 |
-
DEFAULT_CONFIG = {
|
103 |
-
"server": {
|
104 |
-
"host": "localhost",
|
105 |
-
"port": 9621,
|
106 |
-
"model": "lightrag:latest",
|
107 |
-
"timeout": 120,
|
108 |
-
"max_retries": 3,
|
109 |
-
"retry_delay": 1,
|
110 |
-
},
|
111 |
-
"test_cases": {
|
112 |
-
"basic": {"query": "唐僧有几个徒弟"},
|
113 |
-
"generate": {"query": "电视剧西游记导演是谁"},
|
114 |
-
},
|
115 |
-
}
|
116 |
-
|
117 |
-
|
118 |
def make_request(
|
119 |
url: str, data: Dict[str, Any], stream: bool = False
|
120 |
) -> requests.Response:
|
@@ -193,7 +200,6 @@ def create_chat_request_data(
|
|
193 |
stream: bool = False,
|
194 |
model: str = None,
|
195 |
conversation_history: List[Dict[str, str]] = None,
|
196 |
-
history_turns: int = None,
|
197 |
) -> Dict[str, Any]:
|
198 |
"""Create chat request data
|
199 |
Args:
|
@@ -206,10 +212,6 @@ def create_chat_request_data(
|
|
206 |
Dictionary containing complete chat request data
|
207 |
"""
|
208 |
messages = conversation_history or []
|
209 |
-
if history_turns is not None and conversation_history:
|
210 |
-
messages = messages[
|
211 |
-
-2 * history_turns :
|
212 |
-
] # Each turn has 2 messages (user + assistant)
|
213 |
messages.append({"role": "user", "content": content})
|
214 |
|
215 |
return {
|
@@ -273,23 +275,11 @@ def test_non_stream_chat() -> None:
|
|
273 |
"""Test non-streaming call to /api/chat endpoint"""
|
274 |
url = get_base_url()
|
275 |
|
276 |
-
#
|
277 |
-
conversation_history = [
|
278 |
-
{"role": "user", "content": "你好"},
|
279 |
-
{"role": "assistant", "content": "你好!我是一个AI助手,很高兴为你服务。"},
|
280 |
-
{"role": "user", "content": "西游记里有几个主要人物?"},
|
281 |
-
{
|
282 |
-
"role": "assistant",
|
283 |
-
"content": "西游记的主要人物有唐僧、孙悟空、猪八戒、沙和尚这四位主角。",
|
284 |
-
},
|
285 |
-
]
|
286 |
-
|
287 |
-
# Send request with conversation history and history turns
|
288 |
data = create_chat_request_data(
|
289 |
CONFIG["test_cases"]["basic"]["query"],
|
290 |
stream=False,
|
291 |
-
conversation_history=conversation_history,
|
292 |
-
history_turns=2, # Only include last 2 turns
|
293 |
)
|
294 |
response = make_request(url, data)
|
295 |
|
@@ -325,23 +315,11 @@ def test_stream_chat() -> None:
|
|
325 |
"""
|
326 |
url = get_base_url()
|
327 |
|
328 |
-
#
|
329 |
-
conversation_history = [
|
330 |
-
{"role": "user", "content": "你好"},
|
331 |
-
{"role": "assistant", "content": "你好!我是一个AI助手,很高兴为你服务。"},
|
332 |
-
{"role": "user", "content": "西游记里有几个主要人物?"},
|
333 |
-
{
|
334 |
-
"role": "assistant",
|
335 |
-
"content": "西游记的主要人物有唐僧、孙悟空、猪八戒、沙和尚这四位主角。",
|
336 |
-
},
|
337 |
-
]
|
338 |
-
|
339 |
-
# Send request with conversation history and history turns
|
340 |
data = create_chat_request_data(
|
341 |
CONFIG["test_cases"]["basic"]["query"],
|
342 |
stream=True,
|
343 |
-
conversation_history=conversation_history,
|
344 |
-
history_turns=2, # Only include last 2 turns
|
345 |
)
|
346 |
response = make_request(url, data, stream=True)
|
347 |
|
|
|
18 |
from datetime import datetime
|
19 |
from pathlib import Path
|
20 |
|
21 |
+
DEFAULT_CONFIG = {
|
22 |
+
"server": {
|
23 |
+
"host": "localhost",
|
24 |
+
"port": 9621,
|
25 |
+
"model": "lightrag:latest",
|
26 |
+
"timeout": 120,
|
27 |
+
"max_retries": 3,
|
28 |
+
"retry_delay": 1,
|
29 |
+
},
|
30 |
+
"test_cases": {
|
31 |
+
"basic": {"query": "唐僧有几个徒弟"},
|
32 |
+
"generate": {"query": "电视剧西游记导演是谁"},
|
33 |
+
},
|
34 |
+
}
|
35 |
+
|
36 |
+
# Example conversation history for testing
|
37 |
+
EXAMPLE_CONVERSATION = [
|
38 |
+
{"role": "user", "content": "你好"},
|
39 |
+
{"role": "assistant", "content": "你好!我是一个AI助手,很高兴为你服务。"},
|
40 |
+
{"role": "user", "content": "Who are you?"},
|
41 |
+
{"role": "assistant", "content": "I'm a Knowledge base query assistant."},
|
42 |
+
]
|
43 |
+
|
44 |
|
45 |
class OutputControl:
|
46 |
"""Output control class, manages the verbosity of test output"""
|
|
|
122 |
print(f"- {result.name}: {result.error}")
|
123 |
|
124 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
125 |
def make_request(
|
126 |
url: str, data: Dict[str, Any], stream: bool = False
|
127 |
) -> requests.Response:
|
|
|
200 |
stream: bool = False,
|
201 |
model: str = None,
|
202 |
conversation_history: List[Dict[str, str]] = None,
|
|
|
203 |
) -> Dict[str, Any]:
|
204 |
"""Create chat request data
|
205 |
Args:
|
|
|
212 |
Dictionary containing complete chat request data
|
213 |
"""
|
214 |
messages = conversation_history or []
|
|
|
|
|
|
|
|
|
215 |
messages.append({"role": "user", "content": content})
|
216 |
|
217 |
return {
|
|
|
275 |
"""Test non-streaming call to /api/chat endpoint"""
|
276 |
url = get_base_url()
|
277 |
|
278 |
+
# Send request with conversation history
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
279 |
data = create_chat_request_data(
|
280 |
CONFIG["test_cases"]["basic"]["query"],
|
281 |
stream=False,
|
282 |
+
conversation_history=EXAMPLE_CONVERSATION,
|
|
|
283 |
)
|
284 |
response = make_request(url, data)
|
285 |
|
|
|
315 |
"""
|
316 |
url = get_base_url()
|
317 |
|
318 |
+
# Send request with conversation history
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
319 |
data = create_chat_request_data(
|
320 |
CONFIG["test_cases"]["basic"]["query"],
|
321 |
stream=True,
|
322 |
+
conversation_history=EXAMPLE_CONVERSATION,
|
|
|
323 |
)
|
324 |
response = make_request(url, data, stream=True)
|
325 |
|