Daniel.y committed on
Commit
85c023d
·
unverified ·
2 Parent(s): b66d9e3 8ce7f38

Merge pull request #1217 from danielaskdd/main

Browse files

Improve OpenAI LLM logging with more detailed debug information

lightrag/api/webui/assets/{index-Bz28HSH8.js → index-BZZMDLIK.js} RENAMED
Binary files a/lightrag/api/webui/assets/index-Bz28HSH8.js and b/lightrag/api/webui/assets/index-BZZMDLIK.js differ
 
lightrag/api/webui/index.html CHANGED
Binary files a/lightrag/api/webui/index.html and b/lightrag/api/webui/index.html differ
 
lightrag/llm/openai.py CHANGED
@@ -90,11 +90,13 @@ async def openai_complete_if_cache(
90
  messages.extend(history_messages)
91
  messages.append({"role": "user", "content": prompt})
92
 
93
- logger.debug("===== Sending Query to LLM =====")
94
  logger.debug(f"Model: {model} Base URL: {base_url}")
95
  logger.debug(f"Additional kwargs: {kwargs}")
96
- verbose_debug(f"Query: {prompt}")
97
  verbose_debug(f"System prompt: {system_prompt}")
 
 
98
 
99
  try:
100
  if "response_format" in kwargs:
@@ -164,6 +166,9 @@ async def openai_complete_if_cache(
164
  }
165
  token_tracker.add_usage(token_counts)
166
 
 
 
 
167
  return content
168
 
169
 
 
90
  messages.extend(history_messages)
91
  messages.append({"role": "user", "content": prompt})
92
 
93
+ logger.debug("===== Entering func of LLM =====")
94
  logger.debug(f"Model: {model} Base URL: {base_url}")
95
  logger.debug(f"Additional kwargs: {kwargs}")
96
+ logger.debug(f"Num of history messages: {len(history_messages)}")
97
  verbose_debug(f"System prompt: {system_prompt}")
98
+ verbose_debug(f"Query: {prompt}")
99
+ logger.debug("===== Sending Query to LLM =====")
100
 
101
  try:
102
  if "response_format" in kwargs:
 
166
  }
167
  token_tracker.add_usage(token_counts)
168
 
169
+ logger.debug(f"Response content len: {len(content)}")
170
+ verbose_debug(f"Response: {response}")
171
+
172
  return content
173
 
174
 
lightrag/utils.py CHANGED
@@ -46,7 +46,7 @@ def verbose_debug(msg: str, *args, **kwargs):
46
  formatted_msg = msg
47
  # Then truncate the formatted message
48
  truncated_msg = (
49
- formatted_msg[:50] + "..." if len(formatted_msg) > 50 else formatted_msg
50
  )
51
  logger.debug(truncated_msg, **kwargs)
52
 
 
46
  formatted_msg = msg
47
  # Then truncate the formatted message
48
  truncated_msg = (
49
+ formatted_msg[:100] + "..." if len(formatted_msg) > 100 else formatted_msg
50
  )
51
  logger.debug(truncated_msg, **kwargs)
52
 
lightrag_webui/src/locales/ar.json CHANGED
@@ -215,7 +215,8 @@
215
  "entity_id": "الاسم",
216
  "entity_type": "النوع",
217
  "source_id": "معرف المصدر",
218
- "Neighbour": "الجار"
 
219
  }
220
  },
221
  "edge": {
 
215
  "entity_id": "الاسم",
216
  "entity_type": "النوع",
217
  "source_id": "معرف المصدر",
218
+ "Neighbour": "الجار",
219
+ "file_path": "المصدر"
220
  }
221
  },
222
  "edge": {
lightrag_webui/src/locales/en.json CHANGED
@@ -215,7 +215,8 @@
215
  "entity_id": "Name",
216
  "entity_type": "Type",
217
  "source_id": "SrcID",
218
- "Neighbour": "Neigh"
 
219
  }
220
  },
221
  "edge": {
 
215
  "entity_id": "Name",
216
  "entity_type": "Type",
217
  "source_id": "SrcID",
218
+ "Neighbour": "Neigh",
219
+ "file_path": "Source"
220
  }
221
  },
222
  "edge": {
lightrag_webui/src/locales/fr.json CHANGED
@@ -215,7 +215,8 @@
215
  "entity_id": "Nom",
216
  "entity_type": "Type",
217
  "source_id": "ID source",
218
- "Neighbour": "Voisin"
 
219
  }
220
  },
221
  "edge": {
 
215
  "entity_id": "Nom",
216
  "entity_type": "Type",
217
  "source_id": "ID source",
218
+ "Neighbour": "Voisin",
219
+ "file_path": "Source"
220
  }
221
  },
222
  "edge": {
lightrag_webui/src/locales/zh.json CHANGED
@@ -215,7 +215,8 @@
215
  "entity_id": "名称",
216
  "entity_type": "类型",
217
  "source_id": "信源ID",
218
- "Neighbour": "邻接"
 
219
  }
220
  },
221
  "edge": {
 
215
  "entity_id": "名称",
216
  "entity_type": "类型",
217
  "source_id": "信源ID",
218
+ "Neighbour": "邻接",
219
+ "file_path": "信源"
220
  }
221
  },
222
  "edge": {