gzdaniel committed
Commit 55a25c8 · 1 parent: f439a2a

Optimize log message

Files changed (2):
  1. lightrag/kg/postgres_impl.py  +23 -12
  2. lightrag/lightrag.py          +10 -2
lightrag/kg/postgres_impl.py CHANGED
@@ -1372,11 +1372,12 @@ class PGGraphStorage(BaseGraphStorage):
         if record:
             node = record[0]
             node_dict = node["n"]["properties"]
-
+
             # Process string result, parse it to JSON dictionary
             if isinstance(node_dict, str):
                 try:
                     import json
+
                     node_dict = json.loads(node_dict)
                 except json.JSONDecodeError:
                     logger.warning(f"Failed to parse node string: {node_dict}")
@@ -1428,15 +1429,16 @@ class PGGraphStorage(BaseGraphStorage):
         record = await self._query(query)
         if record and record[0] and record[0]["edge_properties"]:
             result = record[0]["edge_properties"]
-
+
             # Process string result, parse it to JSON dictionary
             if isinstance(result, str):
                 try:
                     import json
+
                     result = json.loads(result)
                 except json.JSONDecodeError:
                     logger.warning(f"Failed to parse edge string: {result}")
-
+
             return result
 
     async def get_node_edges(self, source_node_id: str) -> list[tuple[str, str]] | None:
@@ -1645,15 +1647,18 @@ class PGGraphStorage(BaseGraphStorage):
         for result in results:
             if result["node_id"] and result["n"]:
                 node_dict = result["n"]["properties"]
-
+
                 # Process string result, parse it to JSON dictionary
                 if isinstance(node_dict, str):
                     try:
                         import json
+
                         node_dict = json.loads(node_dict)
                     except json.JSONDecodeError:
-                        logger.warning(f"Failed to parse node string in batch: {node_dict}")
-
+                        logger.warning(
+                            f"Failed to parse node string in batch: {node_dict}"
+                        )
+
                 # Remove the 'base' label if present in a 'labels' property
                 if "labels" in node_dict:
                     node_dict["labels"] = [
@@ -1806,31 +1811,37 @@ class PGGraphStorage(BaseGraphStorage):
         for result in forward_results:
             if result["source"] and result["target"] and result["edge_properties"]:
                 edge_props = result["edge_properties"]
-
+
                 # Process string result, parse it to JSON dictionary
                 if isinstance(edge_props, str):
                     try:
                         import json
+
                         edge_props = json.loads(edge_props)
                     except json.JSONDecodeError:
-                        logger.warning(f"Failed to parse edge properties string: {edge_props}")
+                        logger.warning(
+                            f"Failed to parse edge properties string: {edge_props}"
+                        )
                         continue
-
+
                 edges_dict[(result["source"], result["target"])] = edge_props
 
         for result in backward_results:
             if result["source"] and result["target"] and result["edge_properties"]:
                 edge_props = result["edge_properties"]
-
+
                 # Process string result, parse it to JSON dictionary
                 if isinstance(edge_props, str):
                     try:
                         import json
+
                         edge_props = json.loads(edge_props)
                     except json.JSONDecodeError:
-                        logger.warning(f"Failed to parse edge properties string: {edge_props}")
+                        logger.warning(
+                            f"Failed to parse edge properties string: {edge_props}"
+                        )
                         continue
-
+
                 edges_dict[(result["source"], result["target"])] = edge_props
 
         return edges_dict
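
Every hunk in postgres_impl.py repeats the same defensive pattern: the Postgres graph queries can return node or edge properties either as a dict or as a JSON-encoded string, so the string form is parsed and a malformed payload only triggers a warning instead of an exception. Below is a minimal sketch of that pattern as a standalone helper; the name parse_graph_properties is illustrative only, since the commit keeps the logic inlined at each call site.

import json
import logging

logger = logging.getLogger(__name__)


def parse_graph_properties(raw, context="node"):
    # Properties may arrive as a ready-made dict or as a JSON-encoded string;
    # normalize to a dict and warn (rather than raise) on malformed input,
    # mirroring the try/except json.JSONDecodeError blocks in the diff above.
    if isinstance(raw, str):
        try:
            return json.loads(raw)
        except json.JSONDecodeError:
            logger.warning(f"Failed to parse {context} string: {raw}")
            return {}
    return raw


# Example use, mirroring the batch edge loop:
# edge_props = parse_graph_properties(result["edge_properties"], context="edge")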
lightrag/lightrag.py CHANGED
@@ -994,10 +994,14 @@ class LightRAG:
 
         except Exception as e:
             # Log error and update pipeline status
-            error_msg = f"Failed to extrat document {doc_id}: {traceback.format_exc()}"
+            logger.error(traceback.format_exc())
+            error_msg = f"Failed to extrat document {current_file_number}/{total_files}: {file_path}"
             logger.error(error_msg)
             async with pipeline_status_lock:
                 pipeline_status["latest_message"] = error_msg
+                pipeline_status["history_messages"].append(
+                    traceback.format_exc()
+                )
                 pipeline_status["history_messages"].append(error_msg)
 
             # Cancel other tasks as they are no longer meaningful
@@ -1080,10 +1084,14 @@ class LightRAG:
 
         except Exception as e:
             # Log error and update pipeline status
-            error_msg = f"Merging stage failed in document {doc_id}: {traceback.format_exc()}"
+            logger.error(traceback.format_exc())
+            error_msg = f"Merging stage failed in document {current_file_number}/{total_files}: {file_path}"
             logger.error(error_msg)
             async with pipeline_status_lock:
                 pipeline_status["latest_message"] = error_msg
+                pipeline_status["history_messages"].append(
+                    traceback.format_exc()
+                )
                 pipeline_status["history_messages"].append(error_msg)
 
             # Persistent llm cache
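
Both lightrag.py hunks make the same change: the old code packed the full traceback into error_msg, so it ended up verbatim in pipeline_status["latest_message"]; the new code logs the traceback on its own, keeps the status message short (file counter plus path), and appends both the traceback and the summary to the history. The sketch below captures that pattern as a hypothetical helper; report_stage_failure is not a real LightRAG function, and stage, pipeline_status, and pipeline_status_lock simply mirror the names used in the surrounding code.

import logging
import traceback

logger = logging.getLogger(__name__)


async def report_stage_failure(
    stage, current_file_number, total_files, file_path,
    pipeline_status, pipeline_status_lock,
):
    # Intended to be awaited from inside an except block so that
    # traceback.format_exc() still sees the active exception.
    logger.error(traceback.format_exc())
    error_msg = f"{stage} failed in document {current_file_number}/{total_files}: {file_path}"
    logger.error(error_msg)
    async with pipeline_status_lock:
        # The short summary is what the UI shows as the latest message;
        # the full traceback is kept only in the history for later inspection.
        pipeline_status["latest_message"] = error_msg
        pipeline_status["history_messages"].append(traceback.format_exc())
        pipeline_status["history_messages"].append(error_msg)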