support TiDBGraphStorage
Browse files- examples/lightrag_tidb_demo.py +3 -2
- lightrag/kg/tidb_impl.py +243 -41
- lightrag/lightrag.py +2 -0
examples/lightrag_tidb_demo.py
CHANGED
@@ -21,8 +21,7 @@ TIDB_HOST = ""
|
|
21 |
TIDB_PORT = ""
|
22 |
TIDB_USER = ""
|
23 |
TIDB_PASSWORD = ""
|
24 |
-
TIDB_DATABASE = ""
|
25 |
-
|
26 |
|
27 |
if not os.path.exists(WORKING_DIR):
|
28 |
os.mkdir(WORKING_DIR)
|
@@ -93,6 +92,7 @@ async def main():
|
|
93 |
),
|
94 |
kv_storage="TiDBKVStorage",
|
95 |
vector_storage="TiDBVectorDBStorage",
|
|
|
96 |
)
|
97 |
|
98 |
if rag.llm_response_cache:
|
@@ -102,6 +102,7 @@ async def main():
|
|
102 |
rag.entities_vdb.db = tidb
|
103 |
rag.relationships_vdb.db = tidb
|
104 |
rag.chunks_vdb.db = tidb
|
|
|
105 |
|
106 |
# Extract and Insert into LightRAG storage
|
107 |
with open("./dickens/demo.txt", "r", encoding="utf-8") as f:
|
|
|
21 |
TIDB_PORT = ""
|
22 |
TIDB_USER = ""
|
23 |
TIDB_PASSWORD = ""
|
24 |
+
TIDB_DATABASE = "lightrag"
|
|
|
25 |
|
26 |
if not os.path.exists(WORKING_DIR):
|
27 |
os.mkdir(WORKING_DIR)
|
|
|
92 |
),
|
93 |
kv_storage="TiDBKVStorage",
|
94 |
vector_storage="TiDBVectorDBStorage",
|
95 |
+
graph_storage="TiDBGraphStorage",
|
96 |
)
|
97 |
|
98 |
if rag.llm_response_cache:
|
|
|
102 |
rag.entities_vdb.db = tidb
|
103 |
rag.relationships_vdb.db = tidb
|
104 |
rag.chunks_vdb.db = tidb
|
105 |
+
rag.chunk_entity_relation_graph.db = tidb
|
106 |
|
107 |
# Extract and Insert into LightRAG storage
|
108 |
with open("./dickens/demo.txt", "r", encoding="utf-8") as f:
|
lightrag/kg/tidb_impl.py
CHANGED
@@ -7,7 +7,7 @@ import numpy as np
|
|
7 |
from sqlalchemy import create_engine, text
|
8 |
from tqdm import tqdm
|
9 |
|
10 |
-
from lightrag.base import BaseVectorStorage, BaseKVStorage
|
11 |
from lightrag.utils import logger
|
12 |
|
13 |
|
@@ -282,33 +282,180 @@ class TiDBVectorDBStorage(BaseVectorStorage):
|
|
282 |
if self.namespace == "entities":
|
283 |
data = []
|
284 |
for item in list_data:
|
285 |
-
|
286 |
-
|
287 |
-
|
288 |
-
|
289 |
-
|
290 |
-
|
291 |
-
|
292 |
-
|
293 |
-
|
294 |
-
|
295 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
296 |
|
297 |
elif self.namespace == "relationships":
|
298 |
data = []
|
299 |
for item in list_data:
|
300 |
-
|
301 |
-
|
302 |
-
|
303 |
-
|
304 |
-
|
305 |
-
|
306 |
-
|
307 |
-
|
308 |
-
|
309 |
-
|
310 |
-
|
311 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
312 |
|
313 |
|
314 |
N_T = {
|
@@ -362,14 +509,17 @@ TABLES = {
|
|
362 |
"ddl": """
|
363 |
CREATE TABLE LIGHTRAG_GRAPH_NODES (
|
364 |
`id` BIGINT PRIMARY KEY AUTO_RANDOM,
|
365 |
-
`entity_id` VARCHAR(256)
|
366 |
`workspace` varchar(1024),
|
367 |
`name` VARCHAR(2048),
|
|
|
|
|
|
|
368 |
`content` LONGTEXT,
|
369 |
`content_vector` VECTOR,
|
370 |
`createtime` DATETIME DEFAULT CURRENT_TIMESTAMP,
|
371 |
`updatetime` DATETIME DEFAULT NULL,
|
372 |
-
|
373 |
);
|
374 |
"""
|
375 |
},
|
@@ -377,15 +527,19 @@ TABLES = {
|
|
377 |
"ddl": """
|
378 |
CREATE TABLE LIGHTRAG_GRAPH_EDGES (
|
379 |
`id` BIGINT PRIMARY KEY AUTO_RANDOM,
|
380 |
-
`relation_id` VARCHAR(256)
|
381 |
`workspace` varchar(1024),
|
382 |
`source_name` VARCHAR(2048),
|
383 |
`target_name` VARCHAR(2048),
|
|
|
|
|
|
|
|
|
384 |
`content` LONGTEXT,
|
385 |
`content_vector` VECTOR,
|
386 |
`createtime` DATETIME DEFAULT CURRENT_TIMESTAMP,
|
387 |
`updatetime` DATETIME DEFAULT NULL,
|
388 |
-
|
389 |
);
|
390 |
"""
|
391 |
},
|
@@ -416,39 +570,87 @@ SQL_TEMPLATES = {
|
|
416 |
INSERT INTO LIGHTRAG_DOC_FULL (doc_id, content, workspace)
|
417 |
VALUES (:id, :content, :workspace)
|
418 |
ON DUPLICATE KEY UPDATE content = VALUES(content), workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP
|
419 |
-
|
420 |
"upsert_chunk": """
|
421 |
INSERT INTO LIGHTRAG_DOC_CHUNKS(chunk_id, content, tokens, chunk_order_index, full_doc_id, content_vector, workspace)
|
422 |
VALUES (:id, :content, :tokens, :chunk_order_index, :full_doc_id, :content_vector, :workspace)
|
423 |
ON DUPLICATE KEY UPDATE
|
424 |
content = VALUES(content), tokens = VALUES(tokens), chunk_order_index = VALUES(chunk_order_index),
|
425 |
full_doc_id = VALUES(full_doc_id), content_vector = VALUES(content_vector), workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP
|
426 |
-
|
427 |
# SQL for VectorStorage
|
428 |
"entities": """SELECT n.name as entity_name FROM
|
429 |
(SELECT entity_id as id, name, VEC_COSINE_DISTANCE(content_vector,:embedding_string) as distance
|
430 |
FROM LIGHTRAG_GRAPH_NODES WHERE workspace = :workspace) n
|
431 |
-
WHERE n.distance>:better_than_threshold ORDER BY n.distance DESC LIMIT :top_k
|
|
|
432 |
"relationships": """SELECT e.source_name as src_id, e.target_name as tgt_id FROM
|
433 |
(SELECT source_name, target_name, VEC_COSINE_DISTANCE(content_vector, :embedding_string) as distance
|
434 |
FROM LIGHTRAG_GRAPH_EDGES WHERE workspace = :workspace) e
|
435 |
-
WHERE e.distance>:better_than_threshold ORDER BY e.distance DESC LIMIT :top_k
|
|
|
436 |
"chunks": """SELECT c.id FROM
|
437 |
(SELECT chunk_id as id,VEC_COSINE_DISTANCE(content_vector, :embedding_string) as distance
|
438 |
FROM LIGHTRAG_DOC_CHUNKS WHERE workspace = :workspace) c
|
439 |
-
WHERE c.distance>:better_than_threshold ORDER BY c.distance DESC LIMIT :top_k
|
440 |
-
"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
441 |
INSERT INTO LIGHTRAG_GRAPH_NODES(entity_id, name, content, content_vector, workspace)
|
442 |
VALUES(:id, :name, :content, :content_vector, :workspace)
|
443 |
-
|
444 |
-
|
445 |
-
workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP
|
446 |
-
""",
|
447 |
-
"upsert_relationship": """
|
448 |
INSERT INTO LIGHTRAG_GRAPH_EDGES(relation_id, source_name, target_name, content, content_vector, workspace)
|
449 |
VALUES(:id, :source_name, :target_name, :content, :content_vector, :workspace)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
450 |
ON DUPLICATE KEY UPDATE
|
451 |
source_name = VALUES(source_name), target_name = VALUES(target_name), content = VALUES(content),
|
452 |
-
content_vector = VALUES(content_vector), workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP
|
453 |
-
|
|
|
|
|
454 |
}
|
|
|
7 |
from sqlalchemy import create_engine, text
|
8 |
from tqdm import tqdm
|
9 |
|
10 |
+
from lightrag.base import BaseVectorStorage, BaseKVStorage, BaseGraphStorage
|
11 |
from lightrag.utils import logger
|
12 |
|
13 |
|
|
|
282 |
if self.namespace == "entities":
|
283 |
data = []
|
284 |
for item in list_data:
|
285 |
+
param = {
|
286 |
+
"id": item["id"],
|
287 |
+
"name": item["entity_name"],
|
288 |
+
"content": item["content"],
|
289 |
+
"content_vector": f"{item["content_vector"].tolist()}",
|
290 |
+
"workspace": self.db.workspace,
|
291 |
+
}
|
292 |
+
# update entity_id if node inserted by graph_storage_instance before
|
293 |
+
has = await self.db.query(SQL_TEMPLATES["has_entity"], param)
|
294 |
+
if has["cnt"] != 0:
|
295 |
+
await self.db.execute(SQL_TEMPLATES["update_entity"], param)
|
296 |
+
continue
|
297 |
+
|
298 |
+
data.append(param)
|
299 |
+
if data:
|
300 |
+
merge_sql = SQL_TEMPLATES["insert_entity"]
|
301 |
+
await self.db.execute(merge_sql, data)
|
302 |
|
303 |
elif self.namespace == "relationships":
|
304 |
data = []
|
305 |
for item in list_data:
|
306 |
+
param = {
|
307 |
+
"id": item["id"],
|
308 |
+
"source_name": item["src_id"],
|
309 |
+
"target_name": item["tgt_id"],
|
310 |
+
"content": item["content"],
|
311 |
+
"content_vector": f"{item["content_vector"].tolist()}",
|
312 |
+
"workspace": self.db.workspace,
|
313 |
+
}
|
314 |
+
# update relation_id if edge inserted by graph_storage_instance before
|
315 |
+
has = await self.db.query(SQL_TEMPLATES["has_relationship"], param)
|
316 |
+
if has["cnt"] != 0:
|
317 |
+
await self.db.execute(SQL_TEMPLATES["update_relationship"], param)
|
318 |
+
continue
|
319 |
+
|
320 |
+
data.append(param)
|
321 |
+
if data:
|
322 |
+
merge_sql = SQL_TEMPLATES["insert_relationship"]
|
323 |
+
await self.db.execute(merge_sql, data)
|
324 |
+
|
325 |
+
|
326 |
+
@dataclass
|
327 |
+
class TiDBGraphStorage(BaseGraphStorage):
|
328 |
+
def __post_init__(self):
|
329 |
+
self._max_batch_size = self.global_config["embedding_batch_num"]
|
330 |
+
|
331 |
+
#################### upsert method ################
|
332 |
+
async def upsert_node(self, node_id: str, node_data: dict[str, str]):
|
333 |
+
entity_name = node_id
|
334 |
+
entity_type = node_data["entity_type"]
|
335 |
+
description = node_data["description"]
|
336 |
+
source_id = node_data["source_id"]
|
337 |
+
logger.debug(f"entity_name:{entity_name}, entity_type:{entity_type}")
|
338 |
+
content = entity_name + description
|
339 |
+
contents = [content]
|
340 |
+
batches = [
|
341 |
+
contents[i : i + self._max_batch_size]
|
342 |
+
for i in range(0, len(contents), self._max_batch_size)
|
343 |
+
]
|
344 |
+
embeddings_list = await asyncio.gather(
|
345 |
+
*[self.embedding_func(batch) for batch in batches]
|
346 |
+
)
|
347 |
+
embeddings = np.concatenate(embeddings_list)
|
348 |
+
content_vector = embeddings[0]
|
349 |
+
sql = SQL_TEMPLATES["upsert_node"]
|
350 |
+
data = {
|
351 |
+
"workspace": self.db.workspace,
|
352 |
+
"name": entity_name,
|
353 |
+
"entity_type": entity_type,
|
354 |
+
"description": description,
|
355 |
+
"source_chunk_id": source_id,
|
356 |
+
"content": content,
|
357 |
+
"content_vector": f"{content_vector.tolist()}",
|
358 |
+
}
|
359 |
+
await self.db.execute(sql, data)
|
360 |
+
|
361 |
+
async def upsert_edge(
|
362 |
+
self, source_node_id: str, target_node_id: str, edge_data: dict[str, str]
|
363 |
+
):
|
364 |
+
source_name = source_node_id
|
365 |
+
target_name = target_node_id
|
366 |
+
weight = edge_data["weight"]
|
367 |
+
keywords = edge_data["keywords"]
|
368 |
+
description = edge_data["description"]
|
369 |
+
source_chunk_id = edge_data["source_id"]
|
370 |
+
logger.debug(
|
371 |
+
f"source_name:{source_name}, target_name:{target_name}, keywords: {keywords}"
|
372 |
+
)
|
373 |
+
|
374 |
+
content = keywords + source_name + target_name + description
|
375 |
+
contents = [content]
|
376 |
+
batches = [
|
377 |
+
contents[i : i + self._max_batch_size]
|
378 |
+
for i in range(0, len(contents), self._max_batch_size)
|
379 |
+
]
|
380 |
+
embeddings_list = await asyncio.gather(
|
381 |
+
*[self.embedding_func(batch) for batch in batches]
|
382 |
+
)
|
383 |
+
embeddings = np.concatenate(embeddings_list)
|
384 |
+
content_vector = embeddings[0]
|
385 |
+
merge_sql = SQL_TEMPLATES["upsert_edge"]
|
386 |
+
data = {
|
387 |
+
"workspace": self.db.workspace,
|
388 |
+
"source_name": source_name,
|
389 |
+
"target_name": target_name,
|
390 |
+
"weight": weight,
|
391 |
+
"keywords": keywords,
|
392 |
+
"description": description,
|
393 |
+
"source_chunk_id": source_chunk_id,
|
394 |
+
"content": content,
|
395 |
+
"content_vector": f"{content_vector.tolist()}",
|
396 |
+
}
|
397 |
+
await self.db.execute(merge_sql, data)
|
398 |
+
|
399 |
+
async def embed_nodes(self, algorithm: str) -> tuple[np.ndarray, list[str]]:
|
400 |
+
if algorithm not in self._node_embed_algorithms:
|
401 |
+
raise ValueError(f"Node embedding algorithm {algorithm} not supported")
|
402 |
+
return await self._node_embed_algorithms[algorithm]()
|
403 |
+
|
404 |
+
# Query
|
405 |
+
|
406 |
+
async def has_node(self, node_id: str) -> bool:
|
407 |
+
sql = SQL_TEMPLATES["has_entity"]
|
408 |
+
param = {"name": node_id, "workspace": self.db.workspace}
|
409 |
+
has = await self.db.query(sql, param)
|
410 |
+
return has["cnt"] != 0
|
411 |
+
|
412 |
+
async def has_edge(self, source_node_id: str, target_node_id: str) -> bool:
|
413 |
+
sql = SQL_TEMPLATES["has_relationship"]
|
414 |
+
param = {
|
415 |
+
"source_name": source_node_id,
|
416 |
+
"target_name": target_node_id,
|
417 |
+
"workspace": self.db.workspace,
|
418 |
+
}
|
419 |
+
has = await self.db.query(sql, param)
|
420 |
+
return has["cnt"] != 0
|
421 |
+
|
422 |
+
async def node_degree(self, node_id: str) -> int:
|
423 |
+
sql = SQL_TEMPLATES["node_degree"]
|
424 |
+
param = {"name": node_id, "workspace": self.db.workspace}
|
425 |
+
result = await self.db.query(sql, param)
|
426 |
+
return result["cnt"]
|
427 |
+
|
428 |
+
async def edge_degree(self, src_id: str, tgt_id: str) -> int:
|
429 |
+
degree = await self.node_degree(src_id) + await self.node_degree(tgt_id)
|
430 |
+
return degree
|
431 |
+
|
432 |
+
async def get_node(self, node_id: str) -> Union[dict, None]:
|
433 |
+
sql = SQL_TEMPLATES["get_node"]
|
434 |
+
param = {"name": node_id, "workspace": self.db.workspace}
|
435 |
+
return await self.db.query(sql, param)
|
436 |
+
|
437 |
+
async def get_edge(
|
438 |
+
self, source_node_id: str, target_node_id: str
|
439 |
+
) -> Union[dict, None]:
|
440 |
+
sql = SQL_TEMPLATES["get_edge"]
|
441 |
+
param = {
|
442 |
+
"source_name": source_node_id,
|
443 |
+
"target_name": target_node_id,
|
444 |
+
"workspace": self.db.workspace,
|
445 |
+
}
|
446 |
+
return await self.db.query(sql, param)
|
447 |
+
|
448 |
+
async def get_node_edges(
|
449 |
+
self, source_node_id: str
|
450 |
+
) -> Union[list[tuple[str, str]], None]:
|
451 |
+
sql = SQL_TEMPLATES["get_node_edges"]
|
452 |
+
param = {"source_name": source_node_id, "workspace": self.db.workspace}
|
453 |
+
res = await self.db.query(sql, param, multirows=True)
|
454 |
+
if res:
|
455 |
+
data = [(i["source_name"], i["target_name"]) for i in res]
|
456 |
+
return data
|
457 |
+
else:
|
458 |
+
return []
|
459 |
|
460 |
|
461 |
N_T = {
|
|
|
509 |
"ddl": """
|
510 |
CREATE TABLE LIGHTRAG_GRAPH_NODES (
|
511 |
`id` BIGINT PRIMARY KEY AUTO_RANDOM,
|
512 |
+
`entity_id` VARCHAR(256),
|
513 |
`workspace` varchar(1024),
|
514 |
`name` VARCHAR(2048),
|
515 |
+
`entity_type` VARCHAR(1024),
|
516 |
+
`description` LONGTEXT,
|
517 |
+
`source_chunk_id` VARCHAR(256),
|
518 |
`content` LONGTEXT,
|
519 |
`content_vector` VECTOR,
|
520 |
`createtime` DATETIME DEFAULT CURRENT_TIMESTAMP,
|
521 |
`updatetime` DATETIME DEFAULT NULL,
|
522 |
+
KEY (`entity_id`)
|
523 |
);
|
524 |
"""
|
525 |
},
|
|
|
527 |
"ddl": """
|
528 |
CREATE TABLE LIGHTRAG_GRAPH_EDGES (
|
529 |
`id` BIGINT PRIMARY KEY AUTO_RANDOM,
|
530 |
+
`relation_id` VARCHAR(256),
|
531 |
`workspace` varchar(1024),
|
532 |
`source_name` VARCHAR(2048),
|
533 |
`target_name` VARCHAR(2048),
|
534 |
+
`weight` DECIMAL,
|
535 |
+
`keywords` TEXT,
|
536 |
+
`description` LONGTEXT,
|
537 |
+
`source_chunk_id` varchar(256),
|
538 |
`content` LONGTEXT,
|
539 |
`content_vector` VECTOR,
|
540 |
`createtime` DATETIME DEFAULT CURRENT_TIMESTAMP,
|
541 |
`updatetime` DATETIME DEFAULT NULL,
|
542 |
+
KEY (`relation_id`)
|
543 |
);
|
544 |
"""
|
545 |
},
|
|
|
570 |
INSERT INTO LIGHTRAG_DOC_FULL (doc_id, content, workspace)
|
571 |
VALUES (:id, :content, :workspace)
|
572 |
ON DUPLICATE KEY UPDATE content = VALUES(content), workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP
|
573 |
+
""",
|
574 |
"upsert_chunk": """
|
575 |
INSERT INTO LIGHTRAG_DOC_CHUNKS(chunk_id, content, tokens, chunk_order_index, full_doc_id, content_vector, workspace)
|
576 |
VALUES (:id, :content, :tokens, :chunk_order_index, :full_doc_id, :content_vector, :workspace)
|
577 |
ON DUPLICATE KEY UPDATE
|
578 |
content = VALUES(content), tokens = VALUES(tokens), chunk_order_index = VALUES(chunk_order_index),
|
579 |
full_doc_id = VALUES(full_doc_id), content_vector = VALUES(content_vector), workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP
|
580 |
+
""",
|
581 |
# SQL for VectorStorage
|
582 |
"entities": """SELECT n.name as entity_name FROM
|
583 |
(SELECT entity_id as id, name, VEC_COSINE_DISTANCE(content_vector,:embedding_string) as distance
|
584 |
FROM LIGHTRAG_GRAPH_NODES WHERE workspace = :workspace) n
|
585 |
+
WHERE n.distance>:better_than_threshold ORDER BY n.distance DESC LIMIT :top_k
|
586 |
+
""",
|
587 |
"relationships": """SELECT e.source_name as src_id, e.target_name as tgt_id FROM
|
588 |
(SELECT source_name, target_name, VEC_COSINE_DISTANCE(content_vector, :embedding_string) as distance
|
589 |
FROM LIGHTRAG_GRAPH_EDGES WHERE workspace = :workspace) e
|
590 |
+
WHERE e.distance>:better_than_threshold ORDER BY e.distance DESC LIMIT :top_k
|
591 |
+
""",
|
592 |
"chunks": """SELECT c.id FROM
|
593 |
(SELECT chunk_id as id,VEC_COSINE_DISTANCE(content_vector, :embedding_string) as distance
|
594 |
FROM LIGHTRAG_DOC_CHUNKS WHERE workspace = :workspace) c
|
595 |
+
WHERE c.distance>:better_than_threshold ORDER BY c.distance DESC LIMIT :top_k
|
596 |
+
""",
|
597 |
+
"has_entity": """
|
598 |
+
SELECT COUNT(id) AS cnt FROM LIGHTRAG_GRAPH_NODES WHERE name = :name AND workspace = :workspace
|
599 |
+
""",
|
600 |
+
"has_relationship": """
|
601 |
+
SELECT COUNT(id) AS cnt FROM LIGHTRAG_GRAPH_EDGES WHERE source_name = :source_name AND target_name = :target_name AND workspace = :workspace
|
602 |
+
""",
|
603 |
+
"update_entity": """
|
604 |
+
UPDATE LIGHTRAG_GRAPH_NODES SET
|
605 |
+
entity_id = :id, content = :content, content_vector = :content_vector, updatetime = CURRENT_TIMESTAMP
|
606 |
+
WHERE workspace = :workspace AND name = :name
|
607 |
+
""",
|
608 |
+
"update_relationship": """
|
609 |
+
UPDATE LIGHTRAG_GRAPH_EDGES SET
|
610 |
+
relation_id = :id, content = :content, content_vector = :content_vector, updatetime = CURRENT_TIMESTAMP
|
611 |
+
WHERE workspace = :workspace AND source_name = :source_name AND target_name = :target_name
|
612 |
+
""",
|
613 |
+
"insert_entity": """
|
614 |
INSERT INTO LIGHTRAG_GRAPH_NODES(entity_id, name, content, content_vector, workspace)
|
615 |
VALUES(:id, :name, :content, :content_vector, :workspace)
|
616 |
+
""",
|
617 |
+
"insert_relationship": """
|
|
|
|
|
|
|
618 |
INSERT INTO LIGHTRAG_GRAPH_EDGES(relation_id, source_name, target_name, content, content_vector, workspace)
|
619 |
VALUES(:id, :source_name, :target_name, :content, :content_vector, :workspace)
|
620 |
+
""",
|
621 |
+
# SQL for GraphStorage
|
622 |
+
"get_node": """
|
623 |
+
SELECT entity_id AS id, workspace, name, entity_type, description, source_chunk_id AS source_id, content, content_vector
|
624 |
+
FROM LIGHTRAG_GRAPH_NODES WHERE name = :name AND workspace = :workspace
|
625 |
+
""",
|
626 |
+
"get_edge": """
|
627 |
+
SELECT relation_id AS id, workspace, source_name, target_name, weight, keywords, description, source_chunk_id AS source_id, content, content_vector
|
628 |
+
FROM LIGHTRAG_GRAPH_EDGES WHERE source_name = :source_name AND target_name = :target_name AND workspace = :workspace
|
629 |
+
""",
|
630 |
+
"get_node_edges": """
|
631 |
+
SELECT relation_id AS id, workspace, source_name, target_name, weight, keywords, description, source_chunk_id, content, content_vector
|
632 |
+
FROM LIGHTRAG_GRAPH_EDGES WHERE source_name = :source_name AND workspace = :workspace
|
633 |
+
""",
|
634 |
+
"node_degree": """
|
635 |
+
SELECT COUNT(id) AS cnt FROM LIGHTRAG_GRAPH_EDGES WHERE workspace = :workspace AND :name IN (source_name, target_name)
|
636 |
+
""",
|
637 |
+
"upsert_node": """
|
638 |
+
INSERT INTO LIGHTRAG_GRAPH_NODES(name, content, content_vector, workspace, source_chunk_id, entity_type, description)
|
639 |
+
VALUES(:name, :content, :content_vector, :workspace, :source_chunk_id, :entity_type, :description)
|
640 |
+
ON DUPLICATE KEY UPDATE
|
641 |
+
name = VALUES(name), content = VALUES(content), content_vector = VALUES(content_vector),
|
642 |
+
workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP,
|
643 |
+
source_chunk_id = VALUES(source_chunk_id), entity_type = VALUES(entity_type), description = VALUES(description)
|
644 |
+
""",
|
645 |
+
"upsert_edge": """
|
646 |
+
INSERT INTO LIGHTRAG_GRAPH_EDGES(source_name, target_name, content, content_vector,
|
647 |
+
workspace, weight, keywords, description, source_chunk_id)
|
648 |
+
VALUES(:source_name, :target_name, :content, :content_vector,
|
649 |
+
:workspace, :weight, :keywords, :description, :source_chunk_id)
|
650 |
ON DUPLICATE KEY UPDATE
|
651 |
source_name = VALUES(source_name), target_name = VALUES(target_name), content = VALUES(content),
|
652 |
+
content_vector = VALUES(content_vector), workspace = VALUES(workspace), updatetime = CURRENT_TIMESTAMP,
|
653 |
+
weight = VALUES(weight), keywords = VALUES(keywords), description = VALUES(description),
|
654 |
+
source_chunk_id = VALUES(source_chunk_id)
|
655 |
+
""",
|
656 |
}
|
lightrag/lightrag.py
CHANGED
@@ -79,6 +79,7 @@ MongoKVStorage = lazy_external_import(".kg.mongo_impl", "MongoKVStorage")
|
|
79 |
ChromaVectorDBStorage = lazy_external_import(".kg.chroma_impl", "ChromaVectorDBStorage")
|
80 |
TiDBKVStorage = lazy_external_import(".kg.tidb_impl", "TiDBKVStorage")
|
81 |
TiDBVectorDBStorage = lazy_external_import(".kg.tidb_impl", "TiDBVectorDBStorage")
|
|
|
82 |
AGEStorage = lazy_external_import(".kg.age_impl", "AGEStorage")
|
83 |
|
84 |
|
@@ -282,6 +283,7 @@ class LightRAG:
|
|
282 |
"Neo4JStorage": Neo4JStorage,
|
283 |
"OracleGraphStorage": OracleGraphStorage,
|
284 |
"AGEStorage": AGEStorage,
|
|
|
285 |
# "ArangoDBStorage": ArangoDBStorage
|
286 |
}
|
287 |
|
|
|
79 |
ChromaVectorDBStorage = lazy_external_import(".kg.chroma_impl", "ChromaVectorDBStorage")
|
80 |
TiDBKVStorage = lazy_external_import(".kg.tidb_impl", "TiDBKVStorage")
|
81 |
TiDBVectorDBStorage = lazy_external_import(".kg.tidb_impl", "TiDBVectorDBStorage")
|
82 |
+
TiDBGraphStorage = lazy_external_import(".kg.tidb_impl", "TiDBGraphStorage")
|
83 |
AGEStorage = lazy_external_import(".kg.age_impl", "AGEStorage")
|
84 |
|
85 |
|
|
|
283 |
"Neo4JStorage": Neo4JStorage,
|
284 |
"OracleGraphStorage": OracleGraphStorage,
|
285 |
"AGEStorage": AGEStorage,
|
286 |
+
"TiDBGraphStorage": TiDBGraphStorage,
|
287 |
# "ArangoDBStorage": ArangoDBStorage
|
288 |
}
|
289 |
|