Commit 552eb31
Parent(s): 7a4a21b
Fix the demo issue of PG to cater with new LightRag changes
examples/lightrag_zhipu_postgres_demo.py (CHANGED)
@@ -37,20 +37,22 @@ async def main():
         llm_model_max_token_size=32768,
         enable_llm_cache_for_entity_extract=True,
         embedding_func=EmbeddingFunc(
-            embedding_dim=
+            embedding_dim=1024,
             max_token_size=8192,
             func=lambda texts: ollama_embedding(
-                texts, embed_model="
+                texts, embed_model="bge-m3", host="http://localhost:11434"
             ),
         ),
         kv_storage="PGKVStorage",
         doc_status_storage="PGDocStatusStorage",
         graph_storage="PGGraphStorage",
         vector_storage="PGVectorStorage",
+        auto_manage_storages_states=False,
     )
 
     # add embedding_func for graph database, it's deleted in commit 5661d76860436f7bf5aef2e50d9ee4a59660146c
     rag.chunk_entity_relation_graph.embedding_func = rag.embedding_func
+    await rag.initialize_storages()
 
     with open(f"{ROOT_DIR}/book.txt", "r", encoding="utf-8") as f:
         await rag.ainsert(f.read())
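
For context, below is a minimal sketch of the demo flow this commit produces: because auto_manage_storages_states=False is now passed to the constructor, the PostgreSQL-backed storages have to be initialized explicitly with await rag.initialize_storages() before documents are inserted. Only the keyword arguments shown in the diff come from the commit; the import paths, WORKING_DIR, the book.txt location, the zhipu_complete LLM wrapper, and the PostgreSQL connection handling are assumptions filled in for illustration and may differ across LightRAG versions.

# Minimal sketch of the updated demo flow; assumed details are marked in comments.
import asyncio

from lightrag import LightRAG                      # import paths assumed; they vary by LightRAG version
from lightrag.utils import EmbeddingFunc
from lightrag.llm import ollama_embedding, zhipu_complete

WORKING_DIR = "./lightrag_pg_demo"                 # hypothetical; the real demo uses its own ROOT_DIR


async def main():
    rag = LightRAG(
        working_dir=WORKING_DIR,
        llm_model_func=zhipu_complete,             # assumed: the demo's ZhipuAI completion wrapper
        llm_model_max_token_size=32768,
        enable_llm_cache_for_entity_extract=True,
        embedding_func=EmbeddingFunc(
            embedding_dim=1024,                    # bge-m3 returns 1024-dimensional vectors
            max_token_size=8192,
            func=lambda texts: ollama_embedding(
                texts, embed_model="bge-m3", host="http://localhost:11434"
            ),
        ),
        # PostgreSQL-backed storages; connection settings are expected to come
        # from the demo's environment configuration and are omitted here.
        kv_storage="PGKVStorage",
        doc_status_storage="PGDocStatusStorage",
        graph_storage="PGGraphStorage",
        vector_storage="PGVectorStorage",
        # New in this commit: disable automatic storage lifecycle management ...
        auto_manage_storages_states=False,
    )

    # Re-attach the embedding function to the graph storage (removed upstream in 5661d76).
    rag.chunk_entity_relation_graph.embedding_func = rag.embedding_func

    # ... and initialize the storage backends explicitly before inserting documents.
    await rag.initialize_storages()

    with open("book.txt", "r", encoding="utf-8") as f:
        await rag.ainsert(f.read())


if __name__ == "__main__":
    asyncio.run(main())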