zrguo committed on
Commit
9a811be
·
2 Parent(s): 2ef2ae4 7441782

Merge pull request #116 from Dormiveglia-elf/hotfix/embedding-dim

Browse files
examples/lightrag_openai_compatible_demo.py CHANGED
@@ -34,6 +34,13 @@ async def embedding_func(texts: list[str]) -> np.ndarray:
34
  )
35
 
36
 
 
 
 
 
 
 
 
37
  # function test
38
  async def test_funcs():
39
  result = await llm_model_func("How are you?")
@@ -43,37 +50,46 @@ async def test_funcs():
43
  print("embedding_func: ", result)
44
 
45
 
46
- asyncio.run(test_funcs())
 
 
 
 
 
47
 
 
 
 
 
 
 
 
48
 
49
- rag = LightRAG(
50
- working_dir=WORKING_DIR,
51
- llm_model_func=llm_model_func,
52
- embedding_func=EmbeddingFunc(
53
- embedding_dim=4096, max_token_size=8192, func=embedding_func
54
- ),
55
- )
56
 
 
 
57
 
58
- with open("./book.txt", "r", encoding="utf-8") as f:
59
- rag.insert(f.read())
 
 
60
 
61
- # Perform naive search
62
- print(
63
- rag.query("What are the top themes in this story?", param=QueryParam(mode="naive"))
64
- )
65
 
66
- # Perform local search
67
- print(
68
- rag.query("What are the top themes in this story?", param=QueryParam(mode="local"))
69
- )
70
 
71
- # Perform global search
72
- print(
73
- rag.query("What are the top themes in this story?", param=QueryParam(mode="global"))
74
- )
 
 
75
 
76
- # Perform hybrid search
77
- print(
78
- rag.query("What are the top themes in this story?", param=QueryParam(mode="hybrid"))
79
- )
 
34
  )
35
 
36
 
37
async def get_embedding_dim():
    """Return the dimensionality of vectors produced by ``embedding_func``.

    Sends a single probe sentence through the embedding function and reads
    the width of the resulting array, so the caller never has to hard-code
    the model's embedding size.
    """
    probe = await embedding_func(["This is a test sentence."])
    # shape is (num_texts, embedding_dim); axis 1 is the vector width
    return probe.shape[1]
42
+
43
+
44
  # function test
45
  async def test_funcs():
46
  result = await llm_model_func("How are you?")
 
50
  print("embedding_func: ", result)
51
 
52
 
53
+ # asyncio.run(test_funcs())
54
+
55
async def main():
    """Demo driver: detect the embedding size, build a LightRAG instance,
    index ./book.txt, and run the same question through every search mode.

    Any failure is reported to stdout rather than raised, keeping the demo
    best-effort.
    """
    try:
        embedding_dimension = await get_embedding_dim()
        print(f"Detected embedding dimension: {embedding_dimension}")

        rag = LightRAG(
            working_dir=WORKING_DIR,
            llm_model_func=llm_model_func,
            embedding_func=EmbeddingFunc(
                embedding_dim=embedding_dimension,
                max_token_size=8192,
                func=embedding_func,
            ),
        )

        with open("./book.txt", "r", encoding="utf-8") as f:
            rag.insert(f.read())

        # One query per retrieval strategy: naive, local, global, hybrid.
        for mode in ("naive", "local", "global", "hybrid"):
            print(
                rag.query(
                    "What are the top themes in this story?",
                    param=QueryParam(mode=mode),
                )
            )
    except Exception as e:
        print(f"An error occurred: {e}")
93
 
94
# Script entry point: run the async demo under a fresh asyncio event loop.
if __name__ == "__main__":
    asyncio.run(main())