gzdaniel commited on
Commit
75ddef0
Β·
1 Parent(s): c38d821

Remove deprecated demo code

Browse files
examples/lightrag_multi_model_all_modes_demo.py DELETED
@@ -1,88 +0,0 @@
1
- import os
2
- import asyncio
3
- from lightrag import LightRAG, QueryParam
4
- from lightrag.llm.openai import gpt_4o_mini_complete, gpt_4o_complete, openai_embed
5
- from lightrag.kg.shared_storage import initialize_pipeline_status
6
-
7
- WORKING_DIR = "./lightrag_demo"
8
-
9
- if not os.path.exists(WORKING_DIR):
10
- os.mkdir(WORKING_DIR)
11
-
12
-
13
- async def initialize_rag():
14
- rag = LightRAG(
15
- working_dir=WORKING_DIR,
16
- embedding_func=openai_embed,
17
- llm_model_func=gpt_4o_mini_complete, # Default model for queries
18
- )
19
-
20
- await rag.initialize_storages()
21
- await initialize_pipeline_status()
22
-
23
- return rag
24
-
25
-
26
- def main():
27
- # Initialize RAG instance
28
- rag = asyncio.run(initialize_rag())
29
-
30
- # Load the data
31
- with open("./book.txt", "r", encoding="utf-8") as f:
32
- rag.insert(f.read())
33
-
34
- # Query with naive mode (default model)
35
- print("--- NAIVE mode ---")
36
- print(
37
- rag.query(
38
- "What are the main themes in this story?", param=QueryParam(mode="naive")
39
- )
40
- )
41
-
42
- # Query with local mode (default model)
43
- print("\n--- LOCAL mode ---")
44
- print(
45
- rag.query(
46
- "What are the main themes in this story?", param=QueryParam(mode="local")
47
- )
48
- )
49
-
50
- # Query with global mode (default model)
51
- print("\n--- GLOBAL mode ---")
52
- print(
53
- rag.query(
54
- "What are the main themes in this story?", param=QueryParam(mode="global")
55
- )
56
- )
57
-
58
- # Query with hybrid mode (default model)
59
- print("\n--- HYBRID mode ---")
60
- print(
61
- rag.query(
62
- "What are the main themes in this story?", param=QueryParam(mode="hybrid")
63
- )
64
- )
65
-
66
- # Query with mix mode (default model)
67
- print("\n--- MIX mode ---")
68
- print(
69
- rag.query(
70
- "What are the main themes in this story?", param=QueryParam(mode="mix")
71
- )
72
- )
73
-
74
- # Query with a custom model (gpt-4o) for a more complex question
75
- print("\n--- Using custom model for complex analysis ---")
76
- print(
77
- rag.query(
78
- "How does the character development reflect Victorian-era attitudes?",
79
- param=QueryParam(
80
- mode="global",
81
- model_func=gpt_4o_complete, # Override default model with more capable one
82
- ),
83
- )
84
- )
85
-
86
-
87
- if __name__ == "__main__":
88
- main()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
examples/{copy_llm_cache_to_another_storage.py β†’ unofficial-sample/copy_llm_cache_to_another_storage.py} RENAMED
File without changes
examples/{lightrag_bedrock_demo.py β†’ unofficial-sample/lightrag_bedrock_demo.py} RENAMED
File without changes
examples/{lightrag_hf_demo.py β†’ unofficial-sample/lightrag_hf_demo.py} RENAMED
File without changes
examples/{lightrag_llamaindex_direct_demo.py β†’ unofficial-sample/lightrag_llamaindex_direct_demo.py} RENAMED
File without changes
examples/{lightrag_llamaindex_litellm_demo.py β†’ unofficial-sample/lightrag_llamaindex_litellm_demo.py} RENAMED
File without changes
examples/{lightrag_lmdeploy_demo.py β†’ unofficial-sample/lightrag_lmdeploy_demo.py} RENAMED
File without changes
examples/{lightrag_nvidia_demo.py β†’ unofficial-sample/lightrag_nvidia_demo.py} RENAMED
File without changes
examples/{lightrag_openai_neo4j_milvus_redis_demo.py β†’ unofficial-sample/lightrag_openai_neo4j_milvus_redis_demo.py} RENAMED
File without changes