al1k
committed on
feat: add cloudflare workers ai integration for lightrag
Browse files
examples/unofficial-sample/{lighrag_cloudflareworker_example.py → lighrag_cloudflare_demo.py}
RENAMED
@@ -16,21 +16,24 @@ from dotenv import load_dotenv
|
|
16 |
|
17 |
"""This code is a modified version of lightrag_openai_demo.py"""
|
18 |
|
|
|
19 |
load_dotenv(dotenv_path=".env", override=False)
|
20 |
|
21 |
-
# ideally, as always, env!
|
22 |
|
23 |
-
|
24 |
-
cloudflare_api_key = '
|
25 |
-
|
|
|
|
|
|
|
|
|
26 |
|
27 |
# choose an embedding model
|
28 |
EMBEDDING_MODEL = '@cf/baai/bge-m3'
|
29 |
# choose a generative model
|
30 |
LLM_MODEL = "@cf/meta/llama-3.2-3b-instruct"
|
31 |
|
32 |
-
WORKING_DIR = "../dickens"
|
33 |
-
|
34 |
|
35 |
|
36 |
class CloudflareWorker:
|
@@ -319,6 +322,29 @@ async def main():
|
|
319 |
else:
|
320 |
print(resp)
|
321 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
322 |
except Exception as e:
|
323 |
print(f"An error occurred: {e}")
|
324 |
finally:
|
|
|
16 |
|
17 |
"""This code is a modified version of lightrag_openai_demo.py"""
|
18 |
|
19 |
+
# ideally, as always, env!
|
20 |
load_dotenv(dotenv_path=".env", override=False)
|
21 |
|
|
|
22 |
|
23 |
+
""" ----========= IMPORTANT CHANGE THIS! =========---- """
|
24 |
+
cloudflare_api_key = 'YOUR_API_KEY'
|
25 |
+
account_id = 'YOUR_ACCOUNT ID' #This is unique to your Cloudflare account
|
26 |
+
|
27 |
+
# Automatically changes
|
28 |
+
api_base_url = f"https://api.cloudflare.com/client/v4/accounts/{account_id}/ai/run/"
|
29 |
+
|
30 |
|
31 |
# choose an embedding model
|
32 |
EMBEDDING_MODEL = '@cf/baai/bge-m3'
|
33 |
# choose a generative model
|
34 |
LLM_MODEL = "@cf/meta/llama-3.2-3b-instruct"
|
35 |
|
36 |
+
WORKING_DIR = "../dickens" #you can change output as desired
|
|
|
37 |
|
38 |
|
39 |
class CloudflareWorker:
|
|
|
322 |
else:
|
323 |
print(resp)
|
324 |
|
325 |
+
|
326 |
+
|
327 |
+
""" FOR TESTING (if you want to test straight away, after building. Uncomment this part"""
|
328 |
+
|
329 |
+
"""
|
330 |
+
print("\n" + "=" * 60)
|
331 |
+
print("AI ASSISTANT READY!")
|
332 |
+
print("Ask questions about (your uploaded) regulations")
|
333 |
+
print("Type 'quit' to exit")
|
334 |
+
print("=" * 60)
|
335 |
+
|
336 |
+
while True:
|
337 |
+
question = input("\n🔥 Your question: ")
|
338 |
+
|
339 |
+
if question.lower() in ['quit', 'exit', 'bye']:
|
340 |
+
break
|
341 |
+
|
342 |
+
print("\nThinking...")
|
343 |
+
response = await rag.aquery(question, param=QueryParam(mode="hybrid"))
|
344 |
+
print(f"\nAnswer: {response}")
|
345 |
+
|
346 |
+
"""
|
347 |
+
|
348 |
except Exception as e:
|
349 |
print(f"An error occurred: {e}")
|
350 |
finally:
|