Fix linting
- examples/lightrag_ollama_demo.py  +2 -3
- lightrag/llm/ollama.py  +17 -9
examples/lightrag_ollama_demo.py (CHANGED)

@@ -26,9 +26,7 @@ def configure_logging():
 
     # Get log directory path from environment variable or use current directory
     log_dir = os.getenv("LOG_DIR", os.getcwd())
-    log_file_path = os.path.abspath(
-        os.path.join(log_dir, "lightrag_ollama_demo.log")
-    )
+    log_file_path = os.path.abspath(os.path.join(log_dir, "lightrag_ollama_demo.log"))
 
     print(f"\nLightRAG compatible demo log file: {log_file_path}\n")
     os.makedirs(os.path.dirname(log_file_path), exist_ok=True)

@@ -211,6 +209,7 @@ async def main():
         await rag.llm_response_cache.index_done_callback()
         await rag.finalize_storages()
 
+
 if __name__ == "__main__":
     # Configure logging before running the main function
     configure_logging()
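The first hunk joins the wrapped os.path.abspath(...) call onto a single line; the second adds the second blank line linters expect before the top-level if __name__ == "__main__": block. For context, the touched lines are the path-resolution step in configure_logging(): the log file goes under LOG_DIR when that variable is set, otherwise under the current working directory. A minimal standalone sketch of just that step, with no LightRAG imports:

import os

# Resolve the demo log path: honor LOG_DIR if set, otherwise fall back to
# the current working directory, mirroring the reformatted lines above.
log_dir = os.getenv("LOG_DIR", os.getcwd())
log_file_path = os.path.abspath(os.path.join(log_dir, "lightrag_ollama_demo.log"))

# Ensure the target directory exists before any logging handler opens the file.
os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
print(f"\nLightRAG compatible demo log file: {log_file_path}\n")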
|
lightrag/llm/ollama.py (CHANGED)

@@ -62,9 +62,9 @@ async def _ollama_model_if_cache(
     }
     if api_key:
         headers["Authorization"] = f"Bearer {api_key}"
-
+
     ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
-
+
     try:
         messages = []
         if system_prompt:

@@ -106,15 +106,21 @@ async def _ollama_model_if_cache(
            await ollama_client._client.aclose()
            logger.debug("Successfully closed Ollama client after exception")
        except Exception as close_error:
-            logger.warning(f"Failed to close Ollama client after exception: {close_error}")
+            logger.warning(
+                f"Failed to close Ollama client after exception: {close_error}"
+            )
        raise e
    finally:
        if not stream:
            try:
                await ollama_client._client.aclose()
-                logger.debug("Successfully closed Ollama client for non-streaming response")
+                logger.debug(
+                    "Successfully closed Ollama client for non-streaming response"
+                )
            except Exception as close_error:
-                logger.warning(f"Failed to close Ollama client in finally block: {close_error}")
+                logger.warning(
+                    f"Failed to close Ollama client in finally block: {close_error}"
+                )
 
 
 async def ollama_model_complete(

@@ -141,12 +147,12 @@ async def ollama_embed(texts: list[str], embed_model, **kwargs) -> np.ndarray:
     }
     if api_key:
         headers["Authorization"] = f"Bearer {api_key}"
-
+
     host = kwargs.pop("host", None)
     timeout = kwargs.pop("timeout", None) or 90  # Default time out 90s
-
+
     ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
-
+
     try:
         data = await ollama_client.embed(model=embed_model, input=texts)
         return np.array(data["embeddings"])

@@ -156,7 +162,9 @@ async def ollama_embed(texts: list[str], embed_model, **kwargs) -> np.ndarray:
            await ollama_client._client.aclose()
            logger.debug("Successfully closed Ollama client after exception in embed")
        except Exception as close_error:
-            logger.warning(f"Failed to close Ollama client after exception in embed: {close_error}")
+            logger.warning(
+                f"Failed to close Ollama client after exception in embed: {close_error}"
+            )
        raise e
    finally:
        try: