Update llm.py
lightrag/llm.py  (+6 -6)
@@ -354,7 +354,7 @@ async def ollama_model_if_cache(
         "Content-Type": "application/json",
         "Authorization": f"Bearer {api_key}"
     } if api_key else {"Content-Type": "application/json"}
-    ollama_client = ollama.AsyncClient(host=host, timeout=timeout)
+    ollama_client = ollama.AsyncClient(host=host, timeout=timeout, headers=headers)
     messages = []
     if system_prompt:
         messages.append({"role": "system", "content": system_prompt})
@@ -418,7 +418,7 @@ async def lollms_model_if_cache(
     request_data["prompt"] = full_prompt
     timeout = aiohttp.ClientTimeout(total=kwargs.get("timeout", None))

-    async with aiohttp.ClientSession(timeout=timeout) as session:
+    async with aiohttp.ClientSession(timeout=timeout, headers=headers) as session:
         if stream:

             async def inner():
@@ -1160,9 +1160,9 @@ async def ollama_embedding(texts: list[str], embed_model, **kwargs) -> np.ndarray:
 async def ollama_embed(texts: list[str], embed_model, **kwargs) -> np.ndarray:
     api_key = kwargs.pop("api_key", None)
     headers = (
-        {"
+        {"Content-Type": "application/json", "Authorization": api_key}
         if api_key
-        else
+        else {"Content-Type": "application/json"}
     )
     kwargs["headers"] = headers
     ollama_client = ollama.Client(**kwargs)
@@ -1187,9 +1187,9 @@ async def lollms_embed(
     """
     api_key = kwargs.pop("api_key", None)
     headers = (
-        {"
+        {"Content-Type": "application/json", "Authorization": api_key}
         if api_key
-        else
+        else {"Content-Type": "application/json"}
     )
     async with aiohttp.ClientSession(headers=headers) as session:
         embeddings = []
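
For reference, the pattern this change applies is a conditional headers dict built from an optional api_key and passed straight to the client. A minimal sketch, assuming the ollama Python client forwards host/headers keyword arguments to its underlying HTTP client; build_headers is a hypothetical helper, whereas llm.py inlines the expression:

import ollama

def build_headers(api_key=None):
    # Embed-side variant from the diff; the chat-side code above keeps
    # f"Bearer {api_key}" as the Authorization value.
    return (
        {"Content-Type": "application/json", "Authorization": api_key}
        if api_key
        else {"Content-Type": "application/json"}
    )

# host and headers are forwarded by ollama.Client to its HTTP layer.
client = ollama.Client(host="http://localhost:11434", headers=build_headers("my-api-key"))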
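
A usage sketch of the updated embed path, assuming lightrag/llm.py is importable as lightrag.llm; the model name, host, and key below are placeholders, not values from this commit:

import asyncio
from lightrag.llm import ollama_embed

# api_key is popped from kwargs inside ollama_embed and sent as the
# Authorization header; the remaining kwargs (e.g. host) reach ollama.Client.
vectors = asyncio.run(
    ollama_embed(
        ["a short test sentence"],
        embed_model="nomic-embed-text",
        host="http://localhost:11434",
        api_key="my-api-key",
    )
)
print(vectors.shape)  # np.ndarray of embeddings, per the function's annotation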