Commit 5d5f55d
Konrad Wojciechowski committed
1 Parent(s): 7d43772

fix AttributeError: 'NoneType' object has no attribute 'dim'

Files changed: lightrag/llm/hf.py (+6 -3)

lightrag/llm/hf.py CHANGED
@@ -139,11 +139,14 @@ async def hf_model_complete(
 
 async def hf_embed(texts: list[str], tokenizer, embed_model) -> np.ndarray:
     device = next(embed_model.parameters()).device
-    input_ids = tokenizer(
+    encoded_texts = tokenizer(
         texts, return_tensors="pt", padding=True, truncation=True
-    ).input_ids.to(device)
+    ).to(device)
     with torch.no_grad():
-        outputs = embed_model(input_ids)
+        outputs = embed_model(
+            input_ids=encoded_texts["input_ids"],
+            attention_mask=encoded_texts["attention_mask"],
+        )
     embeddings = outputs.last_hidden_state.mean(dim=1)
     if embeddings.dtype == torch.bfloat16:
         return embeddings.detach().to(torch.float32).cpu().numpy()
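For reference, a minimal usage sketch of the patched hf_embed. The model name, the import path lightrag.llm.hf, and the asyncio.run wrapper are illustrative assumptions for this sketch, not something the commit itself specifies:

import asyncio

from transformers import AutoModel, AutoTokenizer

# Assumed import path, matching the file location lightrag/llm/hf.py in this repo.
from lightrag.llm.hf import hf_embed

# Illustrative encoder choice; any BERT-style model that exposes last_hidden_state works here.
model_name = "sentence-transformers/all-MiniLM-L6-v2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
embed_model = AutoModel.from_pretrained(model_name)

# hf_embed is async, so a one-off call can be driven with asyncio.run.
# After this commit, both input_ids and attention_mask from the tokenizer
# output are forwarded to the model.
embeddings = asyncio.run(
    hf_embed(["hello graph rag", "hello world"], tokenizer, embed_model)
)
print(embeddings.shape)  # (2, hidden_size), returned as a numpy array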