yangdx committed on
Commit
e8f9a30
·
1 Parent(s): a6f5190

fix linting

Browse files
lightrag/api/lightrag_server.py CHANGED
@@ -725,10 +725,7 @@ def create_app(args):
725
  from lightrag.llm.ollama import ollama_model_complete, ollama_embed
726
  if args.llm_binding == "openai" or args.embedding_binding == "openai":
727
  from lightrag.llm.openai import openai_complete_if_cache, openai_embed
728
- if (
729
- args.llm_binding == "azure_openai"
730
- or args.embedding_binding == "azure_openai"
731
- ):
732
  from lightrag.llm.azure_openai import (
733
  azure_openai_complete_if_cache,
734
  azure_openai_embed,
 
725
  from lightrag.llm.ollama import ollama_model_complete, ollama_embed
726
  if args.llm_binding == "openai" or args.embedding_binding == "openai":
727
  from lightrag.llm.openai import openai_complete_if_cache, openai_embed
728
+ if args.llm_binding == "azure_openai" or args.embedding_binding == "azure_openai":
 
 
 
729
  from lightrag.llm.azure_openai import (
730
  azure_openai_complete_if_cache,
731
  azure_openai_embed,
lightrag/operate.py CHANGED
@@ -1543,7 +1543,9 @@ async def naive_query(
1543
 
1544
  sys_prompt_temp = PROMPTS["naive_rag_response"]
1545
  sys_prompt = sys_prompt_temp.format(
1546
- content_data=section, response_type=query_param.response_type, history=history_context
 
 
1547
  )
1548
 
1549
  if query_param.only_need_prompt:
 
1543
 
1544
  sys_prompt_temp = PROMPTS["naive_rag_response"]
1545
  sys_prompt = sys_prompt_temp.format(
1546
+ content_data=section,
1547
+ response_type=query_param.response_type,
1548
+ history=history_context,
1549
  )
1550
 
1551
  if query_param.only_need_prompt: