yangdx committed
Commit a3f614a · 2 Parent(s): 90d5ed5 07c1c0f

Merge branch 'add-keyword-extraction-param-for-llm' into fix-mutable-default-param
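What the merged branch adds: the server's OpenAI and Azure OpenAI completion wrappers now accept a keyword_extraction flag and, when it is set, request structured output typed by lightrag.types.GPTKeywordExtractionFormat instead of a plain string. As rough orientation before the diff, a plausible shape for that response-format model is sketched below; the field names are an assumption for illustration only, and the authoritative definition lives in lightrag/types.py.

# Assumed sketch of the response-format model the diff imports; the real class
# is defined in lightrag/types.py and its field names may differ.
from typing import List

from pydantic import BaseModel


class GPTKeywordExtractionFormat(BaseModel):
    # Two keyword granularities, matching LightRAG's high-/low-level keyword
    # prompts (field names assumed for illustration).
    high_level_keywords: List[str]
    low_level_keywords: List[str]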

Files changed (1)
  1. lightrag/api/lightrag_server.py +7 -0
lightrag/api/lightrag_server.py CHANGED

@@ -17,6 +17,7 @@ import argparse
 from typing import List, Any, Optional, Union, Dict
 from pydantic import BaseModel
 from lightrag import LightRAG, QueryParam
+from lightrag.types import GPTKeywordExtractionFormat
 from lightrag.api import __api_version__
 from lightrag.utils import EmbeddingFunc
 from enum import Enum
@@ -756,6 +757,9 @@ def create_app(args):
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        keyword_extraction = kwargs.pop("keyword_extraction", None)
+        if keyword_extraction:
+            kwargs["response_format"] = GPTKeywordExtractionFormat
         if history_messages is None:
             history_messages = []
         return await openai_complete_if_cache(
@@ -775,6 +779,9 @@ def create_app(args):
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        keyword_extraction = kwargs.pop("keyword_extraction", None)
+        if keyword_extraction:
+            kwargs["response_format"] = GPTKeywordExtractionFormat
         if history_messages is None:
             history_messages = []
         return await azure_openai_complete_if_cache(
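
The change itself is small and identical in both wrappers: pop the keyword_extraction flag out of **kwargs and, when it is truthy, forward a response_format to the underlying completion helper. A minimal sketch of that pattern follows; it is not the full create_app wrapper from lightrag_server.py, the demo_model_complete name and the model name are illustrative only, and the openai_complete_if_cache import path is assumed from the single-module lightrag.llm of this era.

# Minimal sketch of the pattern applied by this diff (assumptions noted inline).
from lightrag.llm import openai_complete_if_cache  # helper called by the wrapper; import path assumed
from lightrag.types import GPTKeywordExtractionFormat  # import added by the diff


async def demo_model_complete(
    prompt,
    system_prompt=None,
    history_messages=None,
    **kwargs,
) -> str:
    # The caller signals keyword extraction via kwargs; when set, ask the LLM
    # for structured output instead of a free-form string.
    keyword_extraction = kwargs.pop("keyword_extraction", None)
    if keyword_extraction:
        kwargs["response_format"] = GPTKeywordExtractionFormat
    # Default the history inside the body to avoid a mutable default argument
    # (the concern of the fix-mutable-default-param branch).
    if history_messages is None:
        history_messages = []
    return await openai_complete_if_cache(
        "gpt-4o-mini",  # hypothetical model name, only for the sketch
        prompt,
        system_prompt=system_prompt,
        history_messages=history_messages,
        **kwargs,
    )

A caller would then request structured keyword output with, for example, await demo_model_complete("Extract keywords from ...", keyword_extraction=True); without the flag the wrapper behaves exactly as before.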