gzdaniel committed
Commit 96fe1c7 · 1 parent: 3b12ee0

Set DEFAULT_HISTORY_TURNS to 0

env.example CHANGED
@@ -50,7 +50,7 @@ OLLAMA_EMULATING_MODEL_TAG=latest
 ########################
 # LLM responde cache for query (Not valid for streaming response
 ENABLE_LLM_CACHE=true
-# HISTORY_TURNS=3
+# HISTORY_TURNS=0
 # COSINE_THRESHOLD=0.2
 ### Number of entities or relations retrieved from KG
 # TOP_K=40
lightrag/constants.py CHANGED
@@ -18,7 +18,7 @@ DEFAULT_CHUNK_TOP_K = 10
 DEFAULT_MAX_ENTITY_TOKENS = 10000
 DEFAULT_MAX_RELATION_TOKENS = 10000
 DEFAULT_MAX_TOTAL_TOKENS = 32000
-DEFAULT_HISTORY_TURNS = 3
+DEFAULT_HISTORY_TURNS = 0
 DEFAULT_ENABLE_RERANK = True
 DEFAULT_COSINE_THRESHOLD = 0.2
 DEFAULT_RELATED_CHUNK_NUMBER = 10
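
For context, a minimal sketch of how a default like this is typically consumed: the commented-out HISTORY_TURNS entry in env.example would override the hard-coded DEFAULT_HISTORY_TURNS when set. The helper name resolve_history_turns is illustrative only and is not part of LightRAG's actual API.

import os

# Default after this commit (lightrag/constants.py): include no prior
# conversation turns in query prompts unless explicitly overridden.
DEFAULT_HISTORY_TURNS = 0

def resolve_history_turns() -> int:
    # Illustrative helper (assumption, not LightRAG code): prefer the
    # HISTORY_TURNS environment variable shown in env.example, falling
    # back to DEFAULT_HISTORY_TURNS when it is unset.
    raw = os.environ.get("HISTORY_TURNS")
    return int(raw) if raw is not None else DEFAULT_HISTORY_TURNS

With the env.example line left commented out, such a lookup now resolves to 0, i.e. no conversation history is folded into query prompts by default, which matches the webui's new history_turns: 0 default below.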
lightrag_webui/src/stores/settings.ts CHANGED
@@ -271,7 +271,8 @@ const useSettingsStoreBase = create<SettingsState>()(
       max_entity_tokens: 10000,
       max_relation_tokens: 10000,
       max_total_tokens: 32000,
-      enable_rerank: true
+      enable_rerank: true,
+      history_turns: 0,
     }
   }
   return state