yangdx committed on
Commit
a08239c
·
1 Parent(s): 6a34a59

Add tokenizer to global_config

Browse files
Files changed (1) hide show
  1. lightrag/lightrag.py +5 -5
lightrag/lightrag.py CHANGED
@@ -322,11 +322,6 @@ class LightRAG:
322
  **self.vector_db_storage_cls_kwargs,
323
  }
324
 
325
- # Show config
326
- global_config = asdict(self)
327
- _print_config = ",\n ".join([f"{k} = {v}" for k, v in global_config.items()])
328
- logger.debug(f"LightRAG init with param:\n {_print_config}\n")
329
-
330
  # Init Tokenizer
331
  # Post-initialization hook to handle backward compatabile tokenizer initialization based on provided parameters
332
  if self.tokenizer is None:
@@ -335,6 +330,11 @@ class LightRAG:
335
  else:
336
  self.tokenizer = TiktokenTokenizer()
337
 
 
 
 
 
 
338
  # Init Embedding
339
  self.embedding_func = limit_async_func_call(self.embedding_func_max_async)( # type: ignore
340
  self.embedding_func
 
322
  **self.vector_db_storage_cls_kwargs,
323
  }
324
 
 
 
 
 
 
325
  # Init Tokenizer
326
  # Post-initialization hook to handle backward compatabile tokenizer initialization based on provided parameters
327
  if self.tokenizer is None:
 
330
  else:
331
  self.tokenizer = TiktokenTokenizer()
332
 
333
+ # Fix global_config now
334
+ global_config = asdict(self)
335
+ _print_config = ",\n ".join([f"{k} = {v}" for k, v in global_config.items()])
336
+ logger.debug(f"LightRAG init with param:\n {_print_config}\n")
337
+
338
  # Init Embedding
339
  self.embedding_func = limit_async_func_call(self.embedding_func_max_async)( # type: ignore
340
  self.embedding_func