gzdaniel committed on
Commit
ff68e59
·
1 Parent(s): f87247c

Enhance Redis connection handling with retries and timeouts

Browse files

- Added Redis connection timeout configurations
- Implemented retry logic for Redis operations
- Updated error handling for timeout cases
- Improved connection pool management
- Added environment variable support

Files changed (2) hide show
  1. env.example +4 -0
  2. lightrag/kg/redis_impl.py +31 -5
env.example CHANGED
@@ -216,6 +216,10 @@ QDRANT_URL=http://localhost:6333
216
 
217
  ### Redis
218
  REDIS_URI=redis://localhost:6379
 
 
 
 
219
  # REDIS_WORKSPACE=forced_workspace_name
220
 
221
  ### Memgraph Configuration
 
216
 
217
  ### Redis
218
  REDIS_URI=redis://localhost:6379
219
+ REDIS_SOCKET_TIMEOUT=30
220
+ REDIS_CONNECT_TIMEOUT=10
221
+ REDIS_MAX_CONNECTIONS=100
222
+ REDIS_RETRY_ATTEMPTS=3
223
  # REDIS_WORKSPACE=forced_workspace_name
224
 
225
  ### Memgraph Configuration
lightrag/kg/redis_impl.py CHANGED
@@ -11,7 +11,7 @@ if not pm.is_installed("redis"):
11
 
12
 # aioredis is a deprecated library, replaced with redis
13
  from redis.asyncio import Redis, ConnectionPool # type: ignore
14
- from redis.exceptions import RedisError, ConnectionError # type: ignore
15
  from lightrag.utils import logger
16
 
17
  from lightrag.base import (
@@ -22,14 +22,35 @@ from lightrag.base import (
22
  )
23
  import json
24
 
 
 
 
 
 
 
 
 
25
 
26
  config = configparser.ConfigParser()
27
  config.read("config.ini", "utf-8")
28
 
29
- # Constants for Redis connection pool
30
- MAX_CONNECTIONS = 100
31
- SOCKET_TIMEOUT = 5.0
32
- SOCKET_CONNECT_TIMEOUT = 3.0
 
 
 
 
 
 
 
 
 
 
 
 
 
33
 
34
 
35
  class RedisConnectionManager:
@@ -220,6 +241,7 @@ class RedisKVStorage(BaseKVStorage):
220
  """Ensure Redis resources are cleaned up when exiting context."""
221
  await self.close()
222
 
 
223
  async def get_by_id(self, id: str) -> dict[str, Any] | None:
224
  async with self._get_redis_connection() as redis:
225
  try:
@@ -235,6 +257,7 @@ class RedisKVStorage(BaseKVStorage):
235
  logger.error(f"JSON decode error for id {id}: {e}")
236
  return None
237
 
 
238
  async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
239
  async with self._get_redis_connection() as redis:
240
  try:
@@ -311,6 +334,7 @@ class RedisKVStorage(BaseKVStorage):
311
  existing_ids = {keys_list[i] for i, exists in enumerate(results) if exists}
312
  return set(keys) - existing_ids
313
 
 
314
  async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
315
  if not data:
316
  return
@@ -790,6 +814,7 @@ class RedisDocStatusStorage(DocStatusStorage):
790
  """Redis handles persistence automatically"""
791
  pass
792
 
 
793
  async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
794
  """Insert or update document status data"""
795
  if not data:
@@ -811,6 +836,7 @@ class RedisDocStatusStorage(DocStatusStorage):
811
  logger.error(f"JSON decode error during upsert: {e}")
812
  raise
813
 
 
814
  async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
815
  async with self._get_redis_connection() as redis:
816
  try:
 
11
 
12
 # aioredis is a deprecated library, replaced with redis
13
  from redis.asyncio import Redis, ConnectionPool # type: ignore
14
+ from redis.exceptions import RedisError, ConnectionError, TimeoutError # type: ignore
15
  from lightrag.utils import logger
16
 
17
  from lightrag.base import (
 
22
  )
23
  import json
24
 
25
+ # Import tenacity for retry logic
26
+ from tenacity import (
27
+ retry,
28
+ stop_after_attempt,
29
+ wait_exponential,
30
+ retry_if_exception_type,
31
+ before_sleep_log,
32
+ )
33
 
34
  config = configparser.ConfigParser()
35
  config.read("config.ini", "utf-8")
36
 
37
+ # Constants for Redis connection pool with environment variable support
38
+ MAX_CONNECTIONS = int(os.getenv("REDIS_MAX_CONNECTIONS", "100"))
39
+ SOCKET_TIMEOUT = float(os.getenv("REDIS_SOCKET_TIMEOUT", "30.0"))
40
+ SOCKET_CONNECT_TIMEOUT = float(os.getenv("REDIS_CONNECT_TIMEOUT", "10.0"))
41
+ RETRY_ATTEMPTS = int(os.getenv("REDIS_RETRY_ATTEMPTS", "3"))
42
+
43
+ # Tenacity retry decorator for Redis operations
44
+ redis_retry = retry(
45
+ stop=stop_after_attempt(RETRY_ATTEMPTS),
46
+ wait=wait_exponential(multiplier=1, min=1, max=8),
47
+ retry=(
48
+ retry_if_exception_type(ConnectionError)
49
+ | retry_if_exception_type(TimeoutError)
50
+ | retry_if_exception_type(RedisError)
51
+ ),
52
+ before_sleep=before_sleep_log(logger, logging.WARNING),  # NOTE(review): before_sleep_log expects an int level, not a string; requires "import logging"
53
+ )
54
 
55
 
56
  class RedisConnectionManager:
 
241
  """Ensure Redis resources are cleaned up when exiting context."""
242
  await self.close()
243
 
244
+ @redis_retry
245
  async def get_by_id(self, id: str) -> dict[str, Any] | None:
246
  async with self._get_redis_connection() as redis:
247
  try:
 
257
  logger.error(f"JSON decode error for id {id}: {e}")
258
  return None
259
 
260
+ @redis_retry
261
  async def get_by_ids(self, ids: list[str]) -> list[dict[str, Any]]:
262
  async with self._get_redis_connection() as redis:
263
  try:
 
334
  existing_ids = {keys_list[i] for i, exists in enumerate(results) if exists}
335
  return set(keys) - existing_ids
336
 
337
+ @redis_retry
338
  async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
339
  if not data:
340
  return
 
814
  """Redis handles persistence automatically"""
815
  pass
816
 
817
+ @redis_retry
818
  async def upsert(self, data: dict[str, dict[str, Any]]) -> None:
819
  """Insert or update document status data"""
820
  if not data:
 
836
  logger.error(f"JSON decode error during upsert: {e}")
837
  raise
838
 
839
+ @redis_retry
840
  async def get_by_id(self, id: str) -> Union[dict[str, Any], None]:
841
  async with self._get_redis_connection() as redis:
842
  try: