anderson-ufrj committed on
Commit
1f616b9
·
1 Parent(s): 8d9d872

test(cache): implement multi-level cache system tests

Browse files

- Test cache entry lifecycle and expiration
- Test memory cache with LRU eviction
- Test Redis cache operations
- Test persistent file-based cache
- Test cache waterfall and write-through
- Add cache decorator tests

Files changed (1) hide show
  1. tests/unit/test_cache_system.py +520 -0
tests/unit/test_cache_system.py ADDED
@@ -0,0 +1,520 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Unit tests for cache system."""
2
+ import pytest
3
+ import asyncio
4
+ import json
5
+ import time
6
+ from unittest.mock import MagicMock, patch, AsyncMock
7
+ from datetime import datetime, timedelta
8
+ import pickle
9
+
10
+ from src.infrastructure.cache_system import (
11
+ CacheSystem,
12
+ CacheLevel,
13
+ CacheEntry,
14
+ CacheStats,
15
+ MemoryCache,
16
+ RedisCache,
17
+ PersistentCache,
18
+ CacheKey,
19
+ CacheSerializer
20
+ )
21
+
22
+
23
class TestCacheEntry:
    """Tests for the CacheEntry data structure."""

    def test_cache_entry_creation(self):
        """A freshly built entry exposes its fields and starts at zero hits."""
        entry = CacheEntry(
            key="test_key",
            value={"data": "test"},
            ttl=3600,
            level=CacheLevel.L1_MEMORY,
        )

        assert entry.key == "test_key"
        assert entry.value == {"data": "test"}
        assert entry.ttl == 3600
        assert entry.level == CacheLevel.L1_MEMORY
        assert entry.created_at is not None
        assert entry.hits == 0

    def test_cache_entry_expiration(self):
        """is_expired() flips to True once the TTL window has passed."""
        entry = CacheEntry(key="expire_test", value="data", ttl=1)

        # Fresh entry: still inside its 1-second TTL.
        assert entry.is_expired() is False

        # Let the TTL elapse, then re-check.
        time.sleep(1.1)
        assert entry.is_expired() is True

    def test_cache_entry_hit_tracking(self):
        """record_hit() increments the counter and stamps last_accessed."""
        entry = CacheEntry(key="hit_test", value="data")

        for _ in range(3):
            entry.record_hit()

        assert entry.hits == 3
        assert entry.last_accessed is not None
69
+
70
+
71
class TestCacheSerializer:
    """Tests for cache payload serialization and compression."""

    def test_json_serialization(self):
        """JSON round-trip yields bytes and preserves plain data."""
        data = {"key": "value", "number": 42, "list": [1, 2, 3]}

        serialized = CacheSerializer.serialize(data, format="json")
        deserialized = CacheSerializer.deserialize(serialized, format="json")

        assert isinstance(serialized, bytes)
        assert deserialized == data

    def test_pickle_serialization(self):
        """Pickle round-trip handles objects JSON cannot (e.g. datetime).

        FIX: the original payload contained a ``lambda``, but the stdlib
        ``pickle`` module cannot serialize lambdas (it raises
        ``PicklingError``), so the test would fail before exercising the
        serializer. The payload is restricted to picklable objects, and the
        datetime round-trip is now asserted explicitly.
        """
        data = {
            "timestamp": datetime.now(),
            "nested": {"deep": {"value": 123}},
        }

        serialized = CacheSerializer.serialize(data, format="pickle")
        deserialized = CacheSerializer.deserialize(serialized, format="pickle")

        assert isinstance(serialized, bytes)
        assert deserialized["nested"]["deep"]["value"] == 123
        # Pickle (unlike JSON) preserves datetime objects exactly.
        assert deserialized["timestamp"] == data["timestamp"]

    def test_compression(self):
        """Compression shrinks repetitive payloads and round-trips losslessly."""
        # Large, highly repetitive value that compresses well.
        large_data = {"key": "x" * 10000}

        uncompressed = CacheSerializer.serialize(large_data, compress=False)
        compressed = CacheSerializer.serialize(large_data, compress=True)

        # Compressed form must be strictly smaller.
        assert len(compressed) < len(uncompressed)

        # And must decompress back to the original payload.
        decompressed = CacheSerializer.deserialize(compressed, compress=True)
        assert decompressed == large_data
113
+
114
+
115
class TestMemoryCache:
    """Tests for the in-memory (L1) cache backend."""

    @pytest.fixture
    def memory_cache(self):
        """Fresh MemoryCache capped at 10 MB for each test."""
        return MemoryCache(max_size_mb=10)

    @pytest.mark.asyncio
    async def test_memory_cache_get_set(self, memory_cache):
        """A stored value is returned unchanged by get()."""
        stored = {"data": "test_value"}

        await memory_cache.set("test_key", stored, ttl=60)

        assert await memory_cache.get("test_key") == stored

    @pytest.mark.asyncio
    async def test_memory_cache_ttl(self, memory_cache):
        """Entries disappear once their TTL elapses."""
        await memory_cache.set("ttl_test", "expires_soon", ttl=1)

        # Visible right after the write.
        assert await memory_cache.get("ttl_test") == "expires_soon"

        # Sleep past the 1-second TTL; the entry must be gone.
        await asyncio.sleep(1.1)
        assert await memory_cache.get("ttl_test") is None

    @pytest.mark.asyncio
    async def test_memory_cache_eviction(self, memory_cache):
        """Oldest entries are evicted (LRU) once the size budget is exceeded."""
        # Shrink the budget so eviction triggers quickly under test.
        memory_cache.max_size_mb = 0.001

        for i in range(100):
            await memory_cache.set(f"key_{i}", f"value_{i}" * 1000)

        # The earliest insert should have been pushed out...
        assert await memory_cache.get("key_0") is None
        # ...while the most recent one survives.
        assert await memory_cache.get("key_99") is not None

    @pytest.mark.asyncio
    async def test_memory_cache_clear(self, memory_cache):
        """clear() wipes every stored entry."""
        for n in range(10):
            await memory_cache.set(f"key_{n}", f"value_{n}")

        await memory_cache.clear()

        for n in range(10):
            assert await memory_cache.get(f"key_{n}") is None

    @pytest.mark.asyncio
    async def test_memory_cache_stats(self, memory_cache):
        """Statistics track hits, misses, hit rate, and entry count."""
        await memory_cache.set("key1", "value1")
        await memory_cache.get("key1")     # hit
        await memory_cache.get("key1")     # hit
        await memory_cache.get("missing")  # miss

        stats = await memory_cache.get_stats()

        assert stats["hits"] == 2
        assert stats["misses"] == 1
        assert stats["hit_rate"] == 2 / 3
        assert stats["size"] == 1
199
+
200
+
201
class TestRedisCache:
    """Tests for the Redis (L2) cache backend, using a mocked client."""

    @pytest.fixture
    def redis_cache(self):
        """RedisCache built while the real client factory is patched out."""
        with patch("src.infrastructure.cache_system.get_redis_client"):
            return RedisCache()

    @pytest.fixture
    def mock_redis(self):
        """Async mock standing in for the Redis client."""
        return AsyncMock()

    @pytest.mark.asyncio
    async def test_redis_cache_get_set(self, redis_cache, mock_redis):
        """set() issues SETEX with a prefixed key; get() decodes the value."""
        redis_cache._redis = mock_redis

        payload = {"data": "redis_value"}

        # Wire up the mock client's responses.
        mock_redis.setex.return_value = True
        mock_redis.get.return_value = json.dumps(payload).encode()

        await redis_cache.set("redis_key", payload, ttl=300)

        # SETEX must be called once with the namespaced key and the TTL.
        mock_redis.setex.assert_called_once()
        positional = mock_redis.setex.call_args[0]
        assert positional[0] == "cache:redis_key"
        assert positional[1] == 300

        assert await redis_cache.get("redis_key") == payload

    @pytest.mark.asyncio
    async def test_redis_cache_batch_operations(self, redis_cache, mock_redis):
        """set_many() uses a pipeline; get_many() maps missing keys to None."""
        redis_cache._redis = mock_redis

        await redis_cache.set_many(
            {"key1": "value1", "key2": "value2", "key3": "value3"},
            ttl=300,
        )

        # Bulk writes should go through a pipeline, not N round-trips.
        assert mock_redis.pipeline.called

        mock_redis.mget.return_value = [
            json.dumps("value1").encode(),
            None,
            json.dumps("value3").encode(),
        ]

        fetched = await redis_cache.get_many(["key1", "key2", "key3"])

        assert fetched["key1"] == "value1"
        assert fetched["key2"] is None
        assert fetched["key3"] == "value3"

    @pytest.mark.asyncio
    async def test_redis_cache_delete(self, redis_cache, mock_redis):
        """delete() removes one key; delete_pattern() fans out via KEYS."""
        redis_cache._redis = mock_redis

        await redis_cache.delete("key1")
        mock_redis.delete.assert_called_with("cache:key1")

        mock_redis.keys.return_value = [b"cache:pattern:1", b"cache:pattern:2"]
        await redis_cache.delete_pattern("pattern:*")

        assert mock_redis.keys.called
        assert mock_redis.delete.call_count == 3  # 1 single + 2 pattern

    @pytest.mark.asyncio
    async def test_redis_connection_failure(self, redis_cache, mock_redis):
        """Redis errors fail open: get() returns None instead of raising."""
        redis_cache._redis = mock_redis

        mock_redis.get.side_effect = Exception("Redis connection failed")

        assert await redis_cache.get("key") is None
        # NOTE(review): the implementation should also log this error;
        # not asserted here since the logger is not exposed.
300
+
301
+
302
class TestPersistentCache:
    """Tests for the file-backed (L3) cache."""

    @pytest.fixture
    def persistent_cache(self, tmp_path):
        """PersistentCache rooted at a per-test temporary directory."""
        return PersistentCache(cache_dir=str(tmp_path))

    @pytest.mark.asyncio
    async def test_persistent_cache_get_set(self, persistent_cache):
        """Values round-trip through disk and leave a cache file behind."""
        payload = {"persistent": "data", "number": 42}

        await persistent_cache.set("persistent_key", payload, ttl=3600)

        assert await persistent_cache.get("persistent_key") == payload

        # The entry must be materialized as a file on disk.
        assert persistent_cache._get_cache_path("persistent_key").exists()

    @pytest.mark.asyncio
    async def test_persistent_cache_expiration(self, persistent_cache):
        """An already-expired entry (negative TTL) reads back as None."""
        await persistent_cache.set("expire_key", "expires", ttl=-1)

        assert await persistent_cache.get("expire_key") is None

    @pytest.mark.asyncio
    async def test_persistent_cache_cleanup(self, persistent_cache):
        """cleanup() deletes every expired cache file."""
        for n in range(5):
            await persistent_cache.set(f"key_{n}", f"value_{n}", ttl=1)

        # Let every entry expire before sweeping.
        await asyncio.sleep(1.1)

        await persistent_cache.cleanup()

        # No *.cache files should remain after the sweep.
        assert list(persistent_cache.cache_dir.glob("*.cache")) == []
356
+
357
+
358
class TestCacheSystem:
    """Tests for the multi-level (L1/L2/L3) cache orchestrator."""

    @pytest.fixture
    def cache_system(self, tmp_path):
        """Cache system with all three levels enabled and Redis patched out."""
        with patch("src.infrastructure.cache_system.get_redis_client"):
            return CacheSystem(
                enable_l1=True,
                enable_l2=True,
                enable_l3=True,
                l3_cache_dir=str(tmp_path),
            )

    @pytest.mark.asyncio
    async def test_cache_system_waterfall(self, cache_system):
        """A value found only in L3 is promoted to L2 and L1 on read."""
        payload = {"test": "waterfall"}

        # Seed only the slowest level.
        await cache_system.l3_cache.set("waterfall_key", payload)

        # The read should fall through L1/L2, hit L3, and promote upward.
        assert await cache_system.get("waterfall_key") == payload

        assert await cache_system.l1_cache.get("waterfall_key") == payload
        assert await cache_system.l2_cache.get("waterfall_key") == payload

    @pytest.mark.asyncio
    async def test_cache_system_write_through(self, cache_system):
        """set() writes through to every enabled level."""
        payload = {"write": "through"}

        await cache_system.set("write_through_key", payload, ttl=300)

        assert await cache_system.l1_cache.get("write_through_key") == payload
        assert await cache_system.l2_cache.get("write_through_key") == payload
        assert await cache_system.l3_cache.get("write_through_key") == payload

    @pytest.mark.asyncio
    async def test_cache_system_invalidation(self, cache_system):
        """invalidate() removes the key from every level at once."""
        await cache_system.set("invalidate_key", {"invalidate": "me"})

        await cache_system.invalidate("invalidate_key")

        assert await cache_system.l1_cache.get("invalidate_key") is None
        assert await cache_system.l2_cache.get("invalidate_key") is None
        assert await cache_system.l3_cache.get("invalidate_key") is None

    @pytest.mark.asyncio
    async def test_cache_system_stats_aggregation(self, cache_system):
        """get_stats() exposes per-level sections plus aggregate counters."""
        await cache_system.set("key1", "value1")
        await cache_system.get("key1")  # served by L1
        await cache_system.get("key2")  # misses every level

        stats = await cache_system.get_stats()

        for section in ("l1", "l2", "l3", "total_hits", "total_misses"):
            assert section in stats

    @pytest.mark.asyncio
    async def test_cache_warming(self, cache_system):
        """warm_cache() pre-populates L1 with the supplied entries."""
        warm_data = {
            "config:app": {"name": "cidadao.ai", "version": "1.0"},
            "config:features": {"ml_enabled": True, "cache_ttl": 300},
        }

        await cache_system.warm_cache(warm_data)

        for key, expected in warm_data.items():
            assert await cache_system.l1_cache.get(key) == expected

    @pytest.mark.asyncio
    async def test_cache_key_generation(self):
        """CacheKey builds colon-joined keys and bounded hash-based keys."""
        # Simple two-part key.
        assert CacheKey.generate("user", "123") == "user:123"

        # Key with extra parameters folded in.
        composite = CacheKey.generate(
            "investigation",
            "abc-123",
            params={"year": 2024, "entity": "26000"},
        )
        assert "investigation:abc-123" in composite
        assert "2024" in composite
        assert "26000" in composite

        # Long payloads collapse to a short, hash-based key.
        digest_key = CacheKey.generate_hash("long", {"data": "x" * 1000})
        assert len(digest_key) < 100  # Reasonable length
473
+
474
+
475
class TestCacheDecorator:
    """Test cache decorator functionality."""

    @pytest.fixture
    def cache_system(self, tmp_path):
        """Cache system fixture for the decorator tests.

        FIX: the original class relied on the ``cache_system`` fixture
        defined inside ``TestCacheSystem``, but pytest fixtures defined on a
        test class are visible only within that class, so these tests would
        fail with "fixture 'cache_system' not found". The fixture is
        therefore duplicated here (alternatively it could be moved to
        conftest.py to be shared).
        """
        with patch("src.infrastructure.cache_system.get_redis_client"):
            return CacheSystem(
                enable_l1=True,
                enable_l2=True,
                enable_l3=True,
                l3_cache_dir=str(tmp_path),
            )

    @pytest.mark.asyncio
    async def test_cache_decorator_basic(self, cache_system):
        """The decorator caches results and re-executes only on new args."""
        call_count = 0

        @cache_system.cache(ttl=60)
        async def expensive_function(x, y):
            nonlocal call_count
            call_count += 1
            await asyncio.sleep(0.1)  # Simulate expensive operation
            return x + y

        # First call - should execute function
        result1 = await expensive_function(2, 3)
        assert result1 == 5
        assert call_count == 1

        # Second call - should use cache
        result2 = await expensive_function(2, 3)
        assert result2 == 5
        assert call_count == 1  # No additional call

        # Different arguments - should execute again
        result3 = await expensive_function(3, 4)
        assert result3 == 7
        assert call_count == 2

    @pytest.mark.asyncio
    async def test_cache_decorator_key_func(self, cache_system):
        """A custom key_func controls which arguments form the cache key."""
        @cache_system.cache(
            ttl=60,
            key_func=lambda user_id, _: f"user:{user_id}"
        )
        async def get_user_data(user_id, include_details=False):
            return {"id": user_id, "details": include_details}

        # Both calls share the same cache key, so the second call returns
        # the cached result (include_details is ignored by the key).
        result1 = await get_user_data("123", include_details=True)
        result2 = await get_user_data("123", include_details=False)

        assert result1 == result2