Update models/loaders/matanyone_loader.py
models/loaders/matanyone_loader.py (CHANGED)

@@ -319,7 +319,16 @@ def reset(self):
             logger.debug(f"Core clear_memory() failed: {e}")
 
 
+# --------------------------- Main Loader Class ---------------------------
+
 class MatAnyoneLoader:
+    """
+    Loader for MatAnyone InferenceCore with cleanup support.
+
+    Provides a consistent interface with other model loaders,
+    including proper resource cleanup.
+    """
+
     def __init__(self, device: str = "auto", model_id: str = "PeiqingYang/MatAnyone"):
         self.device = device
         self.model_id = model_id

@@ -366,4 +375,49 @@ def get_info(self) -> Dict[str, Any]:
             "model_id": self.model_id,
             "loaded": self._wrapper is not None or self._processor is not None,
             "wrapped": self._wrapper is not None,
-        }
+        }
+
+    def cleanup(self):
+        """
+        Clean up all resources associated with MatAnyone.
+
+        This method ensures proper cleanup of:
+        - The wrapper's state and memory
+        - The InferenceCore processor
+        - Any CUDA tensors in memory
+        """
+        logger.debug("Starting MatAnyone cleanup...")
+
+        # Clean up wrapper first
+        if self._wrapper:
+            try:
+                self._wrapper.reset()
+                logger.debug("MatAnyone wrapper reset completed")
+            except Exception as e:
+                logger.debug(f"Wrapper reset failed (non-critical): {e}")
+            self._wrapper = None
+
+        # Clean up processor
+        if self._processor:
+            try:
+                # Try various cleanup methods that might exist
+                if hasattr(self._processor, 'cleanup'):
+                    self._processor.cleanup()
+                elif hasattr(self._processor, 'clear'):
+                    self._processor.clear()
+                elif hasattr(self._processor, 'reset'):
+                    self._processor.reset()
+                logger.debug("MatAnyone processor cleanup attempted")
+            except Exception as e:
+                logger.debug(f"Processor cleanup failed (non-critical): {e}")
+            self._processor = None
+
+        # Clear any CUDA cache if using GPU
+        if self.device != "cpu" and torch.cuda.is_available():
+            try:
+                torch.cuda.empty_cache()
+                logger.debug("CUDA cache cleared for MatAnyone")
+            except Exception as e:
+                logger.debug(f"CUDA cache clear failed: {e}")
+
+        logger.info("MatAnyone resources cleaned up")
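For context, here is a minimal sketch of how a caller might pair the loader with the new cleanup() method. Only cleanup() appears in this diff; the load() call below is an assumed method name for building the InferenceCore and is not confirmed by the hunks above.

# Illustrative usage only (not part of this commit); load() is an assumed
# method name and is not shown in this diff.
loader = MatAnyoneLoader(device="auto")
try:
    core = loader.load()      # assumption: constructs and returns the InferenceCore
    # ... run matting with `core` here ...
finally:
    loader.cleanup()          # releases the wrapper, the processor, and the CUDA cache

Invoking cleanup() in a finally block ensures the wrapper reset, processor teardown, and torch.cuda.empty_cache() call run even if inference raises.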