From 0c4de4776e1132909336684d807ca424ea9a2479 Mon Sep 17 00:00:00 2001
From: David Bejar Caceres
Date: Fri, 16 Dec 2022 16:14:15 +0100
Subject: [PATCH] Clear model from memory before switch, fixes out of memory

---
 lama_cleaner/model_manager.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/lama_cleaner/model_manager.py b/lama_cleaner/model_manager.py
index e14736e..549b307 100644
--- a/lama_cleaner/model_manager.py
+++ b/lama_cleaner/model_manager.py
@@ -1,4 +1,5 @@
 import torch
+import gc
 
 from lama_cleaner.model.fcf import FcF
 from lama_cleaner.model.lama import LaMa
@@ -42,6 +43,12 @@ class ModelManager:
         if new_name == self.name:
             return
         try:
+            if (torch.cuda.memory_allocated() > 0):
+                # Clear current loaded model from memory
+                torch.cuda.empty_cache()
+                gc.collect()
+                del self.model
+
             self.model = self.init_model(new_name, self.device, **self.kwargs)
             self.name = new_name
         except NotImplementedError as e:
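
Note: below is a minimal, self-contained sketch of the model-switch cleanup this patch introduces, assuming a ModelManager-like class with an init_model() factory as in lama_cleaner/model_manager.py. The class name, the placeholder model returned by init_model(), and the torch.cuda.is_available() guard are illustrative and are not the project's exact code.

    import gc

    import torch


    class ModelManagerSketch:
        """Illustrative only: mirrors the cleanup-on-switch idea of this patch."""

        def __init__(self, name, device, **kwargs):
            self.name = name
            self.device = device
            self.kwargs = kwargs
            self.model = self.init_model(name, device, **kwargs)

        def init_model(self, name, device, **kwargs):
            # Stand-in for the real per-model constructors (LaMa, FcF, ...).
            return torch.nn.Linear(4, 4).to(device)

        def switch(self, new_name):
            if new_name == self.name:
                return
            # Drop the reference to the currently loaded model, then run the
            # garbage collector and empty the CUDA cache so its weights are
            # actually released before the next model is loaded.
            del self.model
            gc.collect()
            if torch.cuda.is_available():
                torch.cuda.empty_cache()
            self.model = self.init_model(new_name, self.device, **self.kwargs)
            self.name = new_name

Releasing the old model reference before calling gc.collect() and torch.cuda.empty_cache() is the commonly recommended ordering, since empty_cache() can only return cached blocks whose tensors are no longer referenced anywhere.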