Merge pull request #165 from davidbejarcaceres/Switch-model-unload-before-load-new-model

Clear model from memory before switch, fixes out of memory
This commit is contained in:
Qing 2022-12-17 08:41:49 +08:00 committed by GitHub
commit 6cfc7c30f1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

View File

@ -1,4 +1,5 @@
import torch
import gc
from lama_cleaner.model.fcf import FcF
from lama_cleaner.model.lama import LaMa
@ -42,6 +43,12 @@ class ModelManager:
        if new_name == self.name:
            return
        try:
if (torch.cuda.memory_allocated() > 0):
# Clear current loaded model from memory
torch.cuda.empty_cache()
del self.model
gc.collect()
            self.model = self.init_model(new_name, self.device, **self.kwargs)
            self.name = new_name
        except NotImplementedError as e: