IOPaint/lama_cleaner/model_manager.py

import torch
import gc

from lama_cleaner.model.fcf import FcF
from lama_cleaner.model.lama import LaMa
from lama_cleaner.model.ldm import LDM
from lama_cleaner.model.manga import Manga
from lama_cleaner.model.mat import MAT
from lama_cleaner.model.paint_by_example import PaintByExample
from lama_cleaner.model.sd import SD15, SD2
from lama_cleaner.model.zits import ZITS
from lama_cleaner.model.opencv2 import OpenCV2
from lama_cleaner.schema import Config

models = {"lama": LaMa, "ldm": LDM, "zits": ZITS, "mat": MAT, "fcf": FcF, "sd1.5": SD15, "cv2": OpenCV2, "manga": Manga,
          "sd2": SD2, "paint_by_example": PaintByExample}


class ModelManager:
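    """Load an inpainting model by name, run it on (image, mask, config) via
    __call__, and hot-swap to a different model at runtime with switch()."""
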
    def __init__(self, name: str, device: torch.device, **kwargs):
        self.name = name
        self.device = device
        self.kwargs = kwargs
        self.model = self.init_model(name, device, **kwargs)

    def init_model(self, name: str, device, **kwargs):
        if name in models:
            model = models[name](device, **kwargs)
        else:
            raise NotImplementedError(f"Not supported model: {name}")
        return model

    def is_downloaded(self, name: str) -> bool:
        if name in models:
            return models[name].is_downloaded()
        else:
            raise NotImplementedError(f"Not supported model: {name}")

    def __call__(self, image, mask, config: Config):
        return self.model(image, mask, config)

    def switch(self, new_name: str):
        if new_name == self.name:
            return
        try:
            if torch.cuda.memory_allocated() > 0:
                # Clear current loaded model from memory
                torch.cuda.empty_cache()
                del self.model
                gc.collect()

            self.model = self.init_model(new_name, self.device, **self.kwargs)
            self.name = new_name
        except NotImplementedError as e:
            raise e
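

# Usage sketch (not part of the original file): a minimal, hedged illustration
# of how ModelManager is typically driven: construct it once, call it per
# request, and switch() when another model is requested. The fields required by
# lama_cleaner.schema.Config are elided, and `image`/`mask` stand for numpy
# arrays prepared by the caller, so treat this as a sketch under those
# assumptions rather than a runnable entry point.
#
#   manager = ModelManager(name="lama", device=torch.device("cpu"))
#   result = manager(image, mask, Config(...))  # inpaint the masked region of `image`
#   manager.switch("mat")                       # free GPU memory (if any) and load MAT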