2023-03-22 05:57:18 +01:00
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
import cv2
|
2023-03-26 14:42:31 +02:00
|
|
|
import pytest
|
|
|
|
import torch.cuda
|
2023-03-22 05:57:18 +01:00
|
|
|
|
2023-03-26 14:42:31 +02:00
|
|
|
from lama_cleaner.plugins import RemoveBG, RealESRGANUpscaler, GFPGANPlugin
|
2023-03-22 05:57:18 +01:00
|
|
|
|
|
|
|
# Resolve all paths relative to this test file so the suite works from any CWD.
current_dir = Path(__file__).parent.absolute().resolve()
# Directory where result images are written for visual inspection.
save_dir = current_dir / "result"
save_dir.mkdir(exist_ok=True, parents=True)
# Shared fixture image used by every test in this module.
img_p = current_dir / "bunny.jpeg"
# NOTE(review): cv2.imread returns None when the file is missing — assumes
# bunny.jpeg ships alongside this test file; confirm the fixture exists.
bgr_img = cv2.imread(str(img_p))
# OpenCV loads BGR; some plugins (GFPGAN below) expect RGB input.
rgb_img = cv2.cvtColor(bgr_img, cv2.COLOR_BGR2RGB)
|
|
def _save(img, name):
    """Write *img* into the module-level result directory under *name*."""
    out_path = save_dir / name
    cv2.imwrite(str(out_path), img)
|
def test_remove_bg():
    """Smoke-test the RemoveBG plugin on the shared BGR image and save the output."""
    plugin = RemoveBG()
    result = plugin.forward(bgr_img)
    _save(result, "test_remove_bg.png")
|
2023-03-28 10:36:41 +02:00
|
|
|
@pytest.mark.parametrize("device", ["cuda", "cpu", "mps"])
def test_upscale(device):
    """Run RealESRGAN x2 and x4 upscaling on the shared image and save both results.

    Uses pytest.skip (not a bare return) when the requested accelerator is
    unavailable, so the report shows SKIPPED instead of a misleading PASSED.
    """
    if device == "cuda" and not torch.cuda.is_available():
        pytest.skip("CUDA is not available on this machine")
    if device == "mps" and not torch.backends.mps.is_available():
        pytest.skip("MPS is not available on this machine")

    model = RealESRGANUpscaler("realesr-general-x4v3", device)

    # Second forward() argument is the upscale factor.
    res = model.forward(bgr_img, 2)
    _save(res, f"test_upscale_x2_{device}.png")

    res = model.forward(bgr_img, 4)
    _save(res, f"test_upscale_x4_{device}.png")
|
2023-03-28 10:36:41 +02:00
|
|
|
@pytest.mark.parametrize("device", ["cuda", "cpu", "mps"])
def test_gfpgan(device):
    """Run the GFPGAN face-restoration plugin on the RGB image and save the output.

    Uses pytest.skip (not a bare return) when the requested accelerator is
    unavailable, so the report shows SKIPPED instead of a misleading PASSED.
    """
    if device == "cuda" and not torch.cuda.is_available():
        pytest.skip("CUDA is not available on this machine")
    if device == "mps" and not torch.backends.mps.is_available():
        pytest.skip("MPS is not available on this machine")

    model = GFPGANPlugin(device)
    # NOTE(review): unlike the other plugins this one takes RGB input and two
    # extra (here unused) arguments — confirm against GFPGANPlugin.__call__.
    res = model(rgb_img, None, None)
    _save(res, f"test_gfpgan_{device}.png")