From a869982d13da5a648fbcd25bb818c5c4aececcac Mon Sep 17 00:00:00 2001
From: Qing
Date: Wed, 10 Jan 2024 21:25:51 +0800
Subject: [PATCH] change controlnet depth preprocessor

Use controlnet_aux's MidasDetector instead of the transformers
"depth-estimation" pipeline to build the depth control image, and pass
torch_dtype to from_single_file directly instead of casting the loaded
pipeline with .to(torch_dtype).
---
 iopaint/model/controlnet.py                   | 3 ++-
 iopaint/model/helper/controlnet_preprocess.py | 8 +++-----
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/iopaint/model/controlnet.py b/iopaint/model/controlnet.py
index 3b02b47..dd361e9 100644
--- a/iopaint/model/controlnet.py
+++ b/iopaint/model/controlnet.py
@@ -93,8 +93,9 @@ class ControlNet(DiffusionInpaintModel):
                 model_info.path,
                 controlnet=controlnet,
                 load_safety_checker=not disable_nsfw_checker,
+                torch_dtype=torch_dtype,
                 **model_kwargs,
-            ).to(torch_dtype)
+            )
         else:
             self.model = handle_from_pretrained_exceptions(
                 PipeClass.from_pretrained,
diff --git a/iopaint/model/helper/controlnet_preprocess.py b/iopaint/model/helper/controlnet_preprocess.py
index 1ab1c80..8cdc8cc 100644
--- a/iopaint/model/helper/controlnet_preprocess.py
+++ b/iopaint/model/helper/controlnet_preprocess.py
@@ -23,11 +23,9 @@ def make_openpose_control_image(image: np.ndarray) -> Image:
 
 
 def make_depth_control_image(image: np.ndarray) -> Image:
-    from transformers import pipeline
-
-    depth_estimator = pipeline("depth-estimation")
-    depth_image = depth_estimator(PIL.Image.fromarray(image))["depth"]
-    depth_image = np.array(depth_image)
+    from controlnet_aux import MidasDetector
+    midas = MidasDetector.from_pretrained("lllyasviel/Annotators")
+    depth_image = midas(image)
     depth_image = depth_image[:, :, None]
     depth_image = np.concatenate([depth_image, depth_image, depth_image], axis=2)
     control_image = PIL.Image.fromarray(depth_image)
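
Note for reviewers (not part of the patch): a minimal sketch of how the new
depth preprocessing path can be exercised on its own, assuming controlnet_aux
is installed; the input file name "test.jpg" is hypothetical.

    import numpy as np
    import PIL.Image
    from controlnet_aux import MidasDetector

    # Hypothetical test input; any RGB image loaded as a uint8 NumPy array works.
    image = np.array(PIL.Image.open("test.jpg").convert("RGB"))

    # Same annotator weights as the patch; the patched code treats the
    # MiDaS output as a 2-D NumPy depth map.
    midas = MidasDetector.from_pretrained("lllyasviel/Annotators")
    depth = midas(image)

    # Replicate the single-channel depth map to three channels, as the patch does.
    control_image = PIL.Image.fromarray(
        np.concatenate([depth[:, :, None]] * 3, axis=2)
    )
    control_image.save("depth_control_image.png")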