# https://github.com/huggingface/huggingface_hub/blob/5a12851f54bf614be39614034ed3a9031922d297/src/huggingface_hub/utils/_runtime.py
import os
import platform
import sys
from pathlib import Path
import packaging.version
from loguru import logger
from rich import print
from typing import Dict, Any
from iopaint.const import Device
# Interpreter version string, e.g. "3.10.4"; strip a trailing "+" that
# locally-built interpreters append (e.g. "3.11.0+").
_PY_VERSION: str = sys.version.split()[0].rstrip("+")
if packaging.version.Version(_PY_VERSION) < packaging.version.Version("3.8.0"):
    # importlib.metadata entered the stdlib in Python 3.8; on older
    # interpreters fall back to the importlib_metadata backport package.
    import importlib_metadata  # type: ignore
else:
    import importlib.metadata as importlib_metadata  # type: ignore
_package_versions = {}
_CANDIDATES = [
"torch",
2023-04-16 03:59:35 +02:00
"torchvision",
2023-01-20 09:52:38 +01:00
"Pillow",
"diffusers",
"transformers",
"opencv-python",
"accelerate",
2024-01-05 08:19:23 +01:00
"iopaint",
2023-04-16 03:59:35 +02:00
"rembg",
"realesrgan",
"gfpgan",
2023-01-20 09:52:38 +01:00
]
# Check once at runtime
for name in _CANDIDATES:
_package_versions[name] = "N/A"
try:
_package_versions[name] = importlib_metadata.version(name)
except importlib_metadata.PackageNotFoundError:
pass
def dump_environment_info() -> Dict[str, str]:
    """Print and return machine / package-version info to help debug issues.

    Returns:
        Mapping of property name to value: platform, Python version, and the
        pre-computed versions of every package in ``_CANDIDATES``.
    """
    # Generic machine info first, then the module-level package versions.
    info: Dict[str, Any] = {
        "Platform": platform.platform(),
        "Python version": platform.python_version(),
        **_package_versions,
    }
    report_lines = [f"- {prop}: {val}" for prop, val in info.items()]
    print("\n".join(report_lines) + "\n")
    return info
2023-12-24 08:32:27 +01:00
def check_device(device: Device) -> Device:
    """Return *device* if it is usable on this machine, else ``Device.cpu``.

    Args:
        device: Requested compute device.

    Returns:
        The requested device, or ``Device.cpu`` when the request cannot be
        satisfied (CUDA requested on macOS, or CUDA/MPS not available).
    """
    if device == Device.cuda:
        # Fix: use the module-level `platform` import instead of a redundant
        # function-local `import platform` shadowing it.
        if platform.system() == "Darwin":
            logger.warning("MacOS does not support cuda, use cpu instead")
            return Device.cpu
        # Import torch lazily: it is heavy and only needed for this probe.
        import torch

        if not torch.cuda.is_available():
            logger.warning("CUDA is not available, use cpu instead")
            return Device.cpu
    elif device == Device.mps:
        import torch

        if not torch.backends.mps.is_available():
            logger.warning("mps is not available, use cpu instead")
            return Device.cpu
    return device
def setup_model_dir(model_dir: Path) -> Path:
    """Resolve *model_dir*, point model caches at it, and ensure it exists.

    Side effects: sets the ``U2NET_HOME`` and ``XDG_CACHE_HOME`` environment
    variables to the resolved path and may create the directory on disk.

    Args:
        model_dir: Desired model directory (``~`` is expanded).

    Returns:
        The expanded, absolute model directory path.
    """
    resolved = model_dir.expanduser().absolute()
    # NOTE(review): presumably U2NET_HOME / XDG_CACHE_HOME are read by
    # downstream libraries (rembg, huggingface caches) — verify with callers.
    os.environ["U2NET_HOME"] = str(resolved)
    os.environ["XDG_CACHE_HOME"] = str(resolved)
    if not resolved.exists():
        logger.info(f"Create model directory: {resolved}")
        resolved.mkdir(exist_ok=True, parents=True)
    return resolved