2023-03-19 15:40:23 +01:00
|
|
|
import os
|
|
|
|
|
|
|
|
os.environ["PYTORCH_ENABLE_MPS_FALLBACK"] = "1"
|
2024-01-09 15:54:20 +01:00
|
|
|
# https://github.com/pytorch/pytorch/issues/27971#issuecomment-1768868068
|
|
|
|
os.environ["ONEDNN_PRIMITIVE_CACHE_CAPACITY"] = "1"
|
|
|
|
os.environ["LRU_CACHE_CAPACITY"] = "1"
|
|
|
|
# prevent CPU memory leak when run model on GPU
|
|
|
|
# https://github.com/pytorch/pytorch/issues/98688#issuecomment-1869288431
|
|
|
|
# https://github.com/pytorch/pytorch/issues/108334#issuecomment-1752763633
|
|
|
|
os.environ["TORCH_CUDNN_V8_API_LRU_CACHE_LIMIT"] = "1"
|
|
|
|
|
2023-03-19 15:40:23 +01:00
|
|
|
|
2022-10-24 12:29:33 +02:00
|
|
|
import warnings
|
2023-03-19 15:40:23 +01:00
|
|
|
|
2022-10-24 12:29:33 +02:00
|
|
|
warnings.simplefilter("ignore", UserWarning)
|
|
|
|
|
2023-03-19 15:40:23 +01:00
|
|
|
|
2022-04-18 09:01:10 +02:00
|
|
|
def entry_point():
    """Console-script entry point: run the IOPaint Typer CLI.

    The CLI module is imported lazily inside the function (not at module
    top level) to make ``os.environ["XDG_CACHE_HOME"] = args.model_cache_dir``
    work for diffusers, which reads the variable at its own import time:
    https://github.com/huggingface/diffusers/blob/be99201a567c1ccd841dc16fb24e88f7f239c187/src/diffusers/utils/constants.py#L18
    """
    from iopaint.cli import typer_app

    typer_app()