update user_scripts
parent 6921a13a83
commit af914e2086
@@ -20,20 +20,24 @@ log = logging.getLogger("lama-cleaner")
 
 def find_free_port() -> int:
     with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
-        s.bind(('', 0))
+        s.bind(("", 0))
         s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
         return s.getsockname()[1]
 
+
 CONFIG_PATH = "config.json"
 
+
 class MODEL(str, Enum):
     SD15 = "sd1.5"
-    LAMA = 'lama'
+    LAMA = "lama"
 
+
 class DEVICE(str, Enum):
     CUDA = "cuda"
     CPU = "cpu"
 
+
 @task
 def info(c):
     print("Environment information".center(60, "-"))
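The helper above asks the OS for an ephemeral port by binding to port 0 and reading back the assigned number. A minimal, self-contained sketch of the same technique, assuming only the standard library (illustrative, not part of this commit):

# Illustrative, self-contained version of the helper above (not part of the
# commit): binding to port 0 makes the OS pick a currently free port.
import socket
from contextlib import closing


def find_free_port() -> int:
    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        s.bind(("", 0))
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        return s.getsockname()[1]


if __name__ == "__main__":
    # The port is only known to be free at the moment of the check; another
    # process could take it before lama-cleaner binds to it.
    print(find_free_port())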
@@ -44,21 +48,26 @@ def info(c):
         c.run("python --version")
         c.run("which pip")
         c.run("pip --version")
-        c.run("pip list | grep lama")
+        c.run('pip list | grep "torch\|lama\|diffusers\|opencv\|cuda"')
     except:
         pass
-    print("-"*60)
+    print("-" * 60)
 
+
 @task(pre=[info])
 def config(c, disable_device_choice=False):
     # TODO: prompt to choose model, device, port and host
     # if using the sd model, prompt to accept the terms and enter a huggingface token
-    model = Prompt.ask("Choice model", choices=[MODEL.SD15, MODEL.LAMA], default=MODEL.SD15)
+    model = Prompt.ask(
+        "Choice model", choices=[MODEL.SD15, MODEL.LAMA], default=MODEL.SD15
+    )
 
     hf_access_token = ""
     if model == MODEL.SD15:
         while True:
-            hf_access_token = Prompt.ask("Huggingface access token (https://huggingface.co/docs/hub/security-tokens)")
+            hf_access_token = Prompt.ask(
+                "Huggingface access token (https://huggingface.co/docs/hub/security-tokens)"
+            )
             if hf_access_token == "":
                 log.warning("Access token is required to download model")
             else:
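The config task builds on rich's interactive prompts: Prompt.ask limits input to the listed choices and falls back to the default on an empty answer, and Confirm.ask (used below for the new desktop option) returns a bool. A minimal sketch of that pattern, assuming the rich package is installed:

# Minimal sketch of the prompt pattern used by the config task; assumes the
# `rich` package is installed. Not part of the commit itself.
from rich.prompt import Confirm, Prompt

model = Prompt.ask("Choice model", choices=["sd1.5", "lama"], default="sd1.5")
desktop = Confirm.ask("Start as desktop app?")  # returns a bool
print(f"model={model}, desktop={desktop}")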
@@ -67,16 +76,28 @@ def config(c, disable_device_choice=False):
     if disable_device_choice:
         device = DEVICE.CPU
     else:
-        device = Prompt.ask("Choice device", choices=[DEVICE.CUDA, DEVICE.CPU], default=DEVICE.CUDA)
+        device = Prompt.ask(
+            "Choice device", choices=[DEVICE.CUDA, DEVICE.CPU], default=DEVICE.CUDA
+        )
         if device == DEVICE.CUDA:
             import torch
+
             if not torch.cuda.is_available():
-                log.warning("Did not find CUDA device on your computer, fallback to cpu")
+                log.warning(
+                    "Did not find CUDA device on your computer, fallback to cpu"
+                )
                 device = DEVICE.CPU
 
-    configs = {"model": model, "device": device, "hf_access_token": hf_access_token}
+    desktop = Confirm.ask("Start as desktop app?")
+
+    configs = {
+        "model": model,
+        "device": device,
+        "hf_access_token": hf_access_token,
+        "desktop": desktop,
+    }
     log.info(f"Save config to {CONFIG_PATH}")
-    with open(CONFIG_PATH, 'w', encoding='utf-8') as f:
+    with open(CONFIG_PATH, "w", encoding="utf-8") as f:
         json.dump(configs, f, indent=2, ensure_ascii=False)
     log.info(f"Config finish, you can close this window.")
 
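With the new desktop flag, the config task now persists four keys. A sketch of the json.dump call above with illustrative values (the token value is a placeholder):

# Illustrative reproduction of the json.dump call above; the values are
# examples only (hf_access_token stays empty when the lama model is chosen).
import json

configs = {"model": "lama", "device": "cpu", "hf_access_token": "", "desktop": True}
with open("config.json", "w", encoding="utf-8") as f:
    json.dump(configs, f, indent=2, ensure_ascii=False)
# Resulting config.json:
# {
#   "model": "lama",
#   "device": "cpu",
#   "hf_access_token": "",
#   "desktop": true
# }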
@@ -88,13 +109,22 @@ def start(c):
         exit()
 
     log.info(f"Load config from {CONFIG_PATH}")
-    with open(CONFIG_PATH, 'r', encoding='utf-8') as f:
+    with open(CONFIG_PATH, "r", encoding="utf-8") as f:
         configs = json.load(f)
 
-    model = configs['model']
-    device = configs['device']
-    hf_access_token = configs['hf_access_token']
+    model = configs["model"]
+    device = configs["device"]
+    hf_access_token = configs["hf_access_token"]
+    desktop = configs["desktop"]
     port = find_free_port()
     log.info(f"Using random port: {port}")
 
-    c.run(f"lama-cleaner --model {model} --device {device} --hf_access_token={hf_access_token} --port {port} --gui --gui-size 1400 900")
+    if desktop:
+        c.run(
+            f"lama-cleaner --model {model} --device {device} --hf_access_token={hf_access_token} --port {port} --gui --gui-size 1400 900"
+        )
+    else:
+        c.run(
+            f"lama-cleaner --model {model} --device {device} --hf_access_token={hf_access_token} --port {port}"
+        )
+
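The start task now branches on the stored desktop flag, appending the GUI flags only when it is true. A dry-run sketch of the command assembly, mirroring the f-strings above (the helper name and fixed port are hypothetical; the real task calls find_free_port and runs the command via invoke's c.run):

# Hypothetical dry-run helper mirroring the f-strings in the start task; it
# prints the command instead of running it, and uses a fixed port instead of
# calling find_free_port().
import json


def build_start_command(config_path: str = "config.json", port: int = 8080) -> str:
    with open(config_path, "r", encoding="utf-8") as f:
        configs = json.load(f)
    cmd = (
        f"lama-cleaner --model {configs['model']} --device {configs['device']} "
        f"--hf_access_token={configs['hf_access_token']} --port {port}"
    )
    if configs["desktop"]:
        cmd += " --gui --gui-size 1400 900"
    return cmd


if __name__ == "__main__":
    print(build_start_command())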
@@ -6,7 +6,8 @@ set PATH=C:\Windows\System32;%PATH%
 
 @call conda-unpack
 
-@call pip3 install -U torch==1.12.1 --extra-index-url https://download.pytorch.org/whl/cu116
+@call conda install -y cudatoolkit=11.3
+@call pip3 install torch --extra-index-url https://download.pytorch.org/whl/cu113
 @call pip3 install -U lama-cleaner
 
 @call invoke config
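The installer now pins cudatoolkit 11.3 through conda and installs a matching cu113 torch wheel before lama-cleaner. A short sketch to verify that the installed torch can actually see the GPU, mirroring the check the config task performs before falling back to CPU:

# Quick post-install sanity check (sketch): the same torch.cuda test the
# config task relies on.
import torch

print("torch:", torch.__version__)
print("CUDA available:", torch.cuda.is_available())
if torch.cuda.is_available():
    print("GPU:", torch.cuda.get_device_name(0))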