From 30e205a5f8f130ee52b730be90ea43d3c563a4cf Mon Sep 17 00:00:00 2001
From: Qing
Date: Wed, 16 Nov 2022 17:59:39 +0800
Subject: [PATCH] only call keepGUIAlive when run as gui

---
 lama_cleaner/app/src/App.tsx                | 15 ++++++++++++---
 lama_cleaner/app/src/adapters/inpainting.ts |  6 ++++++
 lama_cleaner/server.py                      |  6 ++++++
 3 files changed, 24 insertions(+), 3 deletions(-)

diff --git a/lama_cleaner/app/src/App.tsx b/lama_cleaner/app/src/App.tsx
index 1ac717b..963f54e 100644
--- a/lama_cleaner/app/src/App.tsx
+++ b/lama_cleaner/app/src/App.tsx
@@ -9,6 +9,7 @@ import { fileState, toastState } from './store/Atoms'
 import { keepGUIAlive } from './utils'
 import Header from './components/Header/Header'
 import useHotKey from './hooks/useHotkey'
+import { isDesktop } from './adapters/inpainting'
 
 const SUPPORTED_FILE_TYPE = [
   'image/jpeg',
@@ -18,9 +19,6 @@ const SUPPORTED_FILE_TYPE = [
   'image/tiff',
 ]
 
-// Keeping GUI Window Open
-keepGUIAlive()
-
 function App() {
   const [file, setFile] = useRecoilState(fileState)
   const [theme, setTheme] = useRecoilState(themeState)
@@ -34,6 +32,17 @@ function App() {
     setFile(userInputImage)
   }, [userInputImage, setFile])
 
+  // Keeping GUI Window Open
+  useEffect(() => {
+    const fetchData = async () => {
+      const isRunDesktop = await isDesktop().then(res => res.text())
+      if (isRunDesktop === 'True') {
+        keepGUIAlive()
+      }
+    }
+    fetchData()
+  }, [])
+
   // Dark Mode Hotkey
   useHotKey(
     'shift+d',
diff --git a/lama_cleaner/app/src/adapters/inpainting.ts b/lama_cleaner/app/src/adapters/inpainting.ts
index a8a81d8..61056be 100644
--- a/lama_cleaner/app/src/adapters/inpainting.ts
+++ b/lama_cleaner/app/src/adapters/inpainting.ts
@@ -102,6 +102,12 @@ export function currentModel() {
   })
 }
 
+export function isDesktop() {
+  return fetch(`${API_ENDPOINT}/is_desktop`, {
+    method: 'GET',
+  })
+}
+
 export function modelDownloaded(name: string) {
   return fetch(`${API_ENDPOINT}/model_downloaded/${name}`, {
     method: 'GET',
diff --git a/lama_cleaner/server.py b/lama_cleaner/server.py
index 6211154..4ad0980 100644
--- a/lama_cleaner/server.py
+++ b/lama_cleaner/server.py
@@ -74,6 +74,7 @@ model: ModelManager = None
 device = None
 input_image_path: str = None
 is_disable_model_switch: bool = False
+is_desktop: bool = False
 
 def get_image_ext(img_bytes):
     w = imghdr.what("", img_bytes)
@@ -188,6 +189,9 @@ def get_is_disable_model_switch():
 def model_downloaded(name):
     return str(model.is_downloaded(name)), 200
 
+@app.route("/is_desktop")
+def get_is_desktop():
+    return str(is_desktop), 200
 
 @app.route("/model", methods=["POST"])
 def switch_model():
@@ -227,10 +231,12 @@ def main(args):
     global device
     global input_image_path
     global is_disable_model_switch
+    global is_desktop
 
     device = torch.device(args.device)
     input_image_path = args.input
     is_disable_model_switch = args.disable_model_switch
+    is_desktop = args.gui
 
     if is_disable_model_switch:
         logger.info(f"Start with --disable-model-switch, model switch on frontend is disable")
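
Note (not part of the patch): the new /is_desktop route returns Python's str(bool), i.e. the literal text "True" or "False", which is why App.tsx compares the response body against the string 'True'. Below is a minimal TypeScript sketch of how a caller could wrap that reply into a boolean; the helper name and the backend address are assumptions for illustration, not code from this change set.

    // Sketch only: query /is_desktop and convert the plain-text reply to a boolean.
    // The address is an assumed default; point it at wherever the lama-cleaner
    // backend is actually served.
    const API_ENDPOINT = 'http://127.0.0.1:8080'

    export async function isRunningAsDesktop(): Promise<boolean> {
      const res = await fetch(`${API_ENDPOINT}/is_desktop`, { method: 'GET' })
      const text = await res.text()
      return text === 'True' // server replies with Python's str(True) / str(False)
    }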