Added desktop application mode

Run lama-cleaner as a desktop application.
blessedcoolant 2022-03-24 05:07:33 +13:00
parent 1207b6e291
commit 44e131f9ac
9 changed files with 58 additions and 12 deletions


@@ -35,6 +35,13 @@ python3 main.py --device=cuda --port=8080 --model=ldm --ldm-steps=50
The diffusion model is **MUCH** slower than GANs (a 1080x720 image takes 8s on a 3090), but it can produce better
results than LaMa.
### GUI
You can run lama-cleaner as a desktop application using the following command-line arguments (see the example below):
- `--gui`: Launch lama-cleaner as a desktop application
- `--gui_size`: Set the window size for the application (default: `1600 1000`). Usage: `--gui_size 1200 900`
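For example, a command along these lines (the model, device, and size values are illustrative; combine the flags as needed) should open lama-cleaner in its own desktop window:

```bash
python3 main.py --model=lama --device=cuda --gui --gui_size 1200 900
```

Without `--gui`, lama-cleaner keeps running as a local web server, as in the examples above.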
|Original Image|LaMa|LDM|
|--------------|------|----|
|![photo-1583445095369-9c651e7e5d34](https://user-images.githubusercontent.com/3998421/156923525-d6afdec3-7b98-403f-ad20-88ebc6eb8d6d.jpg)|![photo-1583445095369-9c651e7e5d34_cleanup_lama](https://user-images.githubusercontent.com/3998421/156923620-a40cc066-fd4a-4d85-a29f-6458711d1247.png)|![photo-1583445095369-9c651e7e5d34_cleanup_ldm](https://user-images.githubusercontent.com/3998421/156923652-0d06c8c8-33ad-4a42-a717-9c99f3268933.png)|


@@ -1,7 +1,7 @@
{
  "files": {
    "main.css": "/static/css/main.1144a0ea.chunk.css",
    "main.js": "/static/js/main.eeda4def.chunk.js",
    "main.css": "/static/css/main.0a04cd80.chunk.css",
    "main.js": "/static/js/main.288df200.chunk.js",
    "runtime-main.js": "/static/js/runtime-main.5e86ac81.js",
    "static/js/2.d3149f41.chunk.js": "/static/js/2.d3149f41.chunk.js",
    "index.html": "/index.html",
@@ -10,7 +10,7 @@
  "entrypoints": [
    "static/js/runtime-main.5e86ac81.js",
    "static/js/2.d3149f41.chunk.js",
    "static/css/main.1144a0ea.chunk.css",
    "static/js/main.eeda4def.chunk.js"
    "static/css/main.0a04cd80.chunk.css",
    "static/js/main.288df200.chunk.js"
  ]
}


@@ -1 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=0"/><meta name="theme-color" content="#ffffff"/><title>lama-cleaner - Image inpainting powered by LaMa</title><link href="/static/css/main.1144a0ea.chunk.css" rel="stylesheet"></head><body class="h-screen"><noscript>You need to enable JavaScript to run this app.</noscript><div id="root" class="h-full"></div><script>"localhost"===location.hostname&&(self.FIREBASE_APPCHECK_DEBUG_TOKEN=!0)</script><script>!function(e){function r(r){for(var n,l,a=r[0],f=r[1],i=r[2],p=0,s=[];p<a.length;p++)l=a[p],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in f)Object.prototype.hasOwnProperty.call(f,n)&&(e[n]=f[n]);for(c&&c(r);s.length;)s.shift()();return u.push.apply(u,i||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,a=1;a<t.length;a++){var f=t[a];0!==o[f]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var a=this["webpackJsonplama-cleaner"]=this["webpackJsonplama-cleaner"]||[],f=a.push.bind(a);a.push=r,a=a.slice();for(var i=0;i<a.length;i++)r(a[i]);var c=f;t()}([])</script><script src="/static/js/2.d3149f41.chunk.js"></script><script src="/static/js/main.eeda4def.chunk.js"></script></body></html>
<!doctype html><html lang="en"><head><meta charset="utf-8"/><meta name="viewport" content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=0"/><meta name="theme-color" content="#ffffff"/><title>lama-cleaner - Image inpainting powered by LaMa</title><link href="/static/css/main.0a04cd80.chunk.css" rel="stylesheet"></head><body class="h-screen"><noscript>You need to enable JavaScript to run this app.</noscript><div id="root" class="h-full"></div><script>"localhost"===location.hostname&&(self.FIREBASE_APPCHECK_DEBUG_TOKEN=!0)</script><script>!function(e){function r(r){for(var n,l,a=r[0],f=r[1],i=r[2],p=0,s=[];p<a.length;p++)l=a[p],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in f)Object.prototype.hasOwnProperty.call(f,n)&&(e[n]=f[n]);for(c&&c(r);s.length;)s.shift()();return u.push.apply(u,i||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,a=1;a<t.length;a++){var f=t[a];0!==o[f]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var a=this["webpackJsonplama-cleaner"]=this["webpackJsonplama-cleaner"]||[],f=a.push.bind(a);a.push=r,a=a.slice();for(var i=0;i<a.length;i++)r(a[i]);var c=f;t()}([])</script><script src="/static/js/2.d3149f41.chunk.js"></script><script src="/static/js/main.288df200.chunk.js"></script></body></html>


@@ -31,12 +31,14 @@
  "scripts": {
    "dev": "run-p watch:css react-scripts:start",
    "build": "run-s build:css react-scripts:build",
    "build:windows": "run-s build:css react-scripts:winbuild",
    "test": "react-scripts test",
    "eject": "react-scripts eject",
    "build:css": "cross-env TAILWIND_MODE=build NODE_ENV=production postcss src/styles/tailwind.css -o src/styles/index.css",
    "watch:css": "cross-env TAILWIND_MODE=watch NODE_ENV=development postcss src/styles/tailwind.css -o src/styles/index.css --watch",
    "react-scripts:start": "delay 5 && react-scripts start",
    "react-scripts:build": "GENERATE_SOURCEMAP=false react-scripts build"
    "react-scripts:build": "GENERATE_SOURCEMAP=false react-scripts build",
    "react-scripts:winbuild": "set \"GENERATE_SOURCEMAP=false\" && react-scripts build"
  },
  "eslintConfig": {
    "extends": "react-app"


@@ -6,6 +6,27 @@ import FileSelect from './components/FileSelect'
import ShortcutsModal from './components/ShortcutsModal'
import Editor from './Editor'
// Keep the backend server alive while the desktop (GUI) window is open:
// in production builds, ping flaskwebgui's keep-alive endpoint every 3 seconds.
async function getRequest(url = '') {
  const response = await fetch(url, {
    method: 'GET',
    cache: 'no-cache',
  })
  return response.json()
}
if (!process.env.NODE_ENV || process.env.NODE_ENV === 'production') {
  document.addEventListener('DOMContentLoaded', function () {
    const url = document.location
    const route = '/flaskwebgui-keep-server-alive'
    const intervalRequest = 3 * 1000
    function keepAliveServer() {
      getRequest(url + route).then(data => console.log(data))
    }
    setInterval(keepAliveServer, intervalRequest)
  })
}
function App() {
  const [file, setFile] = useState<File>()
  const [showShortcuts, toggleShowShortcuts] = useToggle(false)
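To make the keep-alive mechanism above concrete: the front end only sends the pings; the server side has to notice when they stop. The sketch below is a hypothetical, simplified illustration of that pattern, not flaskwebgui's actual code (only the route path is taken from the snippet above, everything else is assumed): a route records the time of the last ping, and a watchdog thread exits the process once pings stop arriving, i.e. once the desktop window has been closed.

```python
# Hypothetical, simplified server-side counterpart of the keep-alive ping above.
# flaskwebgui ships its own version of this machinery; this only illustrates the pattern.
import os
import threading
import time

from flask import Flask

app = Flask(__name__)
last_ping = time.time()


@app.route("/flaskwebgui-keep-server-alive")
def keep_alive():
    # The front end (App.tsx above) calls this every 3 seconds while the window is open.
    global last_ping
    last_ping = time.time()
    return {"status": "alive"}


def watchdog(timeout: float = 10.0) -> None:
    # Once pings stop arriving (the desktop window was closed), shut the process down.
    while True:
        time.sleep(1)
        if time.time() - last_ping > timeout:
            os._exit(0)


if __name__ == "__main__":
    threading.Thread(target=watchdog, daemon=True).start()
    app.run(port=8080)
```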

main.py

@@ -13,6 +13,8 @@ import torch
from lama_cleaner.lama import LaMa
from lama_cleaner.ldm import LDM
from flaskwebgui import FlaskUI
try:
    torch._C._jit_override_can_fuse_on_cpu(False)
    torch._C._jit_override_can_fuse_on_gpu(False)
@@ -41,7 +43,8 @@ os.environ["NUMEXPR_NUM_THREADS"] = NUM_THREADS
if os.environ.get("CACHE_DIR"):
    os.environ["TORCH_HOME"] = os.environ["CACHE_DIR"]
BUILD_DIR = os.environ.get("LAMA_CLEANER_BUILD_DIR", "./lama_cleaner/app/build")
BUILD_DIR = os.environ.get("LAMA_CLEANER_BUILD_DIR",
                           "./lama_cleaner/app/build")
app = Flask(__name__, static_folder=os.path.join(BUILD_DIR, "static"))
app.config["JSON_AS_ASCII"] = False
@@ -66,12 +69,14 @@ def process():
    size_limit = int(size_limit)
    print(f"Origin image shape: {original_shape}")
    image = resize_max_size(image, size_limit=size_limit, interpolation=interpolation)
    image = resize_max_size(image, size_limit=size_limit,
                            interpolation=interpolation)
    print(f"Resized image shape: {image.shape}")
    image = norm_img(image)
    mask = load_img(input["mask"].read(), gray=True)
    mask = resize_max_size(mask, size_limit=size_limit, interpolation=interpolation)
    mask = resize_max_size(mask, size_limit=size_limit,
                           interpolation=interpolation)
    mask = norm_img(mask)
    start = time.time()
@@ -111,6 +116,10 @@ def get_args_parser():
        "The larger the value, the better the result, but it will be more time-consuming",
    )
    parser.add_argument("--device", default="cuda", type=str)
    parser.add_argument("--gui", action="store_true",
                        help="Launch as desktop app")
    parser.add_argument("--gui_size", default=[1600, 1000], nargs=2, type=int,
                        help="Set window size for GUI")
    parser.add_argument("--debug", action="store_true")
    return parser.parse_args()
@@ -125,13 +134,19 @@ def main():
    crop_size = [int(it) for it in args.crop_size.split(",")]
    if args.model == "lama":
        model = LaMa(crop_trigger_size=crop_trigger_size, crop_size=crop_size, device=device)
        model = LaMa(crop_trigger_size=crop_trigger_size,
                     crop_size=crop_size, device=device)
    elif args.model == "ldm":
        model = LDM(device, steps=args.ldm_steps)
    else:
        raise NotImplementedError(f"Not supported model: {args.model}")
    app.run(host="0.0.0.0", port=args.port, debug=args.debug)
    if args.gui:
        app_width, app_height = args.gui_size
        ui = FlaskUI(app, width=app_width, height=app_height)
        ui.run()
    else:
        app.run(host="127.0.0.1", port=args.port, debug=args.debug)
if __name__ == "__main__":
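Read on its own, the new GUI code path amounts to wrapping the existing Flask app in flaskwebgui's `FlaskUI` and calling its `run()` instead of `app.run()`. Below is a minimal standalone sketch of that wiring, assuming the same `FlaskUI(app, width=..., height=...)` constructor used above; the placeholder route and the port default are illustrative, not lama-cleaner's real code.

```python
# Minimal sketch of the --gui / --gui_size wiring, assuming the FlaskUI API used above.
import argparse

from flask import Flask
from flaskwebgui import FlaskUI

app = Flask(__name__)


@app.route("/")
def index():
    # Placeholder route; in lama-cleaner the Flask app serves the built React front end.
    return "lama-cleaner"


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--port", default=8080, type=int)
    parser.add_argument("--gui", action="store_true", help="Launch as desktop app")
    parser.add_argument("--gui_size", default=[1600, 1000], nargs=2, type=int,
                        help="Set window size for GUI")
    return parser.parse_args()


if __name__ == "__main__":
    args = parse_args()
    if args.gui:
        app_width, app_height = args.gui_size
        FlaskUI(app, width=app_width, height=app_height).run()  # opens a desktop window
    else:
        app.run(host="127.0.0.1", port=args.port)
```

The `else` branch keeps the previous behaviour: a plain Flask server, now bound to 127.0.0.1.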


@@ -2,4 +2,5 @@ torch>=1.8.2
opencv-python
flask_cors
flask
flaskwebgui
tqdm