From a9f1000af8f9aa77f8d8909d8a789693f1bfa272 Mon Sep 17 00:00:00 2001 From: cmdr2 Date: Sat, 29 Jul 2023 11:41:44 +0530 Subject: [PATCH 1/2] Install button for TensorRT - displayed only if an NVIDIA gpu is active --- ui/index.html | 7 +++++++ ui/media/js/parameters.js | 41 ++++++++++++++++++++++++++++++++++----- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/ui/index.html b/ui/index.html index 776eb627..370ecbea 100644 --- a/ui/index.html +++ b/ui/index.html @@ -360,6 +360,13 @@

+
+
+
+ Accelerate Easy Diffusion
+
+
+


Share Easy Diffusion

diff --git a/ui/media/js/parameters.js b/ui/media/js/parameters.js index 60f053b9..d7694774 100644 --- a/ui/media/js/parameters.js +++ b/ui/media/js/parameters.js @@ -16,6 +16,7 @@ var ParameterType = { */ let parametersTable = document.querySelector("#system-settings-table") let networkParametersTable = document.querySelector("#system-settings-network-table") +let installExtrasTable = document.querySelector("#system-settings-install-extras-table") /** * JSDoc style @@ -241,6 +242,17 @@ var PARAMETERS = [ render: () => '', table: networkParametersTable, }, + { + id: "nvidia_tensorrt", + type: ParameterType.custom, + label: "NVIDIA TensorRT", + note: `Faster image generation by converting your Stable Diffusion models to the NVIDIA TensorRT format. You can choose the + models to convert. Requires an NVIDIA graphics card.
<br/><br/>
+ Early access version: support for LoRA is still under development.`, + icon: "fa-angles-up", + render: () => '', + table: installExtrasTable, + }, ] function getParameterSettingsEntry(id) { @@ -582,6 +594,25 @@ function setDeviceInfo(devices) { systemInfoEl.querySelector("#system-info-cpu").innerText = cpu systemInfoEl.querySelector("#system-info-gpus-all").innerHTML = allGPUs.join("
") systemInfoEl.querySelector("#system-info-rendering-devices").innerHTML = activeGPUs.join("
") + + // tensorRT + if (devices.active) { + console.log(devices.active) + let nvidiaGPUs = Object.keys(devices.active).filter((d) => { + let gpuName = devices.active[d].name + gpuName = gpuName.toLowerCase() + console.log(gpuName) + return ( + gpuName.includes("nvidia") || + gpuName.includes("geforce") || + gpuName.includes("quadro") || + gpuName.includes("tesla") + ) + }) + if (nvidiaGPUs.length > 0) { + document.querySelector("#install-extras-container").classList.remove("displayNone") + } + } } function setHostInfo(hosts) { @@ -744,10 +775,10 @@ navigator.permissions.query({ name: "clipboard-write" }).then(function(result) { document.addEventListener("system_info_update", (e) => setDeviceInfo(e.detail)) -useBetaChannelField.addEventListener('change', (e) => { - if (e.target.checked) { - getParameterSettingsEntry("test_diffusers").classList.remove('displayNone') - } else { - getParameterSettingsEntry("test_diffusers").classList.add('displayNone') +useBetaChannelField.addEventListener("change", (e) => { + if (e.target.checked) { + getParameterSettingsEntry("test_diffusers").classList.remove("displayNone") + } else { + getParameterSettingsEntry("test_diffusers").classList.add("displayNone") } }) From 2e3059a7c8efba2d045607120634c843da242d13 Mon Sep 17 00:00:00 2001 From: cmdr2 Date: Sat, 29 Jul 2023 21:09:27 +0530 Subject: [PATCH 2/2] UI for TensorRT installation and conversion --- scripts/check_modules.py | 2 +- ui/easydiffusion/app.py | 2 + ui/easydiffusion/package_manager.py | 93 +++++++++++++++++++++ ui/easydiffusion/server.py | 30 ++++++- ui/easydiffusion/task_manager.py | 6 ++ ui/easydiffusion/tasks/render_images.py | 14 +++- ui/easydiffusion/types.py | 5 +- ui/index.html | 12 ++- ui/media/js/main.js | 102 +++++++++++++++++++++--- ui/media/js/parameters.js | 8 +- 10 files changed, 251 insertions(+), 23 deletions(-) create mode 100644 ui/easydiffusion/package_manager.py diff --git a/scripts/check_modules.py b/scripts/check_modules.py index 1519845b..4ee0d830 100644 --- a/scripts/check_modules.py +++ b/scripts/check_modules.py @@ -18,7 +18,7 @@ os_name = platform.system() modules_to_check = { "torch": ("1.11.0", "1.13.1", "2.0.0"), "torchvision": ("0.12.0", "0.14.1", "0.15.1"), - "sdkit": "1.0.143", + "sdkit": "1.0.146", "stable-diffusion-sdkit": "2.1.4", "rich": "12.6.0", "uvicorn": "0.19.0", diff --git a/ui/easydiffusion/app.py b/ui/easydiffusion/app.py index 921886f1..e181f9b8 100644 --- a/ui/easydiffusion/app.py +++ b/ui/easydiffusion/app.py @@ -32,6 +32,8 @@ logging.basicConfig( SD_DIR = os.getcwd() +ROOT_DIR = os.path.abspath(os.path.join(SD_DIR, "..")) + SD_UI_DIR = os.getenv("SD_UI_PATH", None) CONFIG_DIR = os.path.abspath(os.path.join(SD_UI_DIR, "..", "scripts")) diff --git a/ui/easydiffusion/package_manager.py b/ui/easydiffusion/package_manager.py new file mode 100644 index 00000000..2ec83893 --- /dev/null +++ b/ui/easydiffusion/package_manager.py @@ -0,0 +1,93 @@ +import sys +import os +import platform +from importlib.metadata import version as pkg_version + +from sdkit.utils import log + +from easydiffusion import app + +# future home of scripts/check_modules.py + +manifest = { + "tensorrt": { + "install": ["nvidia-cudnn", "tensorrt-libs", "tensorrt"], + "uninstall": ["tensorrt"], + # TODO also uninstall tensorrt-libs and nvidia-cudnn, but do it upon restarting (avoid 'file in use' error) + } +} +installing = [] + +# remove this once TRT releases on pypi +if platform.system() == "Windows": + trt_dir = os.path.join(app.ROOT_DIR, "tensorrt") + if os.path.exists(trt_dir): + 
files = os.listdir(trt_dir) + + packages = manifest["tensorrt"]["install"] + packages = tuple(p.replace("-", "_") for p in packages) + + wheels = [] + for p in packages: + f = next((f for f in files if f.startswith(p) and f.endswith((".whl", ".tar.gz"))), None) + if f: + wheels.append(os.path.join(trt_dir, f)) + + manifest["tensorrt"]["install"] = wheels + + +def get_installed_packages() -> list: + return {module_name: version(module_name) for module_name in manifest if is_installed(module_name)} + + +def is_installed(module_name) -> bool: + return version(module_name) is not None + + +def install(module_name): + if is_installed(module_name): + log.info(f"{module_name} has already been installed!") + return + if module_name in installing: + log.info(f"{module_name} is already installing!") + return + + if module_name not in manifest: + raise RuntimeError(f"Can't install unknown package: {module_name}!") + + commands = manifest[module_name]["install"] + commands = [f"python -m pip install --upgrade {cmd}" for cmd in commands] + + installing.append(module_name) + + try: + for cmd in commands: + print(">", cmd) + if os.system(cmd) != 0: + raise RuntimeError(f"Error while running {cmd}. Please check the logs in the command-line.") + finally: + installing.remove(module_name) + + +def uninstall(module_name): + if not is_installed(module_name): + log.info(f"{module_name} hasn't been installed!") + return + + if module_name not in manifest: + raise RuntimeError(f"Can't uninstall unknown package: {module_name}!") + + commands = manifest[module_name]["uninstall"] + commands = [f"python -m pip uninstall -y {cmd}" for cmd in commands] + + for cmd in commands: + print(">", cmd) + if os.system(cmd) != 0: + raise RuntimeError(f"Error while running {cmd}. Please check the logs in the command-line.") + + +def version(module_name: str) -> str: + try: + return pkg_version(module_name) + except: + return None diff --git a/ui/easydiffusion/server.py b/ui/easydiffusion/server.py index 0f1890c3..a8f848fd 100644 --- a/ui/easydiffusion/server.py +++ b/ui/easydiffusion/server.py @@ -8,7 +8,7 @@ import os import traceback from typing import List, Union -from easydiffusion import app, model_manager, task_manager +from easydiffusion import app, model_manager, task_manager, package_manager from easydiffusion.tasks import RenderTask, FilterTask from easydiffusion.types import ( GenerateImageRequest, @@ -135,6 +135,10 @@ def init(): def stop_cloudflare_tunnel(req: dict): return stop_cloudflare_tunnel_internal(req) + @server_api.post("/package/{package_name:str}") + def modify_package(package_name: str, req: dict): + return modify_package_internal(package_name, req) + @server_api.get("/") def read_root(): return FileResponse(os.path.join(app.SD_UI_DIR, "index.html"), headers=NOCACHE_HEADERS) @@ -226,16 +230,24 @@ def ping_internal(session_id: str = None): if task_manager.current_state_error: raise HTTPException(status_code=500, detail=str(task_manager.current_state_error)) raise HTTPException(status_code=500, detail="Render thread is dead.") + if task_manager.current_state_error and not isinstance(task_manager.current_state_error, StopAsyncIteration): raise HTTPException(status_code=500, detail=str(task_manager.current_state_error)) + # Alive response = {"status": str(task_manager.current_state)} + if session_id: session = task_manager.get_cached_session(session_id, update_ttl=True) response["tasks"] = {id(t): t.status for t in session.tasks} + response["devices"] = task_manager.get_devices() + 
response["packages_installed"] = package_manager.get_installed_packages() + response["packages_installing"] = package_manager.installing + if cloudflare.address != None: response["cloudflare"] = cloudflare.address + return JSONResponse(response, headers=NOCACHE_HEADERS) @@ -423,3 +435,19 @@ def stop_cloudflare_tunnel_internal(req: dict): log.error(str(e)) log.error(traceback.format_exc()) return HTTPException(status_code=500, detail=str(e)) + + +def modify_package_internal(package_name: str, req: dict): + try: + cmd = req["command"] + if cmd not in ("install", "uninstall"): + raise RuntimeError(f"Unknown command: {cmd}") + + cmd = getattr(package_manager, cmd) + cmd(package_name) + + return JSONResponse({"status": "OK"}, headers=NOCACHE_HEADERS) + except Exception as e: + log.error(str(e)) + log.error(traceback.format_exc()) + return HTTPException(status_code=500, detail=str(e)) diff --git a/ui/easydiffusion/task_manager.py b/ui/easydiffusion/task_manager.py index 27b53b6f..699b4494 100644 --- a/ui/easydiffusion/task_manager.py +++ b/ui/easydiffusion/task_manager.py @@ -373,6 +373,12 @@ def get_devices(): finally: manager_lock.release() + # temp until TRT releases + import os + from easydiffusion import app + + devices["enable_trt"] = os.path.exists(os.path.join(app.ROOT_DIR, "tensorrt")) + return devices diff --git a/ui/easydiffusion/tasks/render_images.py b/ui/easydiffusion/tasks/render_images.py index 42bcc76a..bbc36aa5 100644 --- a/ui/easydiffusion/tasks/render_images.py +++ b/ui/easydiffusion/tasks/render_images.py @@ -60,7 +60,11 @@ class RenderTask(Task): model_manager.resolve_model_paths(self.models_data) models_to_force_reload = [] - if runtime.set_vram_optimizations(context) or self.has_clip_skip_changed(context): + if ( + runtime.set_vram_optimizations(context) + or self.has_param_changed(context, "clip_skip") + or self.has_param_changed(context, "convert_to_tensorrt") + ): models_to_force_reload.append("stable-diffusion") model_manager.reload_models_if_necessary(context, self.models_data, models_to_force_reload) @@ -78,13 +82,15 @@ class RenderTask(Task): step_callback, ) - def has_clip_skip_changed(self, context): + def has_param_changed(self, context, param_name): if not context.test_diffusers: return False + if "stable-diffusion" not in context.models or "params" not in context.models["stable-diffusion"]: + return True model = context.models["stable-diffusion"] - new_clip_skip = self.models_data.model_params.get("stable-diffusion", {}).get("clip_skip", False) - return model["clip_skip"] != new_clip_skip + new_val = self.models_data.model_params.get("stable-diffusion", {}).get(param_name, False) + return model["params"].get(param_name) != new_val def make_images( diff --git a/ui/easydiffusion/types.py b/ui/easydiffusion/types.py index a37da9ef..b1d55f5a 100644 --- a/ui/easydiffusion/types.py +++ b/ui/easydiffusion/types.py @@ -217,7 +217,10 @@ def convert_legacy_render_req_to_new(old_req: dict): # move the model params if model_paths["stable-diffusion"]: - model_params["stable-diffusion"] = {"clip_skip": bool(old_req.get("clip_skip", False))} + model_params["stable-diffusion"] = { + "clip_skip": bool(old_req.get("clip_skip", False)), + "convert_to_tensorrt": bool(old_req.get("convert_to_tensorrt", False)), + } # move the filter params if model_paths["realesrgan"]: diff --git a/ui/index.html b/ui/index.html index 370ecbea..2d47433e 100644 --- a/ui/index.html +++ b/ui/index.html @@ -146,6 +146,14 @@ Click to learn more about custom models + + + + + Click to learn more about 
TensorRT + + + @@ -363,7 +371,7 @@

- Accelerate Easy Diffusion
+ Optional Packages
@@ -677,7 +685,7 @@ async function init() { events: { statusChange: setServerStatus, idle: onIdle, - ping: tunnelUpdate + ping: onPing } }) splashScreen() diff --git a/ui/media/js/main.js b/ui/media/js/main.js index 811a4b9b..fa42d3ff 100644 --- a/ui/media/js/main.js +++ b/ui/media/js/main.js @@ -275,24 +275,24 @@ function setServerStatus(event) { // e : MouseEvent // prompt : Text to be shown as prompt. Should be a question to which "yes" is a good answer. // fn : function to be called if the user confirms the dialog or has the shift key pressed +// allowSkip: Allow skipping the dialog using the shift key or the confirm_dangerous_actions setting (default: true) // // If the user had the shift key pressed while clicking, the function fn will be executed. // If the setting "confirm_dangerous_actions" in the system settings is disabled, the function // fn will be executed. // Otherwise, a confirmation dialog is shown. If the user confirms, the function fn will also // be executed. -function shiftOrConfirm(e, prompt, fn) { +function shiftOrConfirm(e, prompt, fn, allowSkip = true) { e.stopPropagation() - if (e.shiftKey || !confirmDangerousActionsField.checked) { + let tip = allowSkip + ? 'Tip: To skip this dialog, use shift-click or disable the "Confirm dangerous actions" setting in the Settings tab.' + : "" + if (allowSkip && (e.shiftKey || !confirmDangerousActionsField.checked)) { fn(e) } else { - confirm( - 'Tip: To skip this dialog, use shift-click or disable the "Confirm dangerous actions" setting in the Settings tab.', - prompt, - () => { - fn(e) - } - ) + confirm(tip, prompt, () => { + fn(e) + }) } } @@ -1417,6 +1417,11 @@ function getCurrentUserRequest() { newTask.reqBody.lora_alpha = modelStrengths } } + if (testDiffusers.checked && document.getElementById("toggle-tensorrt-install").innerHTML == "Uninstall") { + // TRT is installed + newTask.reqBody.convert_to_tensorrt = document.querySelector("#convert_to_tensorrt").checked + } + return newTask } @@ -2217,6 +2222,11 @@ resumeBtn.addEventListener("click", function() { document.body.classList.remove("wait-pause") }) +function onPing(event) { + tunnelUpdate(event) + packagesUpdate(event) +} + function tunnelUpdate(event) { if ("cloudflare" in event) { document.getElementById("cloudflare-off").classList.add("displayNone") @@ -2230,6 +2240,23 @@ function tunnelUpdate(event) { } } +function packagesUpdate(event) { + let trtBtn = document.getElementById("toggle-tensorrt-install") + let trtInstalled = "packages_installed" in event && "tensorrt" in event["packages_installed"] + + if ("packages_installing" in event && event["packages_installing"].includes("tensorrt")) { + trtBtn.innerHTML = "Installing.." + trtBtn.disabled = true + } else { + trtBtn.innerHTML = trtInstalled ? 
"Uninstall" : "Install" + trtBtn.disabled = false + } + + if (document.getElementById("toggle-tensorrt-install").innerHTML == "Uninstall") { + document.querySelector("#enable_trt_config").classList.remove("displayNone") + } +} + document.getElementById("toggle-cloudflare-tunnel").addEventListener("click", async function() { let command = "stop" if (document.getElementById("toggle-cloudflare-tunnel").innerHTML == "Start") { @@ -2249,6 +2276,63 @@ document.getElementById("toggle-cloudflare-tunnel").addEventListener("click", as console.log(`Cloudflare tunnel ${command} result:`, res) }) +document.getElementById("toggle-tensorrt-install").addEventListener("click", function(e) { + if (this.disabled === true) { + return + } + + let command = this.innerHTML.toLowerCase() + let self = this + + shiftOrConfirm( + e, + "Are you sure you want to " + command + " TensorRT?", + async function() { + showToast(`TensorRT ${command} started. Please wait.`) + + self.disabled = true + + if (command === "install") { + self.innerHTML = "Installing.." + } else if (command === "uninstall") { + self.innerHTML = "Uninstalling.." + } + + if (command === "installing..") { + alert("Already installing TensorRT!") + return + } + if (command !== "install" && command !== "uninstall") { + return + } + + let res = await fetch("/package/tensorrt", { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify({ + command: command, + }), + }) + res = await res.json() + + self.disabled = false + + if (res.status === "OK") { + alert("TensorRT " + command + "ed successfully!") + self.innerHTML = command === "install" ? "Uninstall" : "Install" + } else if (res.status_code === 500) { + alert("TensorselfRT failed to " + command + ": " + res.detail) + self.innerHTML = command === "install" ? "Install" : "Uninstall" + } + + console.log(`Package ${command} result:`, res) + }, + false + ) +}) + /* Embeddings */ function updateEmbeddingsList(filter = "") { diff --git a/ui/media/js/parameters.js b/ui/media/js/parameters.js index d7694774..1980f1a1 100644 --- a/ui/media/js/parameters.js +++ b/ui/media/js/parameters.js @@ -247,10 +247,10 @@ var PARAMETERS = [ type: ParameterType.custom, label: "NVIDIA TensorRT", note: `Faster image generation by converting your Stable Diffusion models to the NVIDIA TensorRT format. You can choose the - models to convert. Requires an NVIDIA graphics card.
<br/><br/>
+ models to convert. Download size: approximately 2 GB.
<br/><br/>
Early access version: support for LoRA is still under development.`, icon: "fa-angles-up", - render: () => '', + render: () => '', table: installExtrasTable, }, ] @@ -596,12 +596,10 @@ function setDeviceInfo(devices) { systemInfoEl.querySelector("#system-info-rendering-devices").innerHTML = activeGPUs.join("
") // tensorRT - if (devices.active) { - console.log(devices.active) + if (devices.active && testDiffusers.checked && devices.enable_trt === true) { let nvidiaGPUs = Object.keys(devices.active).filter((d) => { let gpuName = devices.active[d].name gpuName = gpuName.toLowerCase() - console.log(gpuName) return ( gpuName.includes("nvidia") || gpuName.includes("geforce") ||