diff --git a/scripts/on_sd_start.bat b/scripts/on_sd_start.bat
index 3a8a2961..eddae6b8 100644
--- a/scripts/on_sd_start.bat
+++ b/scripts/on_sd_start.bat
@@ -104,18 +104,21 @@ call python --version
 
 @FOR /F "tokens=* USEBACKQ" %%F IN (`python scripts\get_config.py --default=False net listen_to_network`) DO (
     if "%%F" EQU "True" (
-        @SET ED_BIND_IP=0.0.0.0
+        @FOR /F "tokens=* USEBACKQ" %%G IN (`python scripts\get_config.py --default=0.0.0.0 net bind_ip`) DO (
+            @SET ED_BIND_IP=%%G
+        )
     ) else (
         @SET ED_BIND_IP=127.0.0.1
     )
 )
 
+
 @cd stable-diffusion
 
 @rem set any overrides
 set HF_HUB_DISABLE_SYMLINKS_WARNING=true
 
-@uvicorn main:server_api --app-dir "%SD_UI_PATH%" --port %ED_BIND_PORT% --host %ED_BIND_IP% --log-level error
+@python -m uvicorn main:server_api --app-dir "%SD_UI_PATH%" --port %ED_BIND_PORT% --host %ED_BIND_IP% --log-level error
 
 @pause
diff --git a/scripts/on_sd_start.sh b/scripts/on_sd_start.sh
index e54c72bc..e366bd2a 100755
--- a/scripts/on_sd_start.sh
+++ b/scripts/on_sd_start.sh
@@ -72,7 +72,7 @@ export SD_UI_PATH=`pwd`/ui
 export ED_BIND_PORT="$( python scripts/get_config.py --default=9000 net listen_port )"
 case "$( python scripts/get_config.py --default=False net listen_to_network )" in
     "True")
-        export ED_BIND_IP=0.0.0.0
+        export ED_BIND_IP=$( python scripts/get_config.py --default=0.0.0.0 net bind_ip)
         ;;
     "False")
         export ED_BIND_IP=127.0.0.1
diff --git a/ui/easydiffusion/utils/save_utils.py b/ui/easydiffusion/utils/save_utils.py
index ff2906a6..75d35dc8 100644
--- a/ui/easydiffusion/utils/save_utils.py
+++ b/ui/easydiffusion/utils/save_utils.py
@@ -1,6 +1,8 @@
 import os
 import re
 import time
+import regex
+
 from datetime import datetime
 from functools import reduce
 
@@ -30,11 +32,12 @@ TASK_TEXT_MAPPING = {
     "lora_alpha": "LoRA Strength",
     "use_hypernetwork_model": "Hypernetwork model",
     "hypernetwork_strength": "Hypernetwork Strength",
+    "use_embedding_models": "Embedding models",
     "tiling": "Seamless Tiling",
     "use_face_correction": "Use Face Correction",
     "use_upscale": "Use Upscaling",
     "upscale_amount": "Upscale By",
-    "latent_upscaler_steps": "Latent Upscaler Steps"
+    "latent_upscaler_steps": "Latent Upscaler Steps",
 }
 
 time_placeholders = {
@@ -202,6 +205,9 @@ def get_printable_request(req: GenerateImageRequest, task_data: TaskData):
     req_metadata = req.dict()
     task_data_metadata = task_data.dict()
 
+    app_config = app.getConfig()
+    using_diffusers = app_config.get("test_diffusers", False)
+
     # Save the metadata in the order defined in TASK_TEXT_MAPPING
     metadata = {}
     for key in TASK_TEXT_MAPPING.keys():
@@ -209,6 +215,24 @@ def get_printable_request(req: GenerateImageRequest, task_data: TaskData):
             metadata[key] = req_metadata[key]
         elif key in task_data_metadata:
             metadata[key] = task_data_metadata[key]
+        elif key == "use_embedding_models" and using_diffusers:
+            embeddings_extensions = {".pt", ".bin", ".safetensors"}
+            def scan_directory(directory_path: str):
+                used_embeddings = []
+                for entry in os.scandir(directory_path):
+                    if entry.is_file():
+                        entry_extension = os.path.splitext(entry.name)[1]
+                        if entry_extension not in embeddings_extensions:
+                            continue
+
+                        embedding_name_regex = regex.compile(r"(^|[\s,])" + regex.escape(os.path.splitext(entry.name)[0]) + r"([+-]*$|[\s,]|[+-]+[\s,])")
+                        if embedding_name_regex.search(req.prompt) or embedding_name_regex.search(req.negative_prompt):
+                            used_embeddings.append(entry.path)
+                    elif entry.is_dir():
+                        used_embeddings.extend(scan_directory(entry.path))
+                return used_embeddings
+            used_embeddings = scan_directory(os.path.join(app.MODELS_DIR, "embeddings"))
"embeddings")) + metadata["use_embedding_models"] = ", ".join(used_embeddings) if len(used_embeddings) > 0 else None # Clean up the metadata if req.init_image is None and "prompt_strength" in metadata: @@ -222,8 +246,7 @@ def get_printable_request(req: GenerateImageRequest, task_data: TaskData): if task_data.use_upscale != "latent_upscaler" and "latent_upscaler_steps" in metadata: del metadata["latent_upscaler_steps"] - app_config = app.getConfig() - if not app_config.get("test_diffusers", False): + if not using_diffusers: for key in (x for x in ["use_lora_model", "lora_alpha", "clip_skip", "tiling", "latent_upscaler_steps"] if x in metadata): del metadata[key] diff --git a/ui/media/js/main.js b/ui/media/js/main.js index 5ebf76b5..e36865d6 100644 --- a/ui/media/js/main.js +++ b/ui/media/js/main.js @@ -2186,7 +2186,17 @@ function updateEmbeddingsList(filter="") { } } - embeddingsList.innerHTML = html(modelsOptions.embeddings, "", filter) + // Remove after fixing https://github.com/huggingface/diffusers/issues/3922 + let warning = "" + if (vramUsageLevelField.value == "low") { + warning = ` +
+            <div>
+                Warning: Your GPU memory profile is set to "Low". Embeddings currently only work in "Balanced" mode!
+            </div>`
+    }
+    // END of remove block
+
+    embeddingsList.innerHTML = warning + html(modelsOptions.embeddings, "", filter)
 
     embeddingsList.querySelectorAll("button").forEach( (b) => { b.addEventListener("click", onButtonClick)})
 }
diff --git a/ui/media/js/utils.js b/ui/media/js/utils.js
index 6ab24712..bbacbb47 100644
--- a/ui/media/js/utils.js
+++ b/ui/media/js/utils.js
@@ -1074,6 +1074,12 @@ async function deleteKeys(keyToDelete) {
 
 function modalDialogCloseOnBackdropClick(dialog) {
     dialog.addEventListener('mousedown', function (event) {
+        // Firefox creates an event with clientX|Y = 0|0 when choosing an