Mirror of https://github.com/easydiffusion/easydiffusion.git (synced 2025-05-30 06:40:09 +02:00)

Merge branch 'beta' into Mouse-wheel-behavior-fixes

commit 7c50b8bf94
3rd-PARTY-LICENSES | 27 lines | new file

@@ -0,0 +1,27 @@
+jquery-confirm
+==============
+https://craftpip.github.io/jquery-confirm/
+
+jquery-confirm is licensed under the MIT license:
+
+The MIT License (MIT)
+
+Copyright (c) 2019 Boniface Pereira
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -19,8 +19,15 @@
 - Configuration to prevent the browser from opening on startup
 - Lots of minor bug fixes
 - A `What's New?` tab in the UI
+- Ask for a confirmation before clearing the results pane or stopping a render task. The dialog can be skipped by holding down the shift key while clicking on the button.
+- Show the network addresses of the server in the systems setting dialog
 
 ### Detailed changelog
+* 2.4.17 - 30 Nov 2022 - Scroll to generated image. Thanks @patriceac
+* 2.4.17 - 30 Nov 2022 - Show the network addresses of the server in the systems setting dialog. Thanks @JeLuf
+* 2.4.17 - 30 Nov 2022 - Fix a bug where GFPGAN wouldn't work properly when multiple GPUs tried to run it at the same time. Thanks @madrang
+* 2.4.17 - 30 Nov 2022 - Confirm before stopping or clearing all the tasks. Thanks @JeLuf
+* 2.4.16 - 29 Nov 2022 - Bug fixes for SD 2.0 - remove the need for patching, default to SD 1.4 model if trying to load an SD2 model in SD1.4.
 * 2.4.15 - 25 Nov 2022 - Experimental support for SD 2.0. Uses lots of memory, not optimized, probably GPU-only.
 * 2.4.14 - 22 Nov 2022 - Change the backend to a custom fork of Stable Diffusion
 * 2.4.13 - 21 Nov 2022 - Change the modifier weight via mouse wheel, drag to reorder selected modifiers, and some more modifier-related fixes. Thanks @patriceac
@@ -42,13 +42,9 @@ if NOT DEFINED test_sd2 set test_sd2=N
 
 if "%test_sd2%" == "N" (
 @call git -c advice.detachedHead=false checkout 7f32368ed1030a6e710537047bacd908adea183a
-
-@call git apply --whitespace=warn ..\ui\sd_internal\ddim_callback.patch
 )
 if "%test_sd2%" == "Y" (
-@call git -c advice.detachedHead=false checkout 6e2f82187f8ecc4ea59ac37dc239cfcc78038f6d
-
-@call git apply ..\ui\sd_internal\ddim_callback_sd2.patch
+@call git -c advice.detachedHead=false checkout 5d647c5459f4cd790672512222bc41903c01bb71
 )
 
 @cd ..
@@ -66,8 +62,6 @@ if NOT DEFINED test_sd2 set test_sd2=N
 @cd stable-diffusion
 @call git -c advice.detachedHead=false checkout 7f32368ed1030a6e710537047bacd908adea183a
-
-@call git apply --whitespace=warn ..\ui\sd_internal\ddim_callback.patch
 
 @cd ..
 )
 
scripts/on_sd_start.sh | 8 lines | mode changed: Normal file → Executable file
@@ -37,12 +37,8 @@ if [ -e "scripts/install_status.txt" ] && [ `grep -c sd_git_cloned scripts/insta
 
 if [ "$test_sd2" == "N" ]; then
 git -c advice.detachedHead=false checkout 7f32368ed1030a6e710537047bacd908adea183a
-
-git apply --whitespace=warn ../ui/sd_internal/ddim_callback.patch || fail "ddim patch failed"
 elif [ "$test_sd2" == "Y" ]; then
-git -c advice.detachedHead=false checkout 992f111312afa9ec1a01beaa9733cb9728f5acd3
-
-git apply --whitespace=warn ../ui/sd_internal/ddim_callback_sd2.patch || fail "sd2 ddim patch failed"
+git -c advice.detachedHead=false checkout 5d647c5459f4cd790672512222bc41903c01bb71
 fi
 
 cd ..
@@ -58,8 +54,6 @@ else
 cd stable-diffusion
 git -c advice.detachedHead=false checkout 7f32368ed1030a6e710537047bacd908adea183a
-
-git apply --whitespace=warn ../ui/sd_internal/ddim_callback.patch || fail "ddim patch failed"
 
 cd ..
 fi
 
@@ -3,6 +3,7 @@
 <head>
 <title>Stable Diffusion UI</title>
 <meta name="viewport" content="width=device-width, initial-scale=1.0">
+<meta name="theme-color" content="#673AB6">
 <link rel="icon" type="image/png" href="/media/images/favicon-16x16.png" sizes="16x16">
 <link rel="icon" type="image/png" href="/media/images/favicon-32x32.png" sizes="32x32">
 <link rel="stylesheet" href="/media/css/fonts.css">
@@ -12,7 +13,10 @@
 <link rel="stylesheet" href="/media/css/modifier-thumbnails.css">
 <link rel="stylesheet" href="/media/css/fontawesome-all.min.css">
 <link rel="stylesheet" href="/media/css/drawingboard.min.css">
+<link rel="stylesheet" href="/media/css//jquery-confirm.min.css">
+<link rel="manifest" href="/media/manifest.webmanifest">
 <script src="/media/js/jquery-3.6.1.min.js"></script>
+<script src="/media/js/jquery-confirm.min.js"></script>
 <script src="/media/js/drawingboard.min.js"></script>
 <script src="/media/js/marked.min.js"></script>
 </head>
@@ -22,7 +26,7 @@
 <div id="logo">
 <h1>
 Stable Diffusion UI
-<small>v2.4.15 <span id="updateBranchLabel"></span></small>
+<small>v2.4.17 <span id="updateBranchLabel"></span></small>
 </h1>
 </div>
 <div id="server-status">
@@ -250,8 +254,17 @@
 <br/><br/>
 <div>
 <h3><i class="fa fa-microchip icon"></i> System Info</h3>
-<div id="system-info"></div>
+<div id="system-info">
+<table>
+<tr><td><label>Processor:</label></td><td id="system-info-cpu" class="value"></td></tr>
+<tr><td><label>Compatible Graphics Cards (all):</label></td><td id="system-info-gpus-all" class="value"></td></tr>
+<tr><td></td><td> </td></tr>
+<tr><td><label>Used for rendering 🔥:</label></td><td id="system-info-rendering-devices" class="value"></td></tr>
+<tr><td><label>Server Addresses <i class="fa-solid fa-circle-question help-btn"><span class="simple-tooltip right">You can access Stable Diffusion UI from other devices using these addresses</span></i> :</label></td><td id="system-info-server-hosts" class="value"></td></tr>
+</table>
+</div>
 </div>
 
 </div>
 </div>
 <div id="tab-content-about" class="tab-content">
@@ -348,7 +361,7 @@ async function init() {
 await getAppConfig()
 await loadModifiers()
 await loadUIPlugins()
-await getDevices()
+await getSystemInfo()
 
 setInterval(healthCheck, HEALTH_PING_INTERVAL * 1000)
 healthCheck()
ui/media/css/jquery-confirm.min.css | 9 lines | vendored, new file
(File diff suppressed because one or more lines are too long)
@@ -210,7 +210,7 @@ code {
 }
 .collapsible-content {
 display: block;
-padding-left: 15px;
+padding-left: 10px;
 }
 .collapsible-content h5 {
 padding: 5pt 0pt;
@@ -658,11 +658,15 @@ input::file-selector-button {
 opacity: 1;
 }
 
-/* MOBILE SUPPORT */
-@media screen and (max-width: 700px) {
+/* Small screens */
+@media screen and (max-width: 1265px) {
 #top-nav {
 flex-direction: column;
 }
+}
+
+/* MOBILE SUPPORT */
+@media screen and (max-width: 700px) {
 body {
 margin: 0px;
 }
@@ -712,7 +716,7 @@ input::file-selector-button {
 padding-right: 0px;
 }
 #server-status {
-display: none;
+top: 75%;
 }
 .popup > div {
 padding-left: 5px !important;
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@media screen and (max-width: 500px) {
|
||||||
|
#server-status #server-status-msg {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#server-status:hover #server-status-msg {
|
||||||
|
display: inline;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@media (min-width: 700px) {
|
@media (min-width: 700px) {
|
||||||
/* #editor {
|
/* #editor {
|
||||||
max-width: 480px;
|
max-width: 480px;
|
||||||
@@ -997,8 +1010,17 @@ button:hover {
 button:active {
 transition-duration: 0.1s;
 background-color: hsl(var(--accent-hue), 100%, calc(var(--accent-lightness) + 24%));
+position: relative;
+top: 1px;
+left: 1px;
 }
 
 button#save-system-settings-btn {
 padding: 4pt 8pt;
 }
+#ip-info a {
+color:var(--text-color)
+}
+#ip-info div {
+line-height: 200%;
+}
@@ -30,6 +30,9 @@
 --primary-button-border: none;
 --input-switch-padding: 1px;
 --input-height: 18px;
+
+/* Main theme color, hex color fallback. */
+--theme-color-fallback: #673AB6;
 }
 
 .theme-light {
@@ -44,6 +47,8 @@
 --input-text-color: black;
 --input-background-color: #f8f9fa;
 --input-border-color: grey;
+
+--theme-color-fallback: #aaaaaa;
 }
 
 .theme-discord {
@@ -58,6 +63,8 @@
 --input-border-size: 2px;
 --input-background-color: #202225;
 --input-border-color: var(--input-background-color);
+
+--theme-color-fallback: #202225;
 }
 
 .theme-cool-blue {
@@ -71,8 +78,10 @@
 --background-color4: hsl(var(--main-hue), var(--main-saturation), calc(var(--value-base) - (3 * var(--value-step))));
 
 --input-background-color: var(--background-color3);
 
 --accent-hue: 212;
+
+--theme-color-fallback: #0056b8;
 }
 
 
@@ -87,6 +96,8 @@
 --background-color4: hsl(var(--main-hue), var(--main-saturation), calc(var(--value-base) - (3 * var(--value-step))));
 
 --input-background-color: var(--background-color3);
+
+--theme-color-fallback: #5300b8;
 }
 
 .theme-super-dark {
@@ -101,6 +112,8 @@
 
 --input-background-color: var(--background-color3);
 --input-border-size: 0px;
+
+--theme-color-fallback: #000000;
 }
 
 .theme-wild {
@@ -117,8 +130,8 @@
 
 --input-border-size: 1px;
 --input-background-color: hsl(222, var(--main-saturation), calc(var(--value-base) - (2 * var(--value-step))));
---input-text-color: red;
---input-border-color: green;
+--input-text-color: #FF0000;
+--input-border-color: #005E05;
 }
 
 .theme-gnomie {
@@ -136,6 +149,8 @@
 --input-background-color: #2a2a2a;
 --input-border-size: 0px;
 --input-border-color: var(--input-background-color);
+
+--theme-color-fallback: #2168bf;
 }
 
 .theme-gnomie .panel-box {
@@ -35,6 +35,7 @@ const SETTINGS_IDS_LIST = [
 "sound_toggle",
 "turbo",
 "use_full_precision",
+"confirm_dangerous_actions",
 "auto_save_settings"
 ]
 
@@ -55,6 +56,9 @@ async function initSettings() {
 if (!element) {
 console.error(`Missing settings element ${id}`)
 }
+if (id in SETTINGS) { // don't create it again
+return
+}
 SETTINGS[id] = {
 key: id,
 element: element,
@@ -192,9 +192,9 @@ const TASK_MAPPING = {
 parse: (val) => val
 },
 
-numOutputsParallel: { name: 'Parallel Images',
-setUI: (numOutputsParallel) => {
-numOutputsParallelField.value = numOutputsParallel
+num_outputs: { name: 'Parallel Images',
+setUI: (num_outputs) => {
+numOutputsParallelField.value = num_outputs
 },
 readUI: () => parseInt(numOutputsParallelField.value),
 parse: (val) => val
@@ -328,6 +328,7 @@ function getModelPath(filename, extensions)
 filename = filename.slice(0, filename.length - ext.length)
 }
 })
+return filename
 }
 
 const TASK_TEXT_MAPPING = {
@@ -90,9 +90,7 @@ function createModifierGroup(modifierGroup, initiallyExpanded) {
 if (activeTags.map(x => trimModifiers(x.name)).includes(trimModifiers(modifierName))) {
 // remove modifier from active array
 activeTags = activeTags.filter(x => trimModifiers(x.name) != trimModifiers(modifierName))
-modifierCard.classList.remove(activeCardClass)
-
-modifierCard.querySelector('.modifier-card-image-overlay').innerText = '+'
+toggleCardState(modifierCard, false)
 } else {
 // add modifier to active array
 activeTags.push({
|
|||||||
'originElement': modifierCard,
|
'originElement': modifierCard,
|
||||||
'previews': modifierPreviews
|
'previews': modifierPreviews
|
||||||
})
|
})
|
||||||
|
toggleCardState(modifierCard, true)
|
||||||
modifierCard.classList.add(activeCardClass)
|
|
||||||
|
|
||||||
modifierCard.querySelector('.modifier-card-image-overlay').innerText = '-'
|
|
||||||
}
|
}
|
||||||
|
|
||||||
refreshTagsList()
|
refreshTagsList()
|
||||||
@@ -226,8 +221,7 @@ function refreshTagsList() {
 let idx = activeTags.indexOf(tag)
 
 if (idx !== -1 && activeTags[idx].originElement !== undefined) {
-activeTags[idx].originElement.classList.remove(activeCardClass)
-activeTags[idx].originElement.querySelector('.modifier-card-image-overlay').innerText = '+'
+toggleCardState(activeTags[idx].originElement, false)
 
 activeTags.splice(idx, 1)
 refreshTagsList()
@@ -240,6 +234,16 @@ function refreshTagsList() {
 editorModifierTagsList.appendChild(brk)
 }
 
+function toggleCardState(card, makeActive) {
+if (makeActive) {
+card.classList.add(activeCardClass)
+card.querySelector('.modifier-card-image-overlay').innerText = '-'
+} else {
+card.classList.remove(activeCardClass)
+card.querySelector('.modifier-card-image-overlay').innerText = '+'
+}
+}
+
 function changePreviewImages(val) {
 const previewImages = document.querySelectorAll('.modifier-card-image-container img')
 
ui/media/js/jquery-confirm.min.js | 10 lines | vendored, new file
(File diff suppressed because one or more lines are too long)
@@ -138,6 +138,33 @@ function isServerAvailable() {
 }
 }
 
+// shiftOrConfirm(e, prompt, fn)
+// e : MouseEvent
+// prompt : Text to be shown as prompt. Should be a question to which "yes" is a good answer.
+// fn : function to be called if the user confirms the dialog or has the shift key pressed
+//
+// If the user had the shift key pressed while clicking, the function fn will be executed.
+// If the setting "confirm_dangerous_actions" in the system settings is disabled, the function
+// fn will be executed.
+// Otherwise, a confirmation dialog is shown. If the user confirms, the function fn will also
+// be executed.
+function shiftOrConfirm(e, prompt, fn) {
+e.stopPropagation()
+if (e.shiftKey || !confirmDangerousActionsField.checked) {
+fn(e)
+} else {
+$.confirm({ theme: 'supervan',
+title: prompt,
+content: 'Tip: To skip this dialog, use shift-click or disable the setting "Confirm dangerous actions" in the systems setting.',
+buttons: {
+yes: () => { fn(e) },
+cancel: () => {}
+}
+});
+}
+}
+
+
 function logMsg(msg, level, outputMsg) {
 if (outputMsg.hasChildNodes()) {
 outputMsg.appendChild(document.createElement('br'))
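As a reading aid (not part of the commit): a minimal sketch of how the new shiftOrConfirm helper is used by the click handlers further down in this diff. The button and handler names below are hypothetical, chosen only for illustration; the wrapped function runs on shift-click, when the "Confirm dangerous actions" setting is off, or after the user answers "yes" in the jquery-confirm dialog.

    // Hypothetical usage sketch -- `deleteAllBtn` and `deleteAllResults` are
    // illustrative names, not identifiers introduced by this commit.
    deleteAllBtn.addEventListener('click', (e) => {
        shiftOrConfirm(e, "Are you sure? Delete all results?", async function(e) {
            await deleteAllResults() // runs on shift-click, disabled setting, or "yes"
        })
    })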
@@ -169,34 +196,6 @@ function playSound() {
 })
 }
 }
-function setSystemInfo(devices) {
-let cpu = devices.all.cpu.name
-let allGPUs = Object.keys(devices.all).filter(d => d != 'cpu')
-let activeGPUs = Object.keys(devices.active)
-
-function ID_TO_TEXT(d) {
-let info = devices.all[d]
-if ("mem_free" in info && "mem_total" in info) {
-return `${info.name} <small>(${d}) (${info.mem_free.toFixed(1)}Gb free / ${info.mem_total.toFixed(1)} Gb total)</small>`
-} else {
-return `${info.name} <small>(${d}) (no memory info)</small>`
-}
-}
-
-allGPUs = allGPUs.map(ID_TO_TEXT)
-activeGPUs = activeGPUs.map(ID_TO_TEXT)
-
-let systemInfo = `
-<table>
-<tr><td><label>Processor:</label></td><td class="value">${cpu}</td></tr>
-<tr><td><label>Compatible Graphics Cards (all):</label></td><td class="value">${allGPUs.join('</br>')}</td></tr>
-<tr><td></td><td> </td></tr>
-<tr><td><label>Used for rendering 🔥:</label></td><td class="value">${activeGPUs.join('</br>')}</td></tr>
-</table>`
-
-let systemInfoEl = document.querySelector('#system-info')
-systemInfoEl.innerHTML = systemInfo
-}
-
 async function healthCheck() {
 try {
@@ -231,7 +230,7 @@ async function healthCheck() {
 break
 }
 if (serverState.devices) {
-setSystemInfo(serverState.devices)
+setDeviceInfo(serverState.devices)
 }
 serverState.time = Date.now()
 } catch (e) {
@@ -887,8 +886,7 @@ function createTask(task) {
 task['progressBar'] = taskEntry.querySelector('.progress-bar')
 task['stopTask'] = taskEntry.querySelector('.stopTask')
 
-task['stopTask'].addEventListener('click', async function(e) {
-e.stopPropagation()
+task['stopTask'].addEventListener('click', (e) => { shiftOrConfirm(e, "Are you sure? Should this task be stopped?", async function(e) {
 if (task['isProcessing']) {
 task.isProcessing = false
 task.progressBar.classList.remove("active")
@@ -903,9 +901,9 @@ function createTask(task) {
 taskQueue.splice(idx, 1)
 }
 
-taskEntry.remove()
+removeTask(taskEntry)
 }
-})
+})})
 
 task['useSettings'] = taskEntry.querySelector('.useSettings')
 task['useSettings'].addEventListener('click', function(e) {
@@ -934,10 +932,10 @@ function getPrompts() {
 prompts = prompts.filter(prompt => prompt !== '')
 
 if (activeTags.length > 0) {
 const promptTags = activeTags.map(x => x.name).join(", ")
 prompts = prompts.map((prompt) => `${prompt}, ${promptTags}`)
 }
 
 let promptsToMake = applySetOperator(prompts)
 promptsToMake = applyPermuteOperator(promptsToMake)
 
@@ -1047,21 +1045,25 @@ async function stopAllTasks() {
 }
 }
 
-clearAllPreviewsBtn.addEventListener('click', async function() {
+function removeTask(taskToRemove) {
+taskToRemove.remove()
+
+if (document.querySelector('.imageTaskContainer') === null) {
+previewTools.style.display = 'none'
+initialText.style.display = 'block'
+}
+}
+
+clearAllPreviewsBtn.addEventListener('click', (e) => { shiftOrConfirm(e, "Are you sure? Remove all results and tasks from the results pane?", async function() {
 await stopAllTasks()
 
 let taskEntries = document.querySelectorAll('.imageTaskContainer')
-taskEntries.forEach(task => {
-task.remove()
-})
+taskEntries.forEach(removeTask)
+})})
 
-previewTools.style.display = 'none'
-initialText.style.display = 'block'
-})
-
-stopImageBtn.addEventListener('click', async function() {
+stopImageBtn.addEventListener('click', (e) => { shiftOrConfirm(e, "Are you sure? Do you want to stop all the tasks?", async function(e) {
 await stopAllTasks()
-})
+})})
 
 widthField.addEventListener('change', onDimensionChange)
 heightField.addEventListener('change', onDimensionChange)
@@ -5,9 +5,9 @@
 */
 var ParameterType = {
 checkbox: "checkbox",
 select: "select",
 select_multiple: "select_multiple",
 custom: "custom",
 };
 
 /**
@@ -23,174 +23,182 @@
 
 /** @type {Array.<Parameter>} */
 var PARAMETERS = [
 {
 id: "theme",
 type: ParameterType.select,
 label: "Theme",
 default: "theme-default",
 note: "customize the look and feel of the ui",
 options: [ // Note: options expanded dynamically
 {
 value: "theme-default",
 label: "Default"
 }
 ],
 icon: "fa-palette"
 },
 {
 id: "save_to_disk",
 type: ParameterType.checkbox,
 label: "Auto-Save Images",
 note: "automatically saves images to the specified location",
 icon: "fa-download",
 default: false,
 },
 {
 id: "diskPath",
 type: ParameterType.custom,
 label: "Save Location",
 render: (parameter) => {
 return `<input id="${parameter.id}" name="${parameter.id}" size="30" disabled>`
 }
 },
 {
 id: "sound_toggle",
 type: ParameterType.checkbox,
 label: "Enable Sound",
 note: "plays a sound on task completion",
 icon: "fa-volume-low",
 default: true,
 },
 {
 id: "ui_open_browser_on_start",
 type: ParameterType.checkbox,
 label: "Open browser on startup",
 note: "starts the default browser on startup",
 icon: "fa-window-restore",
 default: true,
 },
 {
 id: "turbo",
 type: ParameterType.checkbox,
 label: "Turbo Mode",
 note: "generates images faster, but uses an additional 1 GB of GPU memory",
 icon: "fa-forward",
 default: true,
 },
 {
 id: "use_cpu",
 type: ParameterType.checkbox,
 label: "Use CPU (not GPU)",
 note: "warning: this will be *very* slow",
 icon: "fa-microchip",
 default: false,
 },
 {
 id: "auto_pick_gpus",
 type: ParameterType.checkbox,
 label: "Automatically pick the GPUs (experimental)",
 default: false,
 },
 {
 id: "use_gpus",
 type: ParameterType.select_multiple,
 label: "GPUs to use (experimental)",
 note: "to process in parallel",
 default: false,
 },
 {
 id: "use_full_precision",
 type: ParameterType.checkbox,
 label: "Use Full Precision",
 note: "for GPU-only. warning: this will consume more VRAM",
 icon: "fa-crosshairs",
 default: false,
 },
 {
 id: "auto_save_settings",
 type: ParameterType.checkbox,
 label: "Auto-Save Settings",
 note: "restores settings on browser load",
 icon: "fa-gear",
 default: true,
 },
 {
+id: "confirm_dangerous_actions",
+type: ParameterType.checkbox,
+label: "Confirm dangerous actions",
+note: "Actions that might lead to data loss must either be clicked with the shift key pressed, or confirmed in an 'Are you sure?' dialog",
+icon: "fa-check-double",
+default: true,
+},
+{
 id: "listen_to_network",
 type: ParameterType.checkbox,
 label: "Make Stable Diffusion available on your network",
 note: "Other devices on your network can access this web page",
 icon: "fa-network-wired",
 default: true,
 },
 {
 id: "listen_port",
 type: ParameterType.custom,
 label: "Network port",
 note: "Port that this server listens to. The '9000' part in 'http://localhost:9000'",
 icon: "fa-anchor",
 render: (parameter) => {
 return `<input id="${parameter.id}" name="${parameter.id}" size="6" value="9000" onkeypress="preventNonNumericalInput(event)">`
 }
 },
 {
 id: "test_sd2",
 type: ParameterType.checkbox,
 label: "Test SD 2.0",
-note: "Experimental! High memory usage! GPU-only! Please restart the program after changing this.",
+note: "Experimental! High memory usage! GPU-only! Not the final version! Please restart the program after changing this.",
 icon: "fa-fire",
 default: false,
 },
 {
 id: "use_beta_channel",
 type: ParameterType.checkbox,
 label: "Beta channel",
 note: "Get the latest features immediately (but could be less stable). Please restart the program after changing this.",
 icon: "fa-fire",
 default: false,
 },
 ];
 
 function getParameterSettingsEntry(id) {
 let parameter = PARAMETERS.filter(p => p.id === id)
 if (parameter.length === 0) {
 return
 }
 return parameter[0].settingsEntry
 }
 
 function getParameterElement(parameter) {
 switch (parameter.type) {
 case ParameterType.checkbox:
 var is_checked = parameter.default ? " checked" : "";
 return `<input id="${parameter.id}" name="${parameter.id}"${is_checked} type="checkbox">`
 case ParameterType.select:
 case ParameterType.select_multiple:
 var options = (parameter.options || []).map(option => `<option value="${option.value}">${option.label}</option>`).join("")
 var multiple = (parameter.type == ParameterType.select_multiple ? 'multiple' : '')
 return `<select id="${parameter.id}" name="${parameter.id}" ${multiple}>${options}</select>`
 case ParameterType.custom:
 return parameter.render(parameter)
 default:
 console.error(`Invalid type for parameter ${parameter.id}`);
 return "ERROR: Invalid Type"
 }
 }
 
 let parametersTable = document.querySelector("#system-settings .parameters-table")
 /* fill in the system settings popup table */
 function initParameters() {
 PARAMETERS.forEach(parameter => {
 var element = getParameterElement(parameter)
 var note = parameter.note ? `<small>${parameter.note}</small>` : "";
 var icon = parameter.icon ? `<i class="fa ${parameter.icon}"></i>` : "";
 var newrow = document.createElement('div')
 newrow.innerHTML = `
 <div>${icon}</div>
 <div><label for="${parameter.id}">${parameter.label}</label>${note}</div>
 <div>${element}</div>`
 parametersTable.appendChild(newrow)
 parameter.settingsEntry = newrow
 })
 }
 
 initParameters()
@@ -207,9 +215,11 @@ let listenPortField = document.querySelector("#listen_port")
 let testSD2Field = document.querySelector("#test_sd2")
 let useBetaChannelField = document.querySelector("#use_beta_channel")
 let uiOpenBrowserOnStartField = document.querySelector("#ui_open_browser_on_start")
+let confirmDangerousActionsField = document.querySelector("#confirm_dangerous_actions")
 
 let saveSettingsBtn = document.querySelector('#save-system-settings-btn')
 
+
 async function changeAppConfig(configDelta) {
 try {
 let res = await fetch('/app_config', {
@@ -242,12 +252,15 @@ async function getAppConfig() {
 if ('test_sd2' in config) {
 testSD2Field.checked = config['test_sd2']
 }
+
+let testSD2SettingEntry = getParameterSettingsEntry('test_sd2')
+testSD2SettingEntry.style.display = (config.update_branch === 'beta' ? '' : 'none')
 if (config.net && config.net.listen_to_network === false) {
 listenToNetworkField.checked = false
 }
 if (config.net && config.net.listen_port !== undefined) {
 listenPortField.value = config.net.listen_port
 }
 
 console.log('get config status response', config)
 } catch (e) {
@@ -275,7 +288,6 @@ function getCurrentRenderDeviceSelection() {
 useCPUField.addEventListener('click', function() {
 let gpuSettingEntry = getParameterSettingsEntry('use_gpus')
 let autoPickGPUSettingEntry = getParameterSettingsEntry('auto_pick_gpus')
-console.log("hello", this.checked);
 if (this.checked) {
 gpuSettingEntry.style.display = 'none'
 autoPickGPUSettingEntry.style.display = 'none'
@@ -325,14 +337,45 @@ async function getDiskPath() {
 }
 }
 
-async function getDevices() {
+function setDeviceInfo(devices) {
+let cpu = devices.all.cpu.name
+let allGPUs = Object.keys(devices.all).filter(d => d != 'cpu')
+let activeGPUs = Object.keys(devices.active)
+
+function ID_TO_TEXT(d) {
+let info = devices.all[d]
+if ("mem_free" in info && "mem_total" in info) {
+return `${info.name} <small>(${d}) (${info.mem_free.toFixed(1)}Gb free / ${info.mem_total.toFixed(1)} Gb total)</small>`
+} else {
+return `${info.name} <small>(${d}) (no memory info)</small>`
+}
+}
+
+allGPUs = allGPUs.map(ID_TO_TEXT)
+activeGPUs = activeGPUs.map(ID_TO_TEXT)
+
+let systemInfoEl = document.querySelector('#system-info')
+systemInfoEl.querySelector('#system-info-cpu').innerText = cpu
+systemInfoEl.querySelector('#system-info-gpus-all').innerHTML = allGPUs.join('</br>')
+systemInfoEl.querySelector('#system-info-rendering-devices').innerHTML = activeGPUs.join('</br>')
+}
+
+function setHostInfo(hosts) {
+let port = listenPortField.value
+hosts = hosts.map(addr => `http://${addr}:${port}/`).map(url => `<div><a href="${url}">${url}</a></div>`)
+document.querySelector('#system-info-server-hosts').innerHTML = hosts.join('')
+}
+
+async function getSystemInfo() {
 try {
-let res = await fetch('/get/devices')
+let res = await fetch('/get/system_info')
 if (res.status === 200) {
 res = await res.json()
-let allDeviceIds = Object.keys(res['all']).filter(d => d !== 'cpu')
-let activeDeviceIds = Object.keys(res['active']).filter(d => d !== 'cpu')
+let devices = res['devices']
+let hosts = res['hosts']
+
+let allDeviceIds = Object.keys(devices['all']).filter(d => d !== 'cpu')
+let activeDeviceIds = Object.keys(devices['active']).filter(d => d !== 'cpu')
 
 if (activeDeviceIds.length === 0) {
 useCPUField.checked = true
@@ -350,11 +393,11 @@ async function getDevices() {
 useCPUField.disabled = true // no compatible GPUs, so make the CPU mandatory
 }
 
-autoPickGPUsField.checked = (res['config'] === 'auto')
+autoPickGPUsField.checked = (devices['config'] === 'auto')
 
 useGPUsField.innerHTML = ''
 allDeviceIds.forEach(device => {
-let deviceName = res['all'][device]['name']
+let deviceName = devices['all'][device]['name']
 let deviceOption = `<option value="${device}">${deviceName} (${device})</option>`
 useGPUsField.insertAdjacentHTML('beforeend', deviceOption)
 })
@@ -365,6 +408,9 @@ async function getDevices() {
 } else {
 $('#use_gpus').val(activeDeviceIds)
 }
+
+setDeviceInfo(devices)
+setHostInfo(hosts)
 }
 } catch (e) {
 console.log('error fetching devices', e)
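For orientation (an assumption inferred from the client code above, not a documented API contract): the new /get/system_info endpoint appears to return a JSON object shaped roughly like the sketch below, which is what getSystemInfo(), setDeviceInfo() and setHostInfo() consume. All field values here are illustrative placeholders.

    // Rough shape of the /get/system_info response, as implied by the client code;
    // device ids, names and addresses below are made-up examples.
    const exampleSystemInfoResponse = {
        devices: {
            all:    { cpu: { name: "CPU" }, "cuda:0": { name: "GPU 0", mem_free: 7.5, mem_total: 8.0 } },
            active: { "cuda:0": { name: "GPU 0", mem_free: 7.5, mem_total: 8.0 } },
            config: "auto"                       // compared against 'auto' for autoPickGPUsField
        },
        hosts: ["localhost", "192.168.1.10"]     // rendered as http://<addr>:<port>/ links
    }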
@@ -372,23 +418,23 @@ async function getDevices() {
 }
 
 saveSettingsBtn.addEventListener('click', function() {
 let updateBranch = (useBetaChannelField.checked ? 'beta' : 'main')
 
 if (listenPortField.value == '') {
 alert('The network port field must not be empty.')
 } else if (listenPortField.value<1 || listenPortField.value>65535) {
 alert('The network port must be a number from 1 to 65535')
 } else {
 changeAppConfig({
 'render_devices': getCurrentRenderDeviceSelection(),
 'update_branch': updateBranch,
 'ui_open_browser_on_start': uiOpenBrowserOnStartField.checked,
 'listen_to_network': listenToNetworkField.checked,
 'listen_port': listenPortField.value,
 'test_sd2': testSD2Field.checked
 })
 }
 
 saveSettingsBtn.classList.add('active')
 asyncDelay(300).then(() => saveSettingsBtn.classList.remove('active'))
 })
@@ -60,6 +60,7 @@ function themeFieldChanged() {
 
 body.style = "";
 var theme = THEMES.find(t => t.key == theme_key);
+let borderColor = undefined
 if (theme) {
 // refresh variables incase they are back referencing
 Array.from(DEFAULT_THEME.rule.style)
@@ -67,7 +68,14 @@ function themeFieldChanged() {
 .forEach(cssVariable => {
 body.style.setProperty(cssVariable, DEFAULT_THEME.rule.style.getPropertyValue(cssVariable));
 });
+borderColor = theme.rule.style.getPropertyValue('--input-border-color').trim()
+if (!borderColor.startsWith('#')) {
+borderColor = theme.rule.style.getPropertyValue('--theme-color-fallback')
+}
+} else {
+borderColor = DEFAULT_THEME.rule.style.getPropertyValue('--theme-color-fallback')
 }
+document.querySelector('meta[name="theme-color"]').setAttribute("content", borderColor)
 }
 
 themeField.addEventListener('change', themeFieldChanged);
@@ -1,17 +1,17 @@
 // https://gomakethings.com/finding-the-next-and-previous-sibling-elements-that-match-a-selector-with-vanilla-js/
 function getNextSibling(elem, selector) {
 // Get the next sibling element
 var sibling = elem.nextElementSibling
 
 // If there's no selector, return the first sibling
 if (!selector) return sibling
 
 // If the sibling matches our selector, use it
 // If not, jump to the next sibling and continue the loop
 while (sibling) {
 if (sibling.matches(selector)) return sibling
 sibling = sibling.nextElementSibling
 }
 }
 
 
ui/media/manifest.webmanifest | 8 lines | new file

@@ -0,0 +1,8 @@
+{
+"name": "Stable Diffusion UI",
+"display": "standalone",
+"display_override": [
+"window-controls-overlay"
+],
+"theme_color": "#000000"
+}
ui/plugins/ui/Autoscroll.plugin.js | 42 lines | new file

@@ -0,0 +1,42 @@
+(function () {
+"use strict"
+
+var styleSheet = document.createElement("style");
+styleSheet.textContent = `
+.auto-scroll {
+float: right;
+}
+`;
+document.head.appendChild(styleSheet);
+
+const autoScrollControl = document.createElement('div');
+autoScrollControl.innerHTML = `<input id="auto_scroll" name="auto_scroll" type="checkbox">
+<label for="auto_scroll">Scroll to generated image</label>`
+autoScrollControl.className = "auto-scroll"
+clearAllPreviewsBtn.parentNode.insertBefore(autoScrollControl, clearAllPreviewsBtn.nextSibling)
+prettifyInputs(document);
+let autoScroll = document.querySelector("#auto_scroll")
+
+SETTINGS_IDS_LIST.push("auto_scroll")
+initSettings()
+
+// observe for changes in the preview pane
+var observer = new MutationObserver(function (mutations) {
+mutations.forEach(function (mutation) {
+if (mutation.target.className == 'img-batch') {
+Autoscroll(mutation.target)
+}
+})
+})
+
+observer.observe(document.getElementById('preview'), {
+childList: true,
+subtree: true
+})
+
+function Autoscroll(target) {
+if (autoScroll.checked && target !== null) {
+target.parentElement.parentElement.parentElement.scrollIntoView();
+}
+}
+})()
@@ -7,6 +7,7 @@ Notes:
 import json
 import os, re
 import traceback
+import queue
 import torch
 import numpy as np
 from gc import collect as gc_collect
@@ -27,6 +28,8 @@ from gfpgan import GFPGANer
 from basicsr.archs.rrdbnet_arch import RRDBNet
 from realesrgan import RealESRGANer
 
+from threading import Lock
+
 import uuid
 
 logging.set_verbosity_error()
@@ -34,7 +37,7 @@ logging.set_verbosity_error()
 # consts
 config_yaml = "optimizedSD/v1-inference.yaml"
 filename_regex = re.compile('[^a-zA-Z0-9]')
-force_gfpgan_to_cuda0 = True # workaround: gfpgan currently works only on cuda:0
+gfpgan_temp_device_lock = Lock() # workaround: gfpgan currently can only start on one device at a time.
 
 # api stuff
 from sd_internal import device_manager
@@ -308,12 +311,6 @@ def move_to_cpu(model):
 
 def load_model_gfpgan():
     if thread_data.gfpgan_file is None: raise ValueError(f'Thread gfpgan_file is undefined.')
-
-    # hack for a bug in facexlib: https://github.com/xinntao/facexlib/pull/19/files
-    from facexlib.detection import retinaface
-    retinaface.device = torch.device(thread_data.device)
-    print('forced retinaface.device to', thread_data.device)
-
     model_path = thread_data.gfpgan_file + ".pth"
     thread_data.model_gfpgan = GFPGANer(device=torch.device(thread_data.device), model_path=model_path, upscale=1, arch='clean', channel_multiplier=2, bg_upsampler=None)
     print('loaded', thread_data.gfpgan_file, 'to', thread_data.model_gfpgan.device, 'precision', thread_data.precision)
@@ -369,15 +366,23 @@ def apply_filters(filter_name, image_data, model_path=None):
     image_data.to(thread_data.device)
 
     if filter_name == 'gfpgan':
-        if model_path is not None and model_path != thread_data.gfpgan_file:
-            thread_data.gfpgan_file = model_path
-            load_model_gfpgan()
-        elif not thread_data.model_gfpgan:
-            load_model_gfpgan()
-        if thread_data.model_gfpgan is None: raise Exception('Model "gfpgan" not loaded.')
-        print('enhance with', thread_data.gfpgan_file, 'on', thread_data.model_gfpgan.device, 'precision', thread_data.precision)
-        _, _, output = thread_data.model_gfpgan.enhance(image_data[:,:,::-1], has_aligned=False, only_center_face=False, paste_back=True)
-        image_data = output[:,:,::-1]
+        # This lock is only ever used here. No need to use timeout for the request. Should never deadlock.
+        with gfpgan_temp_device_lock: # Wait for any other devices to complete before starting.
+            # hack for a bug in facexlib: https://github.com/xinntao/facexlib/pull/19/files
+            from facexlib.detection import retinaface
+            retinaface.device = torch.device(thread_data.device)
+            print('forced retinaface.device to', thread_data.device)
+
+            if model_path is not None and model_path != thread_data.gfpgan_file:
+                thread_data.gfpgan_file = model_path
+                load_model_gfpgan()
+            elif not thread_data.model_gfpgan:
+                load_model_gfpgan()
+            if thread_data.model_gfpgan is None: raise Exception('Model "gfpgan" not loaded.')
+
+            print('enhance with', thread_data.gfpgan_file, 'on', thread_data.model_gfpgan.device, 'precision', thread_data.precision)
+            _, _, output = thread_data.model_gfpgan.enhance(image_data[:,:,::-1], has_aligned=False, only_center_face=False, paste_back=True)
+            image_data = output[:,:,::-1]
 
     if filter_name == 'real_esrgan':
         if model_path is not None and model_path != thread_data.real_esrgan_file:
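The hunk above replaces the old force_gfpgan_to_cuda0 workaround with a module-level lock, so only one device initialises and runs GFPGAN at any moment. A minimal, self-contained sketch of that serialization pattern (the function name and the sleep stand-in are illustrative, not the project's code):

```python
import threading
import time

gfpgan_lock = threading.Lock()  # one lock shared by every render thread

def enhance_on(device_name):
    # Wait for any other device to finish its GFPGAN pass before starting ours.
    with gfpgan_lock:
        print(f'{device_name}: loading + running GFPGAN')
        time.sleep(0.1)  # stand-in for the actual model load / enhance call

threads = [threading.Thread(target=enhance_on, args=(d,)) for d in ('cuda:0', 'cuda:1')]
for t in threads: t.start()
for t in threads: t.join()
```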
@@ -392,9 +397,34 @@ def apply_filters(filter_name, image_data, model_path=None):
 
     return image_data
 
-def mk_img(req: Request):
+def is_model_reload_necessary(req: Request):
+    # custom model support:
+    # the req.use_stable_diffusion_model needs to be a valid path
+    # to the ckpt file (without the extension).
+    if not os.path.exists(req.use_stable_diffusion_model + '.ckpt'): raise FileNotFoundError(f'Cannot find {req.use_stable_diffusion_model}.ckpt')
+
+    needs_model_reload = False
+    if not thread_data.model or thread_data.ckpt_file != req.use_stable_diffusion_model or thread_data.vae_file != req.use_vae_model:
+        thread_data.ckpt_file = req.use_stable_diffusion_model
+        thread_data.vae_file = req.use_vae_model
+        needs_model_reload = True
+
+    if thread_data.device != 'cpu':
+        if (thread_data.precision == 'autocast' and (req.use_full_precision or not thread_data.model_is_half)) or \
+            (thread_data.precision == 'full' and not req.use_full_precision and not thread_data.force_full_precision):
+            thread_data.precision = 'full' if req.use_full_precision else 'autocast'
+            needs_model_reload = True
+
+    return needs_model_reload
+
+def reload_model():
+    unload_models()
+    unload_filters()
+    load_model_ckpt()
+
+def mk_img(req: Request, data_queue: queue.Queue, task_temp_images: list, step_callback):
     try:
-        yield from do_mk_img(req)
+        return do_mk_img(req, data_queue, task_temp_images, step_callback)
     except Exception as e:
         print(traceback.format_exc())
 
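With this change mk_img is no longer a generator: progress is pushed into the queue.Queue supplied by the caller, and step_callback gives the caller a hook after each update. A small self-contained sketch of that producer/consumer shape (function names here are hypothetical, not the project's API):

```python
import json, queue, threading

def render(data_queue: queue.Queue, step_callback):
    # Producer side: push JSON progress messages instead of yielding them.
    for step in range(3):
        data_queue.put(json.dumps({"step": step, "total_steps": 3}))
        step_callback()  # lets the caller refresh task TTLs or check for a cancel signal
    data_queue.put(json.dumps({"status": "succeeded"}))

def consume(data_queue: queue.Queue):
    # Consumer side: e.g. a streaming HTTP response draining the queue.
    while True:
        msg = json.loads(data_queue.get())
        print(msg)
        if msg.get("status") == "succeeded":
            break

q = queue.Queue()
t = threading.Thread(target=render, args=(q, lambda: None))
t.start()
consume(q)
t.join()
```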
@@ -405,12 +435,13 @@ def mk_img(req: Request):
             thread_data.model.model2.to("cpu")
 
         gc() # Release from memory.
-        yield json.dumps({
+        data_queue.put(json.dumps({
             "status": 'failed',
             "detail": str(e)
-        })
+        }))
+        raise e
 
-def update_temp_img(req, x_samples):
+def update_temp_img(req, x_samples, task_temp_images: list):
     partial_images = []
     for i in range(req.num_outputs):
         if thread_data.test_sd2:
@@ -421,19 +452,18 @@ def update_temp_img(req, x_samples):
         x_sample = 255.0 * rearrange(x_sample[0].cpu().numpy(), "c h w -> h w c")
         x_sample = x_sample.astype(np.uint8)
         img = Image.fromarray(x_sample)
-        buf = BytesIO()
-        img.save(buf, format='JPEG')
-        buf.seek(0)
+        buf = img_to_buffer(img, output_format='JPEG')
 
         del img, x_sample, x_sample_ddim
         # don't delete x_samples, it is used in the code that called this callback
 
         thread_data.temp_images[str(req.session_id) + '/' + str(i)] = buf
+        task_temp_images[i] = buf
         partial_images.append({'path': f'/image/tmp/{req.session_id}/{i}'})
     return partial_images
 
 # Build and return the apropriate generator for do_mk_img
-def get_image_progress_generator(req, extra_props=None):
+def get_image_progress_generator(req, data_queue: queue.Queue, task_temp_images: list, step_callback, extra_props=None):
     if not req.stream_progress_updates:
         def empty_callback(x_samples, i): return x_samples
         return empty_callback
@@ -452,15 +482,17 @@ def get_image_progress_generator(req, extra_props=None):
            progress.update(extra_props)
 
        if req.stream_image_progress and i % 5 == 0:
-           progress['output'] = update_temp_img(req, x_samples)
+           progress['output'] = update_temp_img(req, x_samples, task_temp_images)
 
-       yield json.dumps(progress)
+       data_queue.put(json.dumps(progress))
+
+       step_callback()
 
        if thread_data.stop_processing:
            raise UserInitiatedStop("User requested that we stop processing")
    return img_callback
 
-def do_mk_img(req: Request):
+def do_mk_img(req: Request, data_queue: queue.Queue, task_temp_images: list, step_callback):
     thread_data.stop_processing = False
 
     res = Response()
@@ -469,28 +501,6 @@ def do_mk_img(req: Request):
 
     thread_data.temp_images.clear()
 
-    # custom model support:
-    # the req.use_stable_diffusion_model needs to be a valid path
-    # to the ckpt file (without the extension).
-    if not os.path.exists(req.use_stable_diffusion_model + '.ckpt'): raise FileNotFoundError(f'Cannot find {req.use_stable_diffusion_model}.ckpt')
-
-    needs_model_reload = False
-    if not thread_data.model or thread_data.ckpt_file != req.use_stable_diffusion_model or thread_data.vae_file != req.use_vae_model:
-        thread_data.ckpt_file = req.use_stable_diffusion_model
-        thread_data.vae_file = req.use_vae_model
-        needs_model_reload = True
-
-    if thread_data.device != 'cpu':
-        if (thread_data.precision == 'autocast' and (req.use_full_precision or not thread_data.model_is_half)) or \
-            (thread_data.precision == 'full' and not req.use_full_precision and not thread_data.force_full_precision):
-            thread_data.precision = 'full' if req.use_full_precision else 'autocast'
-            needs_model_reload = True
-
-    if needs_model_reload:
-        unload_models()
-        unload_filters()
-        load_model_ckpt()
-
     if thread_data.turbo != req.turbo and not thread_data.test_sd2:
         thread_data.turbo = req.turbo
         thread_data.model.turbo = req.turbo
@@ -606,7 +616,7 @@ def do_mk_img(req: Request):
            thread_data.modelFS.to(thread_data.device)
 
            n_steps = req.num_inference_steps if req.init_image is None else t_enc
-           img_callback = get_image_progress_generator(req, {"total_steps": n_steps})
+           img_callback = get_image_progress_generator(req, data_queue, task_temp_images, step_callback, {"total_steps": n_steps})
 
            # run the handler
            try:
@@ -615,13 +625,6 @@ def do_mk_img(req: Request):
                    x_samples = _txt2img(req.width, req.height, req.num_outputs, req.num_inference_steps, req.guidance_scale, None, opt_C, opt_f, opt_ddim_eta, c, uc, opt_seed, img_callback, mask, req.sampler)
                else:
                    x_samples = _img2img(init_latent, t_enc, batch_size, req.guidance_scale, c, uc, req.num_inference_steps, opt_ddim_eta, opt_seed, img_callback, mask, opt_C, req.height, req.width, opt_f)
-
-                if req.stream_progress_updates:
-                    yield from x_samples
-                if hasattr(thread_data, 'partial_x_samples'):
-                    if thread_data.partial_x_samples is not None:
-                        x_samples = thread_data.partial_x_samples
-                    del thread_data.partial_x_samples
            except UserInitiatedStop:
                if not hasattr(thread_data, 'partial_x_samples'):
                    continue
@@ -666,9 +669,11 @@ def do_mk_img(req: Request):
                    save_metadata(meta_out_path, req, prompts[0], opt_seed)
 
                if return_orig_img:
-                    img_str = img_to_base64_str(img, req.output_format)
+                    img_buffer = img_to_buffer(img, req.output_format)
+                    img_str = buffer_to_base64_str(img_buffer, req.output_format)
                    res_image_orig = ResponseImage(data=img_str, seed=opt_seed)
                    res.images.append(res_image_orig)
+                    task_temp_images[i] = img_buffer
 
                    if req.save_to_disk_path is not None:
                        res_image_orig.path_abs = img_out_path
@@ -684,9 +689,11 @@ def do_mk_img(req: Request):
                        filters_applied.append(req.use_upscale)
                    if (len(filters_applied) > 0):
                        filtered_image = Image.fromarray(img_data[i])
-                        filtered_img_data = img_to_base64_str(filtered_image, req.output_format)
+                        filtered_buffer = img_to_buffer(filtered_image, req.output_format)
+                        filtered_img_data = buffer_to_base64_str(filtered_buffer, req.output_format)
                        response_image = ResponseImage(data=filtered_img_data, seed=opt_seed)
                        res.images.append(response_image)
+                        task_temp_images[i] = filtered_buffer
                        if req.save_to_disk_path is not None:
                            filtered_img_out_path = get_base_path(req.save_to_disk_path, req.session_id, prompts[0], img_id, req.output_format, "_".join(filters_applied))
                            save_image(filtered_image, filtered_img_out_path)
@@ -705,7 +712,10 @@ def do_mk_img(req: Request):
        print(f'memory_final = {round(torch.cuda.memory_allocated(thread_data.device) / 1e6, 2)}Mb')
 
    print('Task completed')
-    yield json.dumps(res.json())
+    res = res.json()
+    data_queue.put(json.dumps(res))
+
+    return res
 
 def save_image(img, img_out_path):
     try:
@@ -771,7 +781,7 @@ def _txt2img(opt_W, opt_H, opt_n_samples, opt_ddim_steps, opt_scale, start_code,
     sampler.make_schedule(ddim_num_steps=opt_ddim_steps, ddim_eta=opt_ddim_eta, verbose=False)
 
 
-    samples_ddim = sampler.sample(
+    samples_ddim, intermediates = sampler.sample(
         S=opt_ddim_steps,
         conditioning=c,
         batch_size=opt_n_samples,
@@ -804,7 +814,7 @@ def _txt2img(opt_W, opt_H, opt_n_samples, opt_ddim_steps, opt_scale, start_code,
         mask=mask,
         sampler = sampler_name,
     )
-    yield from samples_ddim
+    return samples_ddim
 
 def _img2img(init_latent, t_enc, batch_size, opt_scale, c, uc, opt_ddim_steps, opt_ddim_eta, opt_seed, img_callback, mask, opt_C=1, opt_H=1, opt_W=1, opt_f=1):
     # encode (scaled latent)
@@ -842,7 +852,7 @@ def _img2img(init_latent, t_enc, batch_size, opt_scale, c, uc, opt_ddim_steps, o
         x_T=x_T,
         sampler = 'ddim'
     )
-    yield from samples_ddim
+    return samples_ddim
 
 def gc():
     gc_collect()
@@ -910,8 +920,16 @@ def load_mask(mask_str, h0, w0, newH, newW, invert=False):
 
 # https://stackoverflow.com/a/61114178
 def img_to_base64_str(img, output_format="PNG"):
+    buffered = img_to_buffer(img, output_format)
+    return buffer_to_base64_str(buffered, output_format)
+
+def img_to_buffer(img, output_format="PNG"):
     buffered = BytesIO()
     img.save(buffered, format=output_format)
+    buffered.seek(0)
+    return buffered
+
+def buffer_to_base64_str(buffered, output_format="PNG"):
     buffered.seek(0)
     img_byte = buffered.getvalue()
     mime_type = "image/png" if output_format.lower() == "png" else "image/jpeg"
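img_to_base64_str is split so callers can keep the raw BytesIO buffer (for temp/preview images) and only base64-encode when building the response. The hunk cuts off before the base64 tail, so the sketch below assumes it follows the linked Stack Overflow pattern of building a data URL from the mime type and base64 bytes; that tail is an assumption, not shown in the diff:

```python
import base64
from io import BytesIO
from PIL import Image

def img_to_buffer(img, output_format="PNG"):
    buffered = BytesIO()
    img.save(buffered, format=output_format)
    buffered.seek(0)
    return buffered

def buffer_to_base64_str(buffered, output_format="PNG"):
    buffered.seek(0)
    img_byte = buffered.getvalue()
    mime_type = "image/png" if output_format.lower() == "png" else "image/jpeg"
    # assumed tail: encode the raw bytes into a data URL
    return f"data:{mime_type};base64," + base64.b64encode(img_byte).decode()

# usage: encode a 1x1 image via the buffer helper
img = Image.new("RGB", (1, 1))
print(buffer_to_base64_str(img_to_buffer(img, "PNG"), "PNG")[:30])
```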
@@ -283,45 +283,26 @@ def thread_render(device):
         print(f'Session {task.request.session_id} starting task {id(task)} on {runtime.thread_data.device_name}')
         if not task.lock.acquire(blocking=False): raise Exception('Got locked task from queue.')
         try:
-            if runtime.thread_data.device == 'cpu' and is_alive() > 1:
-                # CPU is not the only device. Keep track of active time to unload resources later.
-                runtime.thread_data.lastActive = time.time()
-            # Open data generator.
-            res = runtime.mk_img(task.request)
-            if current_model_path == task.request.use_stable_diffusion_model:
-                current_state = ServerStates.Rendering
-            else:
-                current_state = ServerStates.LoadingModel
-            # Start reading from generator.
-            dataQueue = None
-            if task.request.stream_progress_updates:
-                dataQueue = task.buffer_queue
-            for result in res:
-                if current_state == ServerStates.LoadingModel:
-                    current_state = ServerStates.Rendering
-                    current_model_path = task.request.use_stable_diffusion_model
-                    current_vae_path = task.request.use_vae_model
-                if isinstance(current_state_error, SystemExit) or isinstance(current_state_error, StopAsyncIteration) or isinstance(task.error, StopAsyncIteration):
-                    runtime.thread_data.stop_processing = True
-                    if isinstance(current_state_error, StopAsyncIteration):
-                        task.error = current_state_error
-                        current_state_error = None
-                        print(f'Session {task.request.session_id} sent cancel signal for task {id(task)}')
-                if dataQueue:
-                    dataQueue.put(result)
-                if isinstance(result, str):
-                    result = json.loads(result)
-                    task.response = result
-                    if 'output' in result:
-                        for out_obj in result['output']:
-                            if 'path' in out_obj:
-                                img_id = out_obj['path'][out_obj['path'].rindex('/') + 1:]
-                                task.temp_images[int(img_id)] = runtime.thread_data.temp_images[out_obj['path'][11:]]
-                            elif 'data' in out_obj:
-                                buf = runtime.base64_str_to_buffer(out_obj['data'])
-                                task.temp_images[result['output'].index(out_obj)] = buf
-                    # Before looping back to the generator, mark cache as still alive.
-                    task_cache.keep(task.request.session_id, TASK_TTL)
+            if runtime.is_model_reload_necessary(task.request):
+                current_state = ServerStates.LoadingModel
+                runtime.reload_model()
+                current_model_path = task.request.use_stable_diffusion_model
+                current_vae_path = task.request.use_vae_model
+
+            def step_callback():
+                global current_state_error
+
+                if isinstance(current_state_error, SystemExit) or isinstance(current_state_error, StopAsyncIteration) or isinstance(task.error, StopAsyncIteration):
+                    runtime.thread_data.stop_processing = True
+                    if isinstance(current_state_error, StopAsyncIteration):
+                        task.error = current_state_error
+                        current_state_error = None
+                        print(f'Session {task.request.session_id} sent cancel signal for task {id(task)}')
+
+                task_cache.keep(task.request.session_id, TASK_TTL)
+
+            current_state = ServerStates.Rendering
+            task.response = runtime.mk_img(task.request, task.buffer_queue, task.temp_images, step_callback)
         except Exception as e:
             task.error = e
             print(traceback.format_exc())
ui/server.py (42 changed lines)

@@ -7,6 +7,7 @@ import traceback
 
 import sys
 import os
+import socket
 import picklescan.scanner
 import rich
 
@@ -144,12 +145,19 @@ def setConfig(config):
         print(traceback.format_exc())
 
 def resolve_model_to_use(model_name:str, model_type:str, model_dir:str, model_extensions:list, default_models=[]):
+    config = getConfig()
+
     model_dirs = [os.path.join(MODELS_DIR, model_dir), SD_DIR]
     if not model_name: # When None try user configured model.
-        config = getConfig()
+        # config = getConfig()
         if 'model' in config and model_type in config['model']:
            model_name = config['model'][model_type]
     if model_name:
+        is_sd2 = config.get('test_sd2', False)
+        if model_name.startswith('sd2_') and not is_sd2: # temp hack, until SD2 is unified with 1.4
+            print('ERROR: Cannot use SD 2.0 models with SD 1.0 code. Using the sd-v1-4 model instead!')
+            model_name = 'sd-v1-4'
+
         # Check models directory
         models_dir_path = os.path.join(MODELS_DIR, model_dir, model_name)
         for model_extension in model_extensions:
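resolve_model_to_use now reads the config up front and refuses to load an sd2_-prefixed checkpoint unless test_sd2 is enabled, falling back to sd-v1-4. A toy illustration of just that guard (the model name and config dict below are made up; this is not the project's function):

```python
def pick_model(requested_name, config):
    # Fall back to SD 1.4 when an SD 2.x checkpoint is requested but test_sd2 isn't enabled.
    is_sd2 = config.get('test_sd2', False)
    if requested_name and requested_name.startswith('sd2_') and not is_sd2:
        print('ERROR: Cannot use SD 2.0 models with SD 1.0 code. Using the sd-v1-4 model instead!')
        return 'sd-v1-4'
    return requested_name

print(pick_model('sd2_example-model', {'test_sd2': False}))  # -> sd-v1-4
print(pick_model('sd2_example-model', {'test_sd2': True}))   # -> sd2_example-model
```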
@@ -237,9 +245,9 @@ def is_malicious_model(file_path):
             return False
     except Exception as e:
         print('error while scanning', file_path, 'error:', e)
 
     return False
 
-
+known_models = {}
 def getModels():
     models = {
         'active': {
@@ -262,9 +270,14 @@ def getModels():
             if not file.endswith(model_extension):
                 continue
 
-            if is_malicious_model(os.path.join(models_dir, file)):
-                models['scan-error'] = file
-                return
+            model_path = os.path.join(models_dir, file)
+            mtime = os.path.getmtime(model_path)
+            mod_time = known_models[model_path] if model_path in known_models else -1
+            if mod_time != mtime:
+                if is_malicious_model(model_path):
+                    models['scan-error'] = file
+                    return
+            known_models[model_path] = mtime
 
             model_name = file[:-len(model_extension)]
             models['options'][model_type].append(model_name)
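getModels() now remembers each model file's modification time in known_models, so the (slow) picklescan malware check only re-runs when a file actually changed on disk. A simplified, runnable sketch of the same mtime-cache idea (the helper name is hypothetical):

```python
import os

known_models = {}  # path -> last-seen modification time

def needs_scan(model_path):
    # Only re-run the expensive scan when the file changed since we last saw it.
    mtime = os.path.getmtime(model_path)
    if known_models.get(model_path, -1) != mtime:
        known_models[model_path] = mtime
        return True
    return False

print(needs_scan(__file__))  # True on first sight
print(needs_scan(__file__))  # False, mtime unchanged
```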
@@ -293,6 +306,11 @@ def getUIPlugins():
 
     return plugins
 
+def getIPConfig():
+    ips = socket.gethostbyname_ex(socket.getfqdn())
+    ips[2].append(ips[0])
+    return ips[2]
+
 @app.get('/get/{key:path}')
 def read_web_data(key:str=None):
     if not key: # /get without parameters, stable-diffusion easter egg.
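getIPConfig() relies on socket.gethostbyname_ex, which returns a (hostname, aliases, addresses) tuple; appending ips[0] exposes the hostname alongside the IP addresses. A quick standalone example of what that call yields (values will differ per machine):

```python
import socket

hostname, aliases, addresses = socket.gethostbyname_ex(socket.getfqdn())
print(hostname)   # e.g. 'my-desktop.local'
print(addresses)  # e.g. ['127.0.1.1', '192.168.1.42']

addresses.append(hostname)  # same trick as getIPConfig(): list the hostname with the IPs
print(addresses)
```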
@@ -302,11 +320,14 @@ def read_web_data(key:str=None):
         if config is None:
             config = APP_CONFIG_DEFAULTS
         return JSONResponse(config, headers=NOCACHE_HEADERS)
-    elif key == 'devices':
+    elif key == 'system_info':
         config = getConfig()
-        devices = task_manager.get_devices()
-        devices['config'] = config.get('render_devices', "auto")
-        return JSONResponse(devices, headers=NOCACHE_HEADERS)
+        system_info = {
+            'devices': task_manager.get_devices(),
+            'hosts': getIPConfig(),
+        }
+        system_info['devices']['config'] = config.get('render_devices', "auto")
+        return JSONResponse(system_info, headers=NOCACHE_HEADERS)
     elif key == 'models':
         return JSONResponse(getModels(), headers=NOCACHE_HEADERS)
     elif key == 'modifiers': return FileResponse(os.path.join(SD_UI_DIR, 'modifiers.json'), headers=NOCACHE_HEADERS)
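The old /get/devices key is renamed to /get/system_info and now bundles the device map with the host addresses from getIPConfig(), which is what the systems settings dialog uses to show the server's network addresses. A hedged usage sketch (assumes a locally running server on the default port 9000):

```python
import json, urllib.request

# Hypothetical local call; adjust the host/port to wherever the UI server is running.
with urllib.request.urlopen("http://localhost:9000/get/system_info") as resp:
    info = json.loads(resp.read())

print(info["hosts"])              # network addresses reported by getIPConfig()
print(info["devices"]["config"])  # the configured render_devices setting ("auto" by default)
```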
@@ -442,6 +463,9 @@ class LogSuppressFilter(logging.Filter):
         return True
 logging.getLogger('uvicorn.access').addFilter(LogSuppressFilter())
 
+# Check models and prepare cache for UI open
+getModels()
+
 # Start the task_manager
 task_manager.default_model_to_load = resolve_ckpt_to_use()
 task_manager.default_vae_to_load = resolve_vae_to_use()