Restore last generation params
parent 04c0e643f2
commit e72adc999b
4 changed files with 14 additions and 1 deletion

.gitignore (vendored)
@@ -17,6 +17,7 @@ __pycache__
 /webui.settings.bat
 /embeddings
 /styles.csv
+/params.txt
 /styles.csv.bak
 /webui-user.bat
 /webui-user.sh

@@ -14,7 +14,7 @@ titles = {
     "\u{1f3b2}\ufe0f": "Set seed to -1, which will cause a new random number to be used every time",
     "\u267b\ufe0f": "Reuse seed from last generation, mostly useful if it was randomed",
     "\u{1f3a8}": "Add a random artist to the prompt.",
-    "\u2199\ufe0f": "Read generation parameters from prompt into user interface.",
+    "\u2199\ufe0f": "Read generation parameters from prompt or last generation if prompt is empty into user interface.",
     "\u{1f4c2}": "Open images output directory",

     "Inpaint a part of image": "Draw a mask over an image, and the script will regenerate the masked area with content according to prompt",

@@ -1,5 +1,7 @@
+import os
 import re
 import gradio as gr
+from modules.shared import script_path
 
 re_param_code = r"\s*([\w ]+):\s*([^,]+)(?:,|$)"
 re_param = re.compile(re_param_code)
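The two unchanged lines at the end of this hunk define the pattern the module uses to split an infotext line into key/value pairs. A small standalone sketch of what that regex matches, reusing the example parameter line quoted in the next hunk header (the snippet is illustrative only and not part of the commit):

import re

# Same pattern as re_param_code above; reproduced here only for illustration.
re_param = re.compile(r"\s*([\w ]+):\s*([^,]+)(?:,|$)")

line = "Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512"

# findall returns one (key, value) tuple per comma-separated parameter.
params = {k.strip(): v.strip() for k, v in re_param.findall(line)}
print(params)
# {'Steps': '20', 'Sampler': 'Euler a', 'CFG scale': '7',
#  'Seed': '965400086', 'Size': '512x512'}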
@@ -61,6 +63,12 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
 
 def connect_paste(button, paste_fields, input_comp, js=None):
     def paste_func(prompt):
+        if not prompt:
+            filename = os.path.join(script_path, "params.txt")
+            if os.path.exists(filename):
+                with open(filename, "r", encoding="utf8") as file:
+                    prompt = file.read()
+
         params = parse_generation_parameters(prompt)
         res = []
 
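The effect of this hunk: the \u2199\ufe0f paste button no longer needs text in the prompt box; with an empty prompt, paste_func falls back to the parameters saved by the last generation in params.txt under script_path. A minimal standalone sketch of that fallback (the helper name and its script_path argument are illustrative, not names from the commit):

import os

def load_prompt_or_last_params(prompt, script_path):
    # Mirror of the fallback added to paste_func: an empty prompt is replaced
    # by the contents of <script_path>/params.txt, if that file exists.
    if not prompt:
        filename = os.path.join(script_path, "params.txt")
        if os.path.exists(filename):
            with open(filename, "r", encoding="utf8") as file:
                prompt = file.read()
    return prompt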
@@ -324,6 +324,10 @@ def process_images(p: StableDiffusionProcessing) -> Processed:
     else:
         assert p.prompt is not None
 
+    with open(os.path.join(shared.script_path, "params.txt"), "w", encoding="utf8") as file:
+        processed = Processed(p, [], p.seed, "")
+        file.write(processed.infotext(p, 0))
+
     devices.torch_gc()
 
     seed = get_fixed_seed(p.seed)
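On the generation side, these added lines persist the infotext of the current job to params.txt before images are produced, which is what the paste fallback above reads back. A rough sketch of the write step in isolation (script_path and the infotext string are stand-ins; in the commit the text comes from Processed(p, [], p.seed, "").infotext(p, 0)):

import os

script_path = "."  # stands in for modules.shared.script_path

infotext = (
    "example prompt\n"
    "Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512"
)

# Overwrite params.txt on every generation so the UI can restore the last run.
with open(os.path.join(script_path, "params.txt"), "w", encoding="utf8") as file:
    file.write(infotext)

Together with the read fallback in paste_func, this gives a simple file-based round trip: write on every generation, read back when the prompt box is empty.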