added support for setting hires fix from pasted prompts
added more robust detection of the last line containing parameters in pasted prompts
parent 53be15c204
commit 600cc0342d
2 changed files with 55 additions and 49 deletions
@@ -1,9 +1,11 @@
-from collections import namedtuple
 import re
 import gradio as gr

-re_param = re.compile(r"\s*([\w ]+):\s*([^,]+)(?:,|$)")
+re_param_code = r"\s*([\w ]+):\s*([^,]+)(?:,|$)"
+re_param = re.compile(re_param_code)
+re_params = re.compile(r"^(?:" + re_param_code + "){3,}$")
 re_imagesize = re.compile(r"^(\d+)x(\d+)$")
+type_of_gr_update = type(gr.update())


 def parse_generation_parameters(x: str):
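Note: the new re_params pattern only matches a line made up entirely of three or more "Key: value" pairs, which is how the parser below decides whether the last pasted line is a parameters line. A minimal illustration (not part of the diff; the sample strings are invented):

import re

re_param_code = r"\s*([\w ]+):\s*([^,]+)(?:,|$)"
re_params = re.compile(r"^(?:" + re_param_code + "){3,}$")

# a typical infotext parameters line: several "Key: value" pairs -> matches
print(bool(re_params.match("Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086")))  # True

# an ordinary prompt line has no such structure -> no match
print(bool(re_params.match("a photo of a cat, highly detailed")))  # False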
@@ -25,6 +27,10 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
     done_with_prompt = False

     *lines, lastline = x.strip().split("\n")
+    if not re_params.match(lastline):
+        lines.append(lastline)
+        lastline = ''
+
     for i, line in enumerate(lines):
         line = line.strip()
         if line.startswith("Negative prompt:"):
@@ -32,9 +38,9 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
             line = line[16:].strip()

         if done_with_prompt:
-            negative_prompt += line
+            negative_prompt += ("" if negative_prompt == "" else "\n") + line
         else:
-            prompt += line
+            prompt += ("" if prompt == "" else "\n") + line

     if len(prompt) > 0:
         res["Prompt"] = prompt
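Note: together, the two hunks above mean that a pasted text whose last line is not a parameters line keeps that line as prompt text, and multi-line prompts and negative prompts are rejoined with newlines instead of being concatenated. A self-contained sketch of just this slice of the logic (split_example is a made-up helper, not a function from the codebase):

import re

re_param_code = r"\s*([\w ]+):\s*([^,]+)(?:,|$)"
re_params = re.compile(r"^(?:" + re_param_code + "){3,}$")

def split_example(x: str):
    # mirror of the changed lines: keep the last line as prompt text
    # when it does not look like a parameters line
    *lines, lastline = x.strip().split("\n")
    if not re_params.match(lastline):
        lines.append(lastline)
        lastline = ''

    # prompt lines are rejoined with "\n" instead of being run together
    prompt = ""
    for line in lines:
        prompt += ("" if prompt == "" else "\n") + line.strip()
    return prompt, lastline

print(split_example("a castle on a hill\ndramatic lighting"))
# ('a castle on a hill\ndramatic lighting', '')  -- nothing is lost
print(split_example("a castle on a hill\nSteps: 20, Sampler: Euler a, CFG scale: 7"))
# ('a castle on a hill', 'Steps: 20, Sampler: Euler a, CFG scale: 7')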
@@ -53,19 +59,21 @@ Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 965400086, Size: 512x512, Model
     return res


-def connect_paste(button, d, input_comp, js=None):
-    items = []
-    outputs = []
-
+def connect_paste(button, paste_fields, input_comp, js=None):
     def paste_func(prompt):
         params = parse_generation_parameters(prompt)
         res = []

-        for key, output in zip(items, outputs):
-            v = params.get(key, None)
+        for output, key in paste_fields:
+            if callable(key):
+                v = key(params)
+            else:
+                v = params.get(key, None)

             if v is None:
                 res.append(gr.update())
+            elif isinstance(v, type_of_gr_update):
+                res.append(v)
             else:
                 try:
                     valtype = type(output.value)
@@ -76,13 +84,9 @@ def connect_paste(button, d, input_comp, js=None):
         return res

-    for k, v in d.items():
-        items.append(k)
-        outputs.append(v)
-
     button.click(
         fn=paste_func,
         _js=js,
         inputs=[input_comp],
-        outputs=outputs,
+        outputs=[x[0] for x in paste_fields],
     )
 
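Note: connect_paste now receives a list of (component, key) pairs rather than a dict keyed by parameter name, and a key may be a callable that derives the value from the parsed parameters (a returned gr.update()-style value is passed through by the new type_of_gr_update branch). The hunks below, from the second changed file, build these lists in the UI. A rough sketch of the lookup logic, with plain strings standing in for the Gradio components:

# entries pair a UI component with either a parameter name
# or a callable that computes the value from the parsed parameters;
# component names here are stand-ins, gradio is not required for this sketch
params = {"Prompt": "a castle on a hill", "Steps": "20", "Denoising strength": "0.7"}

paste_fields = [
    ("prompt_box", "Prompt"),                                     # plain key: looked up directly
    ("steps_slider", "Steps"),
    ("enable_hr_checkbox", lambda d: "Denoising strength" in d),  # callable: computed from params
]

for output, key in paste_fields:
    v = key(params) if callable(key) else params.get(key, None)
    print(output, "<-", v)
# prompt_box <- a castle on a hill
# steps_slider <- 20
# enable_hr_checkbox <- True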
@@ -521,23 +521,25 @@ def create_ui(txt2img, img2img, run_extras, run_pnginfo):
             ]
         )

-        txt2img_paste_fields = {
-            "Prompt": txt2img_prompt,
-            "Negative prompt": txt2img_negative_prompt,
-            "Steps": steps,
-            "Sampler": sampler_index,
-            "Face restoration": restore_faces,
-            "CFG scale": cfg_scale,
-            "Seed": seed,
-            "Size-1": width,
-            "Size-2": height,
-            "Batch size": batch_size,
-            "Variation seed": subseed,
-            "Variation seed strength": subseed_strength,
-            "Seed resize from-1": seed_resize_from_w,
-            "Seed resize from-2": seed_resize_from_h,
-            "Denoising strength": denoising_strength,
-        }
+        txt2img_paste_fields = [
+            (txt2img_prompt, "Prompt"),
+            (txt2img_negative_prompt, "Negative prompt"),
+            (steps, "Steps"),
+            (sampler_index, "Sampler"),
+            (restore_faces, "Face restoration"),
+            (cfg_scale, "CFG scale"),
+            (seed, "Seed"),
+            (width, "Size-1"),
+            (height, "Size-2"),
+            (batch_size, "Batch size"),
+            (subseed, "Variation seed"),
+            (subseed_strength, "Variation seed strength"),
+            (seed_resize_from_w, "Seed resize from-1"),
+            (seed_resize_from_h, "Seed resize from-2"),
+            (denoising_strength, "Denoising strength"),
+            (enable_hr, lambda d: "Denoising strength" in d),
+            (hr_options, lambda d: gr.Row.update(visible="Denoising strength" in d)),
+        ]
         modules.generation_parameters_copypaste.connect_paste(paste, txt2img_paste_fields, txt2img_prompt)

         with gr.Blocks(analytics_enabled=False) as img2img_interface:
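Note: the two extra txt2img entries are the hires fix support from the commit message: enable_hr is ticked whenever the parsed parameters contain a "Denoising strength" entry (its presence in a txt2img infotext is taken as the signal that hires fix was used), and hr_options gets a gr.Row.update() toggling the options row's visibility, which paste_func forwards unchanged via the type_of_gr_update branch. A small illustration, assuming a Gradio 3-style environment as targeted by this code:

import gradio as gr  # assumes Gradio 3.x is installed

params_with_hr = {"Prompt": "a castle", "Denoising strength": "0.7"}
params_without_hr = {"Prompt": "a castle"}

enable_hr_value = lambda d: "Denoising strength" in d
hr_options_value = lambda d: gr.Row.update(visible="Denoising strength" in d)

print(enable_hr_value(params_with_hr), enable_hr_value(params_without_hr))  # True False
print(hr_options_value(params_with_hr))  # an update payload that shows or hides the options row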
@@ -741,23 +743,23 @@ def create_ui(txt2img, img2img, run_extras, run_pnginfo):
             outputs=[prompt, negative_prompt, style1, style2],
         )

-        img2img_paste_fields = {
-            "Prompt": img2img_prompt,
-            "Negative prompt": img2img_negative_prompt,
-            "Steps": steps,
-            "Sampler": sampler_index,
-            "Face restoration": restore_faces,
-            "CFG scale": cfg_scale,
-            "Seed": seed,
-            "Size-1": width,
-            "Size-2": height,
-            "Batch size": batch_size,
-            "Variation seed": subseed,
-            "Variation seed strength": subseed_strength,
-            "Seed resize from-1": seed_resize_from_w,
-            "Seed resize from-2": seed_resize_from_h,
-            "Denoising strength": denoising_strength,
-        }
+        img2img_paste_fields = [
+            (img2img_prompt, "Prompt"),
+            (img2img_negative_prompt, "Negative prompt"),
+            (steps, "Steps"),
+            (sampler_index, "Sampler"),
+            (restore_faces, "Face restoration"),
+            (cfg_scale, "CFG scale"),
+            (seed, "Seed"),
+            (width, "Size-1"),
+            (height, "Size-2"),
+            (batch_size, "Batch size"),
+            (subseed, "Variation seed"),
+            (subseed_strength, "Variation seed strength"),
+            (seed_resize_from_w, "Seed resize from-1"),
+            (seed_resize_from_h, "Seed resize from-2"),
+            (denoising_strength, "Denoising strength"),
+        ]
         modules.generation_parameters_copypaste.connect_paste(paste, img2img_paste_fields, img2img_prompt)

         with gr.Blocks(analytics_enabled=False) as extras_interface: