Added tag parsing for prompts from file

This commit is contained in:
shirase-0 2022-10-02 00:43:24 +10:00
parent 3f417566b0
commit 27fbf3de4a
1 changed file with 57 additions and 1 deletion


@@ -2,6 +2,7 @@ import math
 import os
 import sys
 import traceback
+from xml.etree.ElementTree import tostring
 
 import modules.scripts as scripts
 import gradio as gr
@@ -29,6 +30,44 @@ class Script(scripts.Script):
         checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
         return [checkbox_txt, file, prompt_txt]
 
+    def process_string_tag(self, tag):
+        return tag[1:-2]
+
+    def process_int_tag(self, tag):
+        return int(tag)
+
+    def process_float_tag(self, tag):
+        return float(tag)
+
+    def process_boolean_tag(self, tag):
+        return True if (tag == "true") else False
+
+    prompt_tags = {
+        "sd_model": None,
+        "outpath_samples": process_string_tag,
+        "outpath_grids": process_string_tag,
+        "prompt_for_display": process_string_tag,
+        "prompt": process_string_tag,
+        "negative_prompt": process_string_tag,
+        "styles": process_string_tag,
+        "seed": process_int_tag,
+        "subseed_strength": process_float_tag,
+        "subseed": process_int_tag,
+        "seed_resize_from_h": process_int_tag,
+        "seed_resize_from_w": process_int_tag,
+        "sampler_index": process_int_tag,
+        "batch_size": process_int_tag,
+        "n_iter": process_int_tag,
+        "steps": process_int_tag,
+        "cfg_scale": process_float_tag,
+        "width": process_int_tag,
+        "height": process_int_tag,
+        "restore_faces": process_boolean_tag,
+        "tiling": process_boolean_tag,
+        "do_not_save_samples": process_boolean_tag,
+        "do_not_save_grid": process_boolean_tag
+    }
+
     def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
         if (checkbox_txt):
             lines = [x.strip() for x in prompt_txt.splitlines()]
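The converters added above are stored as plain function objects in a class-level dict, which is why the caller later invokes them as value_func(self, ...). A minimal standalone sketch of the same lookup-and-convert pattern (illustrative only, not part of the commit; the self parameter is omitted and the sample tag values are hypothetical):

# Illustrative dispatch-table sketch; mirrors prompt_tags but is not the commit's code.
def process_int_tag(tag):
    return int(tag)

def process_float_tag(tag):
    return float(tag)

converters = {
    "seed": process_int_tag,        # same tag names as in prompt_tags above
    "cfg_scale": process_float_tag,
}

for name, raw in [("seed", "12345"), ("cfg_scale", "7.5"), ("bogus", "x")]:
    convert = converters.get(name, None)
    if convert is not None:
        print(name, convert(raw))   # "seed 12345" and "cfg_scale 7.5", converted to int/float
    else:
        print(f"Unknown option \"{name}\"")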
@@ -39,6 +78,7 @@ class Script(scripts.Script):
         img_count = len(lines) * p.n_iter
         batch_count = math.ceil(img_count / p.batch_size)
         loop_count = math.ceil(batch_count / p.n_iter)
+        # These numbers no longer accurately reflect the total images and number of batches
         print(f"Will process {img_count} images in {batch_count} batches.")
 
         p.do_not_save_grid = True
@@ -48,7 +88,23 @@ class Script(scripts.Script):
         images = []
         for loop_no in range(loop_count):
             state.job = f"{loop_no + 1} out of {loop_count}"
-            p.prompt = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
+            # The following line may need revising to remove batch_size references
+            current_line = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
+            if(current_line[0][:2] != "--"):
+                p.prompt = current_line
+            else:
+                tokenized_line = current_line[0].split("--")
+
+                for tag in tokenized_line:
+                    tag_split = tag.split(" ", 1)
+                    if(tag_split[0] != ''):
+                        value_func = self.prompt_tags.get(tag_split[0], None)
+                        if(value_func != None):
+                            value = value_func(self, tag_split[1])
+                            setattr(p, tag_split[0], value)
+                        else:
+                            print(f"Unknown option \"{tag_split}\"")
+
             proc = process_images(p)
             images += proc.images
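With this change, a line in the prompts file that starts with "--" is treated as a set of overrides for that processing call rather than as a literal prompt: the line is split on "--", each token is split once on a space into a name/value pair, the name is looked up in prompt_tags, and the converted value is assigned to the matching attribute of p via setattr; unrecognized names are printed as unknown options. An illustrative line using this syntax (hypothetical values; quoted string values are assumed, since process_string_tag strips the opening quote and the last two characters of the token):

--prompt "a lighthouse at sunset" --seed 12345 --steps 30 --cfg_scale 7.5 --width 512 --height 512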