do not touch p.seed/p.subseed during processing #1181

This commit is contained in:
AUTOMATIC 2022-10-04 17:36:39 +03:00
parent 957e29a8e9
commit e1b128d8e4
1 changed file with 17 additions and 9 deletions


@@ -248,9 +248,16 @@ def create_random_tensors(shape, seeds, subseeds=None, subseed_strength=0.0, see
     return x
 
 
+def get_fixed_seed(seed):
+    if seed is None or seed == '' or seed == -1:
+        return int(random.randrange(4294967294))
+
+    return seed
+
+
 def fix_seed(p):
-    p.seed = int(random.randrange(4294967294)) if p.seed is None or p.seed == '' or p.seed == -1 else p.seed
-    p.subseed = int(random.randrange(4294967294)) if p.subseed is None or p.subseed == '' or p.subseed == -1 else p.subseed
+    p.seed = get_fixed_seed(p.seed)
+    p.subseed = get_fixed_seed(p.subseed)
 
 
 def create_infotext(p, all_prompts, all_seeds, all_subseeds, comments, iteration=0, position_in_batch=0):
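
The new get_fixed_seed helper centralises the convention that -1, an empty string, or None all mean "pick a random seed", while any concrete value passes through unchanged. A minimal standalone sketch of that behaviour (reimplemented here only for illustration, outside the diffed module):

import random

def get_fixed_seed(seed):
    # -1, '' and None request a fresh random seed; anything else is returned as-is
    if seed is None or seed == '' or seed == -1:
        return int(random.randrange(4294967294))
    return seed

print(get_fixed_seed(-1))    # some random value in [0, 4294967293]
print(get_fixed_seed(1234))  # 1234, unchanged
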
@@ -292,7 +299,8 @@ def process_images(p: StableDiffusionProcessing) -> Processed:
 
     devices.torch_gc()
 
-    fix_seed(p)
+    seed = get_fixed_seed(p.seed)
+    subseed = get_fixed_seed(p.subseed)
 
     if p.outpath_samples is not None:
         os.makedirs(p.outpath_samples, exist_ok=True)
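
The behavioural change here is that process_images now resolves the seeds into local variables instead of writing them back onto the processing object, so a caller that passes seed=-1 still sees -1 afterwards and repeated calls keep drawing fresh random seeds. A hedged sketch of that contract, with SimpleNamespace standing in for StableDiffusionProcessing and a hypothetical helper reduced to the seed handling only:

import random
from types import SimpleNamespace

def get_fixed_seed(seed):
    if seed is None or seed == '' or seed == -1:
        return int(random.randrange(4294967294))
    return seed

def process_images_seed_handling(p):
    # after this commit: resolve locally, leave p untouched
    seed = get_fixed_seed(p.seed)
    subseed = get_fixed_seed(p.subseed)
    return seed, subseed

p = SimpleNamespace(seed=-1, subseed=-1)
process_images_seed_handling(p)
assert p.seed == -1 and p.subseed == -1  # the caller's "randomize" request is preserved
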
@@ -311,15 +319,15 @@ def process_images(p: StableDiffusionProcessing) -> Processed:
     else:
         all_prompts = p.batch_size * p.n_iter * [p.prompt]
 
-    if type(p.seed) == list:
-        all_seeds = p.seed
+    if type(seed) == list:
+        all_seeds = seed
     else:
-        all_seeds = [int(p.seed) + (x if p.subseed_strength == 0 else 0) for x in range(len(all_prompts))]
+        all_seeds = [int(seed) + (x if p.subseed_strength == 0 else 0) for x in range(len(all_prompts))]
 
-    if type(p.subseed) == list:
-        all_subseeds = p.subseed
+    if type(subseed) == list:
+        all_subseeds = subseed
     else:
-        all_subseeds = [int(p.subseed) + x for x in range(len(all_prompts))]
+        all_subseeds = [int(subseed) + x for x in range(len(all_prompts))]
 
     def infotext(iteration=0, position_in_batch=0):
         return create_infotext(p, all_prompts, all_seeds, all_subseeds, comments, iteration, position_in_batch)
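
The per-image expansion itself is unchanged apart from reading the local variables: with subseed strength at 0 the main seed increments across the batch, otherwise the main seed is held constant and only the subseed increments. A small illustration of that rule for scalar inputs, using a hypothetical helper and arbitrary values:

def expand_seeds(seed, subseed, subseed_strength, n_images):
    # mirrors the two list comprehensions above for a scalar seed/subseed
    all_seeds = [int(seed) + (x if subseed_strength == 0 else 0) for x in range(n_images)]
    all_subseeds = [int(subseed) + x for x in range(n_images)]
    return all_seeds, all_subseeds

print(expand_seeds(1000, 50, 0.0, 4))  # ([1000, 1001, 1002, 1003], [50, 51, 52, 53])
print(expand_seeds(1000, 50, 0.3, 4))  # ([1000, 1000, 1000, 1000], [50, 51, 52, 53])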