add Discard penultimate sigma to infotext
parent c53852e257
commit 42fcc79bd3
@@ -463,8 +463,12 @@ class KDiffusionSampler:
         return extra_params_kwargs
 
     def get_sigmas(self, p, steps):
-        disc = opts.always_discard_next_to_last_sigma or (self.config is not None and self.config.options.get('discard_next_to_last_sigma', False))
-        steps += 1 if disc else 0
+        discard_next_to_last_sigma = self.config is not None and self.config.options.get('discard_next_to_last_sigma', False)
+        if opts.always_discard_next_to_last_sigma and not discard_next_to_last_sigma:
+            discard_next_to_last_sigma = True
+            p.extra_generation_params["Discard penultimate sigma"] = True
+
+        steps += 1 if discard_next_to_last_sigma else 0
 
         if p.sampler_noise_scheduler_override:
            sigmas = p.sampler_noise_scheduler_override(steps)
@@ -475,7 +479,7 @@ class KDiffusionSampler:
         else:
             sigmas = self.model_wrap.get_sigmas(steps)
 
-        if disc:
+        if discard_next_to_last_sigma:
             sigmas = torch.cat([sigmas[:-2], sigmas[-1:]])
 
         return sigmas
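A minimal, self-contained sketch of the behaviour this commit adds, using hypothetical stand-ins for opts, self.config and p (plain SimpleNamespace objects, not the real webui classes): if either the sampler's own option or the global always-discard setting requests it, one extra step's worth of sigmas is generated and the second-to-last sigma is dropped, and only the global-override case is recorded in extra_generation_params so it shows up in the infotext.

# Illustrative sketch only; SimpleNamespace stand-ins replace the real
# opts / sampler config / processing objects from the webui.
from types import SimpleNamespace
import torch

opts = SimpleNamespace(always_discard_next_to_last_sigma=True)
config = SimpleNamespace(options={})            # sampler config without the flag set
p = SimpleNamespace(extra_generation_params={})

def get_sigmas_sketch(steps, raw_schedule):
    # Per-sampler flag read from the sampler's config options.
    discard = config is not None and config.options.get('discard_next_to_last_sigma', False)

    # Global override: force the discard and note it in the generation
    # parameters so the setting is reproducible from the infotext.
    if opts.always_discard_next_to_last_sigma and not discard:
        discard = True
        p.extra_generation_params["Discard penultimate sigma"] = True

    # Ask for one extra step so the trimmed schedule keeps the requested length.
    steps += 1 if discard else 0
    sigmas = raw_schedule(steps)

    if discard:
        # Drop the second-to-last sigma but keep the trailing zero.
        sigmas = torch.cat([sigmas[:-2], sigmas[-1:]])

    return sigmas

# Toy schedule: n+1 linearly spaced sigmas ending at 0.
schedule = lambda n: torch.linspace(10.0, 0.0, n + 1)
print(get_sigmas_sketch(5, schedule))
print(p.extra_generation_params)  # {'Discard penultimate sigma': True}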