`trained_betas` ignored in some schedulers (#635)

* correcting the beta value assignment
* updating DDIM and LMSDiscreteFlax schedulers
* bringing back the changes that were lost as part of main branch merge
This commit is contained in:
parent f10576ad5c
commit 3dacbb94ca
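Every hunk below makes the same one-token change: the first `beta_schedule` check becomes an `elif`, so a user-supplied `trained_betas` array is no longer clobbered by the default `"linear"` branch. A minimal sketch of the corrected branching, using a simplified standalone helper (`init_betas` is a hypothetical name standing in for the constructor logic, not part of the schedulers):

```python
from typing import Optional

import numpy as np
import torch


def init_betas(
    num_train_timesteps: int = 1000,
    beta_start: float = 0.0001,
    beta_end: float = 0.02,
    beta_schedule: str = "linear",
    trained_betas: Optional[np.ndarray] = None,
) -> torch.Tensor:
    # Simplified stand-in for the scheduler constructors touched by this commit.
    if trained_betas is not None:
        # User-supplied betas take precedence over any named schedule.
        betas = torch.from_numpy(trained_betas)
    # Before the fix this branch was a plain `if`, so it always ran for the
    # default "linear" schedule and silently overwrote the trained betas.
    elif beta_schedule == "linear":
        betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)
    elif beta_schedule == "scaled_linear":
        # this schedule is very specific to the latent diffusion model.
        betas = torch.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype=torch.float32) ** 2
    else:
        raise NotImplementedError(f"{beta_schedule} is not implemented")
    return betas
```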
@@ -131,7 +131,7 @@ class DDIMScheduler(SchedulerMixin, ConfigMixin):
 
         if trained_betas is not None:
             self.betas = torch.from_numpy(trained_betas)
-        if beta_schedule == "linear":
+        elif beta_schedule == "linear":
             self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)
         elif beta_schedule == "scaled_linear":
             # this schedule is very specific to the latent diffusion model.
@@ -86,7 +86,7 @@ class LMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
 
         if trained_betas is not None:
             self.betas = torch.from_numpy(trained_betas)
-        if beta_schedule == "linear":
+        elif beta_schedule == "linear":
             self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)
         elif beta_schedule == "scaled_linear":
             # this schedule is very specific to the latent diffusion model.
@@ -74,7 +74,7 @@ class FlaxLMSDiscreteScheduler(SchedulerMixin, ConfigMixin):
     ):
         if trained_betas is not None:
             self.betas = jnp.asarray(trained_betas)
-        if beta_schedule == "linear":
+        elif beta_schedule == "linear":
             self.betas = jnp.linspace(beta_start, beta_end, num_train_timesteps, dtype=jnp.float32)
         elif beta_schedule == "scaled_linear":
             # this schedule is very specific to the latent diffusion model.
@@ -111,7 +111,7 @@ class PNDMScheduler(SchedulerMixin, ConfigMixin):
 
         if trained_betas is not None:
             self.betas = torch.from_numpy(trained_betas)
-        if beta_schedule == "linear":
+        elif beta_schedule == "linear":
             self.betas = torch.linspace(beta_start, beta_end, num_train_timesteps, dtype=torch.float32)
         elif beta_schedule == "scaled_linear":
             # this schedule is very specific to the latent diffusion model.
@@ -132,7 +132,7 @@ class FlaxPNDMScheduler(SchedulerMixin, ConfigMixin):
     ):
         if trained_betas is not None:
            self.betas = jnp.asarray(trained_betas)
-        if beta_schedule == "linear":
+        elif beta_schedule == "linear":
             self.betas = jnp.linspace(beta_start, beta_end, num_train_timesteps, dtype=jnp.float32)
         elif beta_schedule == "scaled_linear":
             # this schedule is very specific to the latent diffusion model.
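As a quick check of the behavior this commit restores (a sketch, assuming `diffusers` is installed and `DDIMScheduler` is exposed at the package root; the custom schedule values are only illustrative):

```python
import numpy as np
from diffusers import DDIMScheduler

# A custom beta schedule that clearly differs from the default linear ramp.
custom_betas = np.linspace(0.001, 0.01, 1000, dtype=np.float32)

scheduler = DDIMScheduler(trained_betas=custom_betas)

# Before this commit the default "linear" branch overwrote self.betas;
# with the fix the user-supplied values are kept.
assert np.allclose(scheduler.betas.numpy(), custom_betas)
```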