Resolve the issue with loading fp16 checkpoints while using --no-half

This commit is contained in:
AUTOMATIC1111 2023-08-17 07:54:07 +03:00
parent eaba3d7349
commit 0dc74545c0
1 changed file with 4 additions and 1 deletion

View File

@ -343,7 +343,10 @@ def load_model_weights(model, checkpoint_info: CheckpointInfo, state_dict, timer
model.to(memory_format=torch.channels_last)
timer.record("apply channels_last")
if not shared.cmd_opts.no_half:
if shared.cmd_opts.no_half:
model.float()
timer.record("apply float()")
else:
vae = model.first_stage_model
depth_model = getattr(model, 'depth_model', None)