Supporting `*.safetensors` format.

If a model file exists with the extension `.safetensors`, then we can load it
more safely than with PyTorch weights.
This commit is contained in:
Nicolas Patry 2022-11-21 14:04:25 +01:00
parent 98947d173e
commit 0efffbb407
1 changed file with 9 additions and 2 deletions

View File

@@ -45,7 +45,7 @@ def checkpoint_tiles():
 def list_models():
     checkpoints_list.clear()
-    model_list = modelloader.load_models(model_path=model_path, command_path=shared.cmd_opts.ckpt_dir, ext_filter=[".ckpt"])
+    model_list = modelloader.load_models(model_path=model_path, command_path=shared.cmd_opts.ckpt_dir, ext_filter=[".ckpt", ".safetensors"])

 def modeltitle(path, shorthash):
     abspath = os.path.abspath(path)
@@ -180,6 +180,13 @@ def load_model_weights(model, checkpoint_info, vae_file="auto"):
     # load from file
     print(f"Loading weights [{sd_model_hash}] from {checkpoint_file}")

-    pl_sd = torch.load(checkpoint_file, map_location=shared.weight_load_location)
+    if checkpoint_file.endswith(".safetensors"):
+        try:
+            from safetensors.torch import load_file
+        except ImportError as e:
+            raise ImportError(f"The model is in safetensors format and it is not installed, use `pip install safetensors`: {e}")
+        pl_sd = load_file(checkpoint_file, device=shared.weight_load_location)
+    else:
+        pl_sd = torch.load(checkpoint_file, map_location=shared.weight_load_location)

     if "global_step" in pl_sd:
         print(f"Global Step: {pl_sd['global_step']}")