From 642b832ca07e1abc9da87bc81833e1d0d7ee6ee3 Mon Sep 17 00:00:00 2001
From: Victor Hall
Date: Sat, 10 Jun 2023 16:29:03 -0400
Subject: [PATCH] remove failure when people do weird custom configs on models

---
 utils/unet_utils.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/utils/unet_utils.py b/utils/unet_utils.py
index 960b3d7..1c953fd 100644
--- a/utils/unet_utils.py
+++ b/utils/unet_utils.py
@@ -18,6 +18,7 @@ import json
 import logging
 
 import torch
+from colorama import Fore, Style
 
 def enforce_zero_terminal_snr(betas):
     # from https://arxiv.org/pdf/2305.08891.pdf
@@ -67,7 +68,9 @@ def get_attn_yaml(ckpt_path):
     elif prediction_type == "epsilon" and is_sd1attn:
         yaml = "v1-inference.yaml"
     else:
-        raise ValueError(f"Unknown model format for: {prediction_type} and attention_head_dim {unet_cfg['attention_head_dim']}")
+        logging.warning(f"{Fore.YELLOW}Unknown model format for: {prediction_type} and attention_head_dim {unet_cfg['attention_head_dim']}{Style.RESET_ALL}")
+        yaml = "v1-inference.yaml" # HACK: for now this means no yaml is saved together with .ckpt files during checkpointing
 
     logging.info(f"Inferred yaml: {yaml}, attn: {'sd1' if is_sd1attn else 'sd2'}, prediction_type: {prediction_type}")