* only check for xformers when xformers are enabled
* only test for xformers when enabling them
This commit is contained in: parent 847daf25c7, commit 8e4733b3c3
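Both hunks below make the same one-line change: the xformers availability probe moves under a new guard, so the check only runs when the caller is actually enabling the feature. In outline (a sketch of the pattern, not the file's exact text):

    # before: probe for xformers unconditionally
    def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
        if not is_xformers_available():
            raise ModuleNotFoundError(...)

    # after: probe only on the enable path, so disabling works without xformers
    def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
        if use_memory_efficient_attention_xformers:
            if not is_xformers_available():
                raise ModuleNotFoundError(...)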
@@ -288,6 +288,7 @@ class AttentionBlock(nn.Module):
         self._use_memory_efficient_attention_xformers = False
 
     def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
+        if use_memory_efficient_attention_xformers:
             if not is_xformers_available():
                 raise ModuleNotFoundError(
                     "Refer to https://github.com/facebookresearch/xformers for more information on how to install"
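The hunk cuts off inside the raise. A minimal sketch of how the whole method plausibly reads after this commit — the import path, the trailing " xformers" string fragment, the name= keyword, and the final assignment are assumptions based on the surrounding diffusers code of this era, not part of the diff:

    from diffusers.utils import is_xformers_available  # assumed import path

    def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
        if use_memory_efficient_attention_xformers:
            if not is_xformers_available():
                raise ModuleNotFoundError(
                    "Refer to https://github.com/facebookresearch/xformers for more information on how to install"
                    " xformers",  # assumed continuation of the truncated string
                    name="xformers",  # assumed keyword, matching ImportError's signature
                )
        # assumed: record the flag either way; the attention forward pass reads it later
        self._use_memory_efficient_attention_xformers = use_memory_efficient_attention_xformers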
@@ -450,6 +451,7 @@ class BasicTransformerBlock(nn.Module):
         self.norm3 = nn.LayerNorm(dim)
 
     def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
+        if use_memory_efficient_attention_xformers:
             if not is_xformers_available():
                 print("Here is how to install it")
                 raise ModuleNotFoundError(
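The user-visible effect shows up through the diffusers pipeline helpers that forward to these methods. A usage sketch — the checkpoint name is only illustrative:

    from diffusers import StableDiffusionPipeline

    pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")

    # Enabling still raises ModuleNotFoundError when xformers is not installed:
    pipe.enable_xformers_memory_efficient_attention()

    # Disabling no longer probes for xformers, so after this commit it works
    # on machines that never installed the package:
    pipe.disable_xformers_memory_efficient_attention()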