Only test for xformers when enabling them #1773 (#1776)

* only check for xformers when xformers are enabled

* only test for xformers when enabling them
This commit is contained in:
Ilmari Heikkinen 2022-12-20 08:38:28 +08:00 committed by GitHub
parent 847daf25c7
commit 8e4733b3c3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
1 changed file with 47 additions and 45 deletions

View File

@ -288,6 +288,7 @@ class AttentionBlock(nn.Module):
self._use_memory_efficient_attention_xformers = False
def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
if use_memory_efficient_attention_xformers:
if not is_xformers_available():
raise ModuleNotFoundError(
"Refer to https://github.com/facebookresearch/xformers for more information on how to install"
@ -450,6 +451,7 @@ class BasicTransformerBlock(nn.Module):
self.norm3 = nn.LayerNorm(dim)
def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_attention_xformers: bool):
if use_memory_efficient_attention_xformers:
if not is_xformers_available():
print("Here is how to install it")
raise ModuleNotFoundError(