diff --git a/src/diffusers/models/attention.py b/src/diffusers/models/attention.py
index 60f0c8bf..6f813733 100644
--- a/src/diffusers/models/attention.py
+++ b/src/diffusers/models/attention.py
@@ -297,8 +297,8 @@ class AttentionBlock(nn.Module):
                 )
             elif not torch.cuda.is_available():
                 raise ValueError(
-                    "torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is only"
-                    " available for GPU "
+                    "torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is"
+                    " only available for GPU "
                 )
             else:
                 try:
@@ -461,8 +461,8 @@ class BasicTransformerBlock(nn.Module):
                 )
             elif not torch.cuda.is_available():
                 raise ValueError(
-                    "torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is only"
-                    " available for GPU "
+                    "torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is"
+                    " only available for GPU "
                 )
             else:
                 try: