From c5142e2fbecb50531a55aa804ea132c5d870858c Mon Sep 17 00:00:00 2001
From: brkirch
Date: Fri, 24 Mar 2023 02:58:18 -0400
Subject: [PATCH 1/2] Add workaround for broken nn.Linear on macOS 13.2

Credit to danieldk (https://github.com/explosion/curated-transformers/pull/124)
for the workaround this is based on.
---
 html/licenses.html      | 26 ++++++++++++++++++++++++++
 modules/mac_specific.py |  5 +++++
 2 files changed, 31 insertions(+)

diff --git a/html/licenses.html b/html/licenses.html
index bddbf4665..bc995aa07 100644
--- a/html/licenses.html
+++ b/html/licenses.html
@@ -635,4 +635,30 @@ SOFTWARE.
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
+</pre>
+
+<h2><a href="https://github.com/explosion/curated-transformers">Curated transformers</a></h2>
+<small>The MPS workaround for nn.Linear on macOS 13.2.X is based on the MPS workaround for nn.Linear created by danieldk for Curated transformers</small>
+<pre>
+The MIT License (MIT)
+
+Copyright (C) 2021 ExplosionAI GmbH
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
 </pre>
\ No newline at end of file
diff --git a/modules/mac_specific.py b/modules/mac_specific.py
index 18e6ff720..3a170f60b 100644
--- a/modules/mac_specific.py
+++ b/modules/mac_specific.py
@@ -1,4 +1,5 @@
 import torch
+import platform
 from modules import paths
 from modules.sd_hijack_utils import CondFunc
 from packaging import version
@@ -32,6 +33,10 @@ if has_mps:
     # MPS fix for randn in torchsde
     CondFunc('torchsde._brownian.brownian_interval._randn', lambda _, size, dtype, device, seed: torch.randn(size, dtype=dtype, device=torch.device("cpu"), generator=torch.Generator(torch.device("cpu")).manual_seed(int(seed))).to(device), lambda _, size, dtype, device, seed: device.type == 'mps')

+    if platform.mac_ver()[0].startswith("13.2."):
+        # MPS workaround for https://github.com/pytorch/pytorch/issues/95188, thanks to danieldk (https://github.com/explosion/curated-transformers/pull/124)
+        CondFunc('torch.nn.functional.linear', lambda _, input, weight, bias: (torch.matmul(input, weight.t()) + bias) if bias is not None else torch.matmul(input, weight.t()), lambda _, input, weight, bias: input.numel() > 10485760)
+
     if version.parse(torch.__version__) < version.parse("1.13"):
         # PyTorch 1.13 doesn't need these fixes but unfortunately is slower and has regressions that prevent training from working


From 27fe3eb6a9d8f866af8b90dff18f4445124702da Mon Sep 17 00:00:00 2001
From: brkirch
Date: Fri, 24 Mar 2023 03:04:47 -0400
Subject: [PATCH 2/2] Add workaround for MPS layer_norm on PyTorch 2.0

On PyTorch 2.0, layer_norm on MPS only accepts float32 inputs. This was fixed
shortly after 2.0 was finalized, so the workaround can be applied with an
exact version match.
---
 modules/mac_specific.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/modules/mac_specific.py b/modules/mac_specific.py
index 3a170f60b..6fe8dea07 100644
--- a/modules/mac_specific.py
+++ b/modules/mac_specific.py
@@ -54,4 +54,6 @@ if has_mps:
         CondFunc('torch.cumsum', cumsum_fix_func, None)
         CondFunc('torch.Tensor.cumsum', cumsum_fix_func, None)
         CondFunc('torch.narrow', lambda orig_func, *args, **kwargs: orig_func(*args, **kwargs).clone(), None)
-
+    if version.parse(torch.__version__) == version.parse("2.0"):
+        # MPS workaround for https://github.com/pytorch/pytorch/issues/96113
+        CondFunc('torch.nn.functional.layer_norm', lambda orig_func, x, normalized_shape, weight, bias, eps, **kwargs: orig_func(x.float(), normalized_shape, weight.float() if weight is not None else None, bias.float() if bias is not None else bias, eps).to(x.dtype), lambda *args, **kwargs: len(args) == 6)
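
Note (not part of the patches above): both hooks go through the webui's CondFunc helper from modules.sd_hijack_utils, which replaces the named torch function and routes a call to the substitute lambda only when the condition lambda returns true. As a rough, self-contained illustration of what the patched behaviour amounts to, the sketch below applies the same two workarounds by rebinding torch.nn.functional directly. The names _patched_linear and _patched_layer_norm and the explicit MPS device-type checks are illustrative additions, not taken from the patches.

# Illustrative sketch only -- equivalent in spirit to the CondFunc hooks above,
# written as plain monkey-patches so it can be read on its own.
import platform

import torch
import torch.nn.functional as F
from packaging import version

_orig_linear = F.linear
_orig_layer_norm = F.layer_norm

def _patched_linear(input, weight, bias=None):
    # macOS 13.2.x MPS bug (pytorch#95188): nn.Linear gives wrong results for
    # large inputs, so fall back to an explicit matmul above the size threshold.
    if input.device.type == "mps" and input.numel() > 10485760:
        out = torch.matmul(input, weight.t())
        return out if bias is None else out + bias
    return _orig_linear(input, weight, bias)

def _patched_layer_norm(input, normalized_shape, weight=None, bias=None, eps=1e-5):
    # PyTorch 2.0 MPS bug (pytorch#96113): layer_norm only accepts float32, so
    # upcast, run the original op, and cast the result back to the input dtype.
    if input.device.type == "mps" and input.dtype != torch.float32:
        return _orig_layer_norm(
            input.float(),
            normalized_shape,
            weight.float() if weight is not None else None,
            bias.float() if bias is not None else None,
            eps,
        ).to(input.dtype)
    return _orig_layer_norm(input, normalized_shape, weight, bias, eps)

if platform.mac_ver()[0].startswith("13.2."):
    F.linear = _patched_linear
if version.parse(torch.__version__) == version.parse("2.0"):
    F.layer_norm = _patched_layer_norm

The float32 round-trip mirrors the second patch's CondFunc hook: the casts are applied around the original op, so half-precision pipelines keep working on the affected PyTorch 2.0 release while the output dtype is preserved.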