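# Replaces the forward methods of ldm's BasicTransformerBlock, ResBlock and
# AttentionBlock with thin wrappers that route each call through
# torch.utils.checkpoint, so intermediate activations are recomputed during
# the backward pass instead of being kept in memory.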
from torch.utils.checkpoint import checkpoint

import ldm.modules.attention
import ldm.modules.diffusionmodules.openaimodel


def BasicTransformerBlock_forward(self, x, context=None):
    return checkpoint(self._forward, x, context)


def AttentionBlock_forward(self, x):
    return checkpoint(self._forward, x)


def ResBlock_forward(self, x, emb):
    return checkpoint(self._forward, x, emb)


# original forward methods, saved here so remove() can restore them
stored = []


def add():
    if len(stored) != 0:
        return

    stored.extend([
        ldm.modules.attention.BasicTransformerBlock.forward,
        ldm.modules.diffusionmodules.openaimodel.ResBlock.forward,
        ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward
    ])

    ldm.modules.attention.BasicTransformerBlock.forward = BasicTransformerBlock_forward
    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = ResBlock_forward
    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = AttentionBlock_forward


def remove():
    if len(stored) == 0:
        return

    ldm.modules.attention.BasicTransformerBlock.forward = stored[0]
    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = stored[1]
    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = stored[2]

    stored.clear()
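

# Typical (assumed) usage is to bracket a training run with these helpers so
# the stock forward methods are restored afterwards:
#
#   add()
#   try:
#       ...  # training that should use activation checkpointing
#   finally:
#       remove()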