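# Replaces the forward methods of several ldm blocks with versions that run
# through torch.utils.checkpoint. Checkpointing recomputes intermediate
# activations during the backward pass instead of keeping them in memory,
# trading extra compute for lower VRAM use while training.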
from torch.utils.checkpoint import checkpoint

import ldm.modules.attention
import ldm.modules.diffusionmodules.openaimodel


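# Checkpointed wrappers: each one defers to the block's original _forward,
# letting torch.utils.checkpoint handle saving/recomputing activations.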
def BasicTransformerBlock_forward(self, x, context=None):
    return checkpoint(self._forward, x, context)


def AttentionBlock_forward(self, x):
    return checkpoint(self._forward, x)


def ResBlock_forward(self, x, emb):
    return checkpoint(self._forward, x, emb)


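# Backup of the original (unpatched) forward methods, filled by add() and
# used by remove() to restore them.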
stored = []


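# Patch the three block classes with the checkpointed forwards above,
# saving the originals first. A no-op if the patch is already in place.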
def add():
    if len(stored) != 0:
        return

    stored.extend([
        ldm.modules.attention.BasicTransformerBlock.forward,
        ldm.modules.diffusionmodules.openaimodel.ResBlock.forward,
        ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward
    ])

    ldm.modules.attention.BasicTransformerBlock.forward = BasicTransformerBlock_forward
    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = ResBlock_forward
    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = AttentionBlock_forward


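# Undo add(): restore the saved forward methods (in the same order they
# were stored) and clear the backup.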
def remove():
    if len(stored) == 0:
        return

    ldm.modules.attention.BasicTransformerBlock.forward = stored[0]
    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = stored[1]
    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = stored[2]

    stored.clear()
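

# A minimal usage sketch (assumptions: this module is importable as
# sd_hijack_checkpoint, and the model/batch/loss names below are
# hypothetical stand-ins for a real training step):
#
#     import sd_hijack_checkpoint
#
#     sd_hijack_checkpoint.add()         # swap in checkpointed forwards
#     try:
#         loss = model(batch)            # hypothetical forward pass
#         loss.backward()                # activations recomputed here
#     finally:
#         sd_hijack_checkpoint.remove()  # restore the original forwards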