Use devices.torch_gc() instead of empty_cache()

Ritesh Gangnani 2023-11-05 20:31:57 +05:30
parent 44db35fb1a
commit 44c5097375
2 changed files with 1 addition and 5 deletions
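The change is mechanical: the manual torch.cuda.empty_cache() / gc.collect() pair in the SSD-1B conversion path is replaced by the shared devices.torch_gc() helper, so GPU cleanup stays behind a single call and the local gc import can go. As a rough illustration only (not the repository's actual implementation), a helper of this kind typically looks like the sketch below; only the name torch_gc comes from the diff, the body is assumed.

import gc
import torch

def torch_gc():
    # Illustrative sketch: collect unreachable Python objects first,
    # then release cached CUDA allocations back to the driver.
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
        torch.cuda.ipc_collect()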


@@ -1,5 +1,3 @@
-import gc
 import torch
 from torch.nn.functional import silu
 from types import MethodType
@@ -193,8 +191,7 @@ class StableDiffusionModelHijack:
             delattr(m.model.diffusion_model.output_blocks[1][1].transformer_blocks,i)
         delattr(m.model.diffusion_model.output_blocks[4][1].transformer_blocks,'1')
         delattr(m.model.diffusion_model.output_blocks[5][1].transformer_blocks,'1')
-        torch.cuda.empty_cache()
-        gc.collect()
+        devices.torch_gc()

     def hijack(self, m):
         conditioner = getattr(m, 'conditioner', None)

@@ -347,7 +347,6 @@ def load_model_weights(model, checkpoint_info: CheckpointInfo, state_dict, timer
     model.is_sd2 = not model.is_sdxl and hasattr(model.cond_stage_model, 'model')
     model.is_sd1 = not model.is_sdxl and not model.is_sd2
     model.is_ssd = model.is_sdxl and 'model.diffusion_model.middle_block.1.transformer_blocks.0.attn1.to_q.weight' not in state_dict.keys()
     if model.is_sdxl:
         sd_models_xl.extend_sdxl(model)