From 23cbc19adb57a4ab1d5649bae2a3e72fcc675898 Mon Sep 17 00:00:00 2001
From: Cyberes <64224601+Cyberes@users.noreply.github.com>
Date: Thu, 20 Oct 2022 21:01:19 -0600
Subject: [PATCH] Add files via upload

---
 other/prune_stable_diffusion_ckpt.py | 61 ++++++++++++++++++++++++++++
 1 file changed, 61 insertions(+)
 create mode 100644 other/prune_stable_diffusion_ckpt.py

diff --git a/other/prune_stable_diffusion_ckpt.py b/other/prune_stable_diffusion_ckpt.py
new file mode 100644
index 0000000..4ab776c
--- /dev/null
+++ b/other/prune_stable_diffusion_ckpt.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python3
+
+import argparse
+import glob
+import os
+
+import torch
+
+parser = argparse.ArgumentParser(description='Pruning')
+parser.add_argument('ckpt', type=str, default=None, help='path to model ckpt')
+args = parser.parse_args()
+ckpt = args.ckpt
+
+
+def prune_it(p, keep_only_ema=False):
+    print(f"prunin' in path: {p}")
+    size_initial = os.path.getsize(p)
+    nsd = dict()
+    sd = torch.load(p, map_location="cpu")
+    print(sd.keys())
+    for k in sd.keys():
+        if k != "optimizer_states":
+            nsd[k] = sd[k]
+        else:
+            print(f"removing optimizer states for path {p}")
+    if "global_step" in sd:
+        print(f"This is global step {sd['global_step']}.")
+    if keep_only_ema:
+        sd = nsd["state_dict"].copy()
+        # infer ema keys (EMA parameter names have the dots stripped)
+        ema_keys = {k: "model_ema." + k[6:].replace(".", "") for k in sd.keys() if k.startswith("model.")}
+        new_sd = dict()
+
+        for k in sd:
+            if k in ema_keys:
+                new_sd[k] = sd[ema_keys[k]].half()
+            elif not k.startswith("model_ema.") or k in ["model_ema.num_updates", "model_ema.decay"]:
+                new_sd[k] = sd[k].half()
+
+        assert len(new_sd) == len(sd) - len(ema_keys)
+        nsd["state_dict"] = new_sd
+    else:
+        sd = nsd['state_dict'].copy()
+        new_sd = dict()
+        for k in sd:
+            new_sd[k] = sd[k].half()
+        nsd['state_dict'] = new_sd
+
+    fn = f"{os.path.splitext(p)[0]}-pruned.ckpt" if not keep_only_ema else f"{os.path.splitext(p)[0]}-ema-pruned.ckpt"
+    print(f"saving pruned checkpoint at: {fn}")
+    torch.save(nsd, fn)
+    newsize = os.path.getsize(fn)
+    MSG = f"New ckpt size: {newsize*1e-9:.2f} GB. " + \
+          f"Saved {(size_initial - newsize)*1e-9:.2f} GB by removing optimizer states"
+    if keep_only_ema:
+        MSG += " and non-EMA weights"
+    print(MSG)
+
+
+if __name__ == "__main__":
+    prune_it(ckpt)
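
Usage note (not part of the patch): the script takes a single positional argument, the path to the checkpoint, e.g. python other/prune_stable_diffusion_ckpt.py model.ckpt, and writes a model-pruned.ckpt next to it with optimizer states dropped and weights cast to float16. The sketch below is a minimal, hypothetical sanity check of the output; it assumes the checkpoint follows the usual Stable Diffusion layout with a top-level "state_dict" dict, and the file names are placeholders.

#!/usr/bin/env python3
# check_pruned_ckpt.py -- hypothetical helper, not part of the patch above.
# Assumes a Stable Diffusion-style checkpoint with a top-level "state_dict";
# "model.ckpt" and "model-pruned.ckpt" are placeholder paths.
import torch

original = torch.load("model.ckpt", map_location="cpu")
pruned = torch.load("model-pruned.ckpt", map_location="cpu")

# The pruned file should no longer carry optimizer state.
assert "optimizer_states" not in pruned

# With the default (non-EMA) prune, every weight key survives, just cast to fp16.
orig_sd = original["state_dict"]
pruned_sd = pruned["state_dict"]
assert set(pruned_sd) == set(orig_sd)
assert all(v.dtype == torch.float16 for v in pruned_sd.values() if torch.is_tensor(v))

print(f"ok: {len(pruned_sd)} keys, all tensors stored as float16")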