# reviewed 2022-09-16 22:49:31 -06:00 (stray blame timestamp converted to comment)
|
|
|
import threading
|
|
|
|
import time
|
|
|
|
from collections import defaultdict
|
|
|
|
|
|
|
|
import torch
|
|
|
|
|
|
|
|
|
|
|
|
class MemUsageMonitor(threading.Thread):
    """Daemon thread that tracks CUDA memory usage for a single device.

    Usage: construct, ``start()`` the thread, then call ``monitor()`` to begin
    polling free memory and ``stop()`` to end polling and collect counters.
    ``read()`` returns a dict of raw byte counts from torch (plus ``min_free``
    sampled by the poll loop). If CUDA memory statistics are unavailable
    (AMD or whatever), the monitor disables itself at construction time and
    every method becomes a no-op that returns empty data.
    """

    run_flag = None   # threading.Event — set while the poll loop should record
    device = None     # torch.device being monitored
    disabled = False  # True when CUDA stats are unavailable on this system
    opts = None       # options object; only opts.memmon_poll_rate is read
    data = None       # defaultdict(int) of collected counters (bytes)

    def __init__(self, name, device, opts):
        """name: thread name; device: torch.device to monitor; opts: object
        with a ``memmon_poll_rate`` attribute (polls per second; <= 0 disables
        polling for a given monitor() session)."""
        threading.Thread.__init__(self)
        self.name = name
        self.device = device
        self.opts = opts

        self.daemon = True
        self.run_flag = threading.Event()
        self.data = defaultdict(int)

        # Probe the CUDA memory APIs once up front; if either call fails
        # (AMD or whatever), disable the monitor instead of crashing later
        # inside the polling thread.
        try:
            self.cuda_mem_get_info()
            torch.cuda.memory_stats(self.device)
        except Exception as e:  # AMD or whatever
            print(f"Warning: caught exception '{e}', memory monitor disabled")
            self.disabled = True

    def cuda_mem_get_info(self):
        """Return ``(free, total)`` bytes for the monitored device.

        Falls back to the current CUDA device when ``self.device`` carries no
        explicit index (e.g. plain ``torch.device('cuda')``).
        """
        index = self.device.index if self.device.index is not None else torch.cuda.current_device()
        return torch.cuda.mem_get_info(index)

    def run(self):
        """Thread main loop: wait for monitor(), then poll free memory until stop()."""
        if self.disabled:
            return

        while True:
            self.run_flag.wait()

            # fix: reset peak stats for the monitored device, not whichever
            # device happens to be current in this thread
            torch.cuda.reset_peak_memory_stats(self.device)
            self.data.clear()

            if self.opts.memmon_poll_rate <= 0:
                self.run_flag.clear()
                continue

            self.data["min_free"] = self.cuda_mem_get_info()[0]

            while self.run_flag.is_set():
                free, total = self.cuda_mem_get_info()
                self.data["min_free"] = min(self.data["min_free"], free)

                # fix: re-read the poll rate each pass so a live change to
                # <= 0 stops polling cleanly instead of raising
                # ZeroDivisionError in 1 / rate
                poll_rate = self.opts.memmon_poll_rate
                if poll_rate <= 0:
                    self.run_flag.clear()
                    break

                time.sleep(1 / poll_rate)

    def dump_debug(self):
        """Print collected counters and raw torch memory stats (values shown
        as MiB, rounded up via ceiling division)."""
        print(self, 'recorded data:')
        for k, v in self.read().items():
            print(k, -(v // -(1024 ** 2)))  # -(v // -d) == ceil(v / d)

        print(self, 'raw torch memory stats:')
        tm = torch.cuda.memory_stats(self.device)
        for k, v in tm.items():
            if 'bytes' not in k:
                continue
            print('\t' if 'peak' in k else '', k, -(v // -(1024 ** 2)))

        print(torch.cuda.memory_summary())

    def monitor(self):
        """Signal the polling loop (started via Thread.start) to begin recording."""
        self.run_flag.set()

    def read(self):
        """Refresh instantaneous counters from torch and return ``self.data``.

        When the monitor is disabled, returns the (empty) data dict untouched.
        """
        if not self.disabled:
            free, total = self.cuda_mem_get_info()
            self.data["free"] = free
            self.data["total"] = total

            torch_stats = torch.cuda.memory_stats(self.device)
            # NOTE(review): "active.all.current" counts allocation blocks, not
            # bytes, unlike every other counter here — looks inconsistent with
            # "active_bytes.all.current"; left as-is since callers may rely on it.
            self.data["active"] = torch_stats["active.all.current"]
            self.data["active_peak"] = torch_stats["active_bytes.all.peak"]
            self.data["reserved"] = torch_stats["reserved_bytes.all.current"]
            self.data["reserved_peak"] = torch_stats["reserved_bytes.all.peak"]
            # peak memory used system-wide during the poll session
            self.data["system_peak"] = total - self.data["min_free"]

        return self.data

    def stop(self):
        """Stop the polling loop and return the final counters."""
        self.run_flag.clear()
        return self.read()
|