@@ -10,7 +10,7 @@ def empty_cuda():
while True:
gc.collect()
torch.cuda.empty_cache()
time.sleep(0.5)
vram = nvidia_smi.get_gpu_stats()["memory_used"]
print("vram: %d MB" % vram)
if vram < 200: