From 389bbe48a5ef025aaab7eabf8831e0582df8e612 Mon Sep 17 00:00:00 2001 From: DerThomy Date: Sun, 22 Dec 2024 13:45:05 +0100 Subject: [PATCH] Empty torch cache after optimizer tensor replacement Without emptying the cache, VRAM usage was very high and I was not able to train beyond 1 million Gaussians with 24 GB of VRAM. With this change I can train to over 5 million Gaussians. --- scene/gaussian_model.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scene/gaussian_model.py b/scene/gaussian_model.py index f611338d8..e9fd1abfa 100644 --- a/scene/gaussian_model.py +++ b/scene/gaussian_model.py @@ -442,6 +442,8 @@ def replace_tensors_to_optimizer(self, inds=None): self._scaling = optimizable_tensors["scaling"] self._rotation = optimizable_tensors["rotation"] + torch.cuda.empty_cache() + return optimizable_tensors