From 4a100eef4314dd89a509624e0655995b0a8ad5e9 Mon Sep 17 00:00:00 2001
From: Wing Lian
Date: Mon, 2 Dec 2024 13:40:20 -0500
Subject: [PATCH] support for wrapped schedulefree optimizer when using
 deepspeed (#3266)

* support for wrapped schedulefree optimizer when using deepspeed

* add comment and lint
---
 src/accelerate/optimizer.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/accelerate/optimizer.py b/src/accelerate/optimizer.py
index acc238a1a99..25e2b95d98e 100644
--- a/src/accelerate/optimizer.py
+++ b/src/accelerate/optimizer.py
@@ -126,6 +126,13 @@ def train(self):
         """
         if hasattr(self.optimizer, "train") and callable(self.optimizer.train):
             self.optimizer.train()
+        elif (
+            hasattr(self.optimizer, "optimizer")
+            and hasattr(self.optimizer.optimizer, "train")
+            and callable(self.optimizer.optimizer.train)
+        ):
+            # the deepspeed optimizer further wraps the optimizer
+            self.optimizer.optimizer.train()
 
     def eval(self):
         """
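
Why the extra branch is needed: schedule-free optimizers (e.g. schedulefree's
AdamWScheduleFree) must be switched with .train()/.eval() around training and
evaluation. Under DeepSpeed, Accelerate's AcceleratedOptimizer wraps DeepSpeed's
optimizer wrapper, which in turn holds the user's optimizer at `.optimizer`, so
the original hasattr check on the outer wrapper missed it and the mode switch
was silently skipped. Below is a minimal, standalone Python sketch of the
patched dispatch; ToyScheduleFree, DeepSpeedWrapper, and AcceleratedOptimizer
are simplified stand-ins for illustration, not the real accelerate/deepspeed
classes.

    class ToyScheduleFree:
        """Stand-in for a schedule-free optimizer that must be toggled
        between train and eval mode around training steps."""
        def __init__(self):
            self.mode = "eval"

        def train(self):
            self.mode = "train"

        def eval(self):
            self.mode = "eval"

    class DeepSpeedWrapper:
        """Stand-in for DeepSpeed's optimizer wrapper: it exposes the inner
        optimizer at `.optimizer` but defines no `train()` of its own."""
        def __init__(self, optimizer):
            self.optimizer = optimizer

    class AcceleratedOptimizer:
        """Simplified stand-in for accelerate's wrapper, with the patched
        dispatch logic from the diff above."""
        def __init__(self, optimizer):
            self.optimizer = optimizer

        def train(self):
            if hasattr(self.optimizer, "train") and callable(self.optimizer.train):
                self.optimizer.train()
            elif (
                hasattr(self.optimizer, "optimizer")
                and hasattr(self.optimizer.optimizer, "train")
                and callable(self.optimizer.optimizer.train)
            ):
                # the deepspeed optimizer further wraps the optimizer
                self.optimizer.optimizer.train()

    inner = ToyScheduleFree()
    opt = AcceleratedOptimizer(DeepSpeedWrapper(inner))
    opt.train()
    # Without the elif branch, the inner optimizer would still be in "eval" mode.
    assert inner.mode == "train"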