From d58fceee837807b4e63ad956beee0124015c03f8 Mon Sep 17 00:00:00 2001
From: jyaacoub
Date: Thu, 14 Dec 2023 20:56:14 -0500
Subject: [PATCH] fix(raytune_DDP): algo not defined #68

---
 rayTrain_Tune.py |  2 +-
 raytune_DDP.py   | 22 +++++++++++-----------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/rayTrain_Tune.py b/rayTrain_Tune.py
index 7ec4a861..c210987e 100644
--- a/rayTrain_Tune.py
+++ b/rayTrain_Tune.py
@@ -81,7 +81,7 @@ def train_func(config):
         "lr": ray.tune.loguniform(1e-4, 1e-2),
         "dropout": ray.tune.uniform(0, 0.5),
         "embedding_dim": ray.tune.choice([64, 128, 256]),
-        "batch_size": ray.tune.choice([16, 32, 48]),
+        "batch_size": ray.tune.choice([16, 32, 48]), # batch size is per GPU!
     }
 
     scaling_config = ScalingConfig(num_workers=2, # number of ray actors to launch
diff --git a/raytune_DDP.py b/raytune_DDP.py
index 85e3125f..cf025b88 100644
--- a/raytune_DDP.py
+++ b/raytune_DDP.py
@@ -120,15 +120,15 @@ def objective_DDP(config): # NO inter-node distribution due to communication dif
 
 ray.init(num_gpus=1, num_cpus=8, ignore_reinit_error=True)
 
-tuner = tune.Tuner(
-    tune.with_resources(objective_DDP, resources={"cpu": 6, "gpu": 2}),
-    param_space=search_space,
-    tune_config=tune.TuneConfig(
-        metric="mean_loss",
-        mode="min",
-        search_alg=algo,
-        num_samples=50,
-    ),
-)
+    tuner = tune.Tuner(
+        tune.with_resources(objective_DDP, resources={"cpu": 6, "gpu": 2}),
+        param_space=search_space,
+        tune_config=tune.TuneConfig(
+            metric="mean_loss",
+            mode="min",
+            search_alg=OptunaSearch(),
+            num_samples=50,
+        ),
+    )
 
-results = tuner.fit()
+    results = tuner.fit()