From 0ee456afc67b180066fd94b35c7dda13a384d2ce Mon Sep 17 00:00:00 2001
From: Yu Wu
Date: Wed, 31 Jan 2024 14:59:16 +0800
Subject: [PATCH] fix lr loss(#5466)

Signed-off-by: Yu Wu
---
 python/fate/ml/glm/hetero/coordinated_lr/guest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/fate/ml/glm/hetero/coordinated_lr/guest.py b/python/fate/ml/glm/hetero/coordinated_lr/guest.py
index 6c26897938..e7fbff2cdd 100644
--- a/python/fate/ml/glm/hetero/coordinated_lr/guest.py
+++ b/python/fate/ml/glm/hetero/coordinated_lr/guest.py
@@ -290,7 +290,7 @@ def asynchronous_compute_gradient(self, batch_ctx, encryptor, w, X, Y, weight):
         loss += torch.matmul((1 / h * Xw).T, Xw_h) - torch.matmul((2 / h * Y).T, Xw_h)
         for Xw2_h in batch_ctx.hosts.get("Xw2_h"):
-            loss += 0.125 / h * Xw2_h
+            loss += 2 / h * Xw2_h

         h_loss_list = batch_ctx.hosts.get("h_loss")
         for h_loss in h_loss_list:
             if h_loss is not None:
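
A minimal self-check of the new coefficient, not part of the patch itself. The 1 / h cross term and 2 / h label term on the surrounding context line suggest the host ships its linear predictor pre-scaled by 0.25, so Xw2_h = Xw_h.T @ Xw_h carries a 1/16 factor on sum(z_h**2); under that assumption the host-square contribution to the second-order Taylor approximation of the logistic loss is 2 / h * Xw2_h, and the old 0.125 / h coefficient under-counts it by 16x. The sketch below verifies this algebraically; the names Xw_h_raw and direct, the guest-only first loss line, and the {-1, +1} label convention are illustrative assumptions, not taken from the repository.

# Sanity check for the 2 / h host-square coefficient (sketch, assumptions above).
import math
import torch

torch.manual_seed(0)
h = 16
Xw = torch.randn(h, 1)                           # guest linear predictor
Xw_h_raw = torch.randn(h, 1)                     # host linear predictor, unscaled
Y = torch.randint(0, 2, (h, 1)).float() * 2 - 1  # labels in {-1, +1} (assumed)

Xw_h = 0.25 * Xw_h_raw                           # what the host would ship (assumed scaling)
Xw2_h = torch.matmul(Xw_h.T, Xw_h)               # carries a 0.0625 factor on sum(z_h**2)

# Guest-side accumulation: the first line reconstructs the guest-only terms
# preceding this hunk; the next two mirror the patched lines.
loss = math.log(2) + 0.125 / h * torch.matmul(Xw.T, Xw) - 0.5 / h * torch.matmul(Xw.T, Y)
loss += torch.matmul((1 / h * Xw).T, Xw_h) - torch.matmul((2 / h * Y).T, Xw_h)
loss += 2 / h * Xw2_h                            # fixed coefficient; 0.125 / h is 16x too small

# Direct evaluation of the same Taylor loss on the combined predictor.
z = Xw + Xw_h_raw
direct = (math.log(2) - 0.5 * Y * z + 0.125 * z ** 2).mean()
assert torch.allclose(loss.squeeze(), direct)    # passes with 2 / h, fails with 0.125 / h

The check is pure algebra: expanding 0.125 / h * sum((z_g + z_h)**2) gives the guest square at 0.125 / h, the cross term at 0.25 / h, and the host square at 0.125 / h, and dividing out the assumed 0.25 and 0.0625 pre-scalings yields exactly the 1 / h, 2 / h, and 2 / h factors in the patched code.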