Skip to content

Commit

Permalink
Fix: `lr` was not applied to the gradient update in binary logistic regression
Browse files Browse the repository at this point in the history
  • Loading branch information
SleepyBag authored Jun 11, 2021
1 parent c170b06 commit 60380f4
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions 06.LogisticRegression-MaxEntropy/BinaryLogisticRegression.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from utils import binary_cross_entropy, sigmoid, wbline

class LogisticRegression:
def __init__(self, lr=1e-4, max_steps=1000, verbose=True):
def __init__(self, lr=1, max_steps=1000, verbose=True):
self.lr = lr
self.max_steps = max_steps
self.verbose = verbose
Expand All @@ -33,8 +33,8 @@ def fit(self, X, Y):
# get mean of gradient across all data
gradient_b = gradient_b.mean(axis=0)
gradient_w = gradient_w.mean(axis=0)
self.w += gradient_w
self.b += gradient_b
self.w += gradient_w * self.lr
self.b += gradient_b * self.lr
if self.verbose:
loss = binary_cross_entropy(pred, Y)
print(f"Step {step}, Loss is {loss}...")
Expand Down

0 comments on commit 60380f4

Please sign in to comment.