@@ -90,7 +90,7 @@ def _alpha_grid(X, y, Xy=None, l1_ratio=1.0, fit_intercept=True,
 def lasso_path(X, y, eps=1e-3, n_alphas=100, alphas=None,
                precompute='auto', Xy=None, fit_intercept=None,
                normalize=None, copy_X=True, coef_init=None,
-               verbose=False, return_models=True,
+               verbose=False, return_models=False,
                **params):
     """Compute Lasso path with coordinate descent

@@ -200,7 +200,7 @@ def lasso_path(X, y, eps=1e-3, n_alphas=100, alphas=None,
     >>> y = np.array([1, 2, 3.1])
     >>> # Use lasso_path to compute a coefficient path
     >>> _, coef_path, _ = lasso_path(X, y, alphas=[5., 1., .5],
-    ...                              return_models=False, fit_intercept=False)
+    ...                              fit_intercept=False)
     >>> print(coef_path)
     [[ 0.          0.          0.46874778]
      [ 0.2159048   0.4425765   0.23689075]]
@@ -236,7 +236,7 @@ def lasso_path(X, y, eps=1e-3, n_alphas=100, alphas=None,
 def enet_path(X, y, l1_ratio=0.5, eps=1e-3, n_alphas=100, alphas=None,
               precompute='auto', Xy=None, fit_intercept=True,
               normalize=False, copy_X=True, coef_init=None,
-              verbose=False, return_models=True,
+              verbose=False, return_models=False,
              **params):
     """Compute Elastic-Net path with coordinate descent

@@ -296,7 +296,7 @@ def enet_path(X, y, l1_ratio=0.5, eps=1e-3, n_alphas=100, alphas=None,
     verbose : bool or integer
         Amount of verbosity

-    return_models : boolean, optional, default True
+    return_models : boolean, optional, default False
         If ``True``, the function will return list of models. Setting it
         to ``False`` will change the function output returning the values
         of the alphas and the coefficients along the path. Returning the
@@ -628,7 +628,7 @@ def fit(self, X, y):
                       precompute=precompute, Xy=this_Xy,
                       fit_intercept=False, normalize=False, copy_X=True,
                       verbose=False, tol=self.tol, positive=self.positive,
-                      return_models=False, X_mean=X_mean, X_std=X_std,
+                      X_mean=X_mean, X_std=X_std,
                       coef_init=coef_[k], max_iter=self.max_iter)
            coef_[k] = this_coef[:, 0]
            dual_gaps_[k] = this_dual_gap[0]
@@ -827,7 +827,6 @@ def _path_residuals(X, y, train, test, path, path_params, l1_ratio=1,

     # del path_params['precompute']
     path_params = path_params.copy()
-    path_params['return_models'] = False
     path_params['fit_intercept'] = False
     path_params['normalize'] = False
     path_params['Xy'] = Xy
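For reference, a minimal sketch of how callers would use the path functions once ``return_models`` defaults to ``False``: the function then returns the alphas and the coefficient path as arrays rather than a list of fitted models, as described in the ``return_models`` docstring above. The design matrix ``X`` below is illustrative toy data (only ``y`` appears in the diffed doctest), and the assumption that the third return value holds the dual gaps from the coordinate-descent solver is not shown in this diff.

import numpy as np
from sklearn.linear_model import lasso_path

# Illustrative toy data; only y matches the doctest shown in the diff.
X = np.array([[1.0, 2.3], [2.0, 5.4], [3.1, 4.3]])
y = np.array([1, 2, 3.1])

# With return_models=False (now the default), lasso_path returns arrays
# instead of a list of fitted Lasso models.  The third return value is
# assumed here to be the dual gaps reported by the solver.
alphas, coefs, dual_gaps = lasso_path(X, y, alphas=[5., 1., .5],
                                      fit_intercept=False)

print(alphas)       # the alpha values actually used along the path
print(coefs.shape)  # (n_features, n_alphas) for a single target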