From d20c1b537922296fc49abe89c958997a8ee960b5 Mon Sep 17 00:00:00 2001 From: nabenabe0928 Date: Fri, 9 Apr 2021 00:07:26 +0900 Subject: [PATCH] [fix] Fix a contradiction in stratified_holdout_validation Since stratified splitting requires shuffling by default and it raised an error in the GitHub check, I fixed this issue. --- autoPyTorch/datasets/resampling_strategy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autoPyTorch/datasets/resampling_strategy.py b/autoPyTorch/datasets/resampling_strategy.py index dfd797023..765a31cdb 100644 --- a/autoPyTorch/datasets/resampling_strategy.py +++ b/autoPyTorch/datasets/resampling_strategy.py @@ -113,7 +113,7 @@ def stratified_holdout_validation(val_share: float, indices: np.ndarray, **kwargs: Any ) -> Tuple[np.ndarray, np.ndarray]: - train, val = train_test_split(indices, test_size=val_share, shuffle=False, stratify=kwargs["stratify"]) + train, val = train_test_split(indices, test_size=val_share, shuffle=True, stratify=kwargs["stratify"]) return train, val @classmethod