Merge branch 'master' into vignette
trivialfis authored Jan 7, 2025
2 parents 107f18d + 4500941 commit 196273d
Showing 4 changed files with 12 additions and 4 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/main.yml
@@ -174,6 +174,7 @@ jobs:
- runner=${{ matrix.runner }}
- run-id=${{ github.run_id }}
- tag=main-test-cpp-gpu-${{ matrix.suite }}
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
@@ -213,6 +214,7 @@ jobs:
- runner=${{ matrix.runner }}
- run-id=${{ github.run_id }}
- tag=main-test-python-wheel-${{ matrix.description }}
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
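For reference, timeout-minutes is a job-level key in GitHub Actions that cancels a job once the limit is reached (the default is 360 minutes). A minimal sketch of the pattern added above, with a hypothetical job and step names for illustration:

jobs:
  gpu-tests:                 # hypothetical job name
    runs-on: ubuntu-latest
    timeout-minutes: 30      # cancel this job if it runs longer than 30 minutes
    strategy:
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Run tests
        run: bash ./run_tests.sh   # placeholder command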
7 changes: 7 additions & 0 deletions tests/test_distributed/test_gpu_with_dask/test_gpu_ranking.py
@@ -4,8 +4,15 @@
import pytest
from distributed import Client

from xgboost import testing as tm
from xgboost.testing import dask as dtm

pytestmark = [
pytest.mark.skipif(**tm.no_dask()),
pytest.mark.skipif(**tm.no_dask_cuda()),
tm.timeout(120),
]


@pytest.mark.filterwarnings("error")
def test_no_group_split(local_cuda_client: Client) -> None:
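The pytest.mark.skipif(**tm.no_dask()) idiom relies on each tm.no_* helper returning the keyword arguments skipif expects (a condition and a reason). A minimal self-contained sketch of that pattern, using a hypothetical helper rather than the xgboost.testing one:

import importlib.util

import pytest


def no_dask() -> dict:
    # Keyword arguments for pytest.mark.skipif: skip when dask is not installed.
    return {
        "condition": importlib.util.find_spec("dask") is None,
        "reason": "dask is not installed",
    }


# Module-level marks, mirroring the pytestmark list in the diff above.
pytestmark = [pytest.mark.skipif(**no_dask())]


def test_dask_is_available() -> None:
    import dask.array as da

    assert float(da.ones(10).sum().compute()) == 10.0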
2 changes: 0 additions & 2 deletions
@@ -376,8 +376,6 @@ def test_early_stopping(self, local_cuda_client: Client) -> None:
@pytest.mark.skipif(**tm.no_cudf())
@pytest.mark.parametrize("model", ["boosting"])
def test_dask_classifier(self, model: str, local_cuda_client: Client) -> None:
import dask_cudf

X_, y_, w_ = generate_array(with_weights=True)
y_ = (y_ * 10).astype(np.int32)
X = dd.from_dask_array(X_).to_backend("cudf")
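The deleted import dask_cudf is unused in the test body because to_backend("cudf") goes through dask's backend-dispatch registry; dask-cudf still has to be installed, it just never needs an explicit import at the call site. A rough sketch of that conversion, assuming a CUDA environment with dask-cudf available:

import dask.array as da
import dask.dataframe as dd
import numpy as np

# Start from a CPU-backed dask array, standing in for generate_array() in the test.
X_ = da.from_array(np.random.rand(100, 4), chunks=(25, 4))

# Convert to a dask dataframe and move it to the cudf backend; the backend
# lookup imports dask-cudf lazily, so no explicit `import dask_cudf` is needed.
X = dd.from_dask_array(X_).to_backend("cudf")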
5 changes: 3 additions & 2 deletions tests/test_distributed/test_with_dask/test_with_dask.py
@@ -643,7 +643,7 @@ def run_dask_classifier(
tree_method: Optional[str],
device: Literal["cpu", "cuda"],
client: "Client",
n_classes,
n_classes: int,
) -> None:
metric = "merror" if n_classes > 2 else "logloss"

@@ -930,8 +930,9 @@ def run_empty_dmatrix_auc(client: "Client", device: str, n_workers: int) -> None
valid_X = dd.from_array(valid_X_, chunksize=n_samples)
valid_y = dd.from_array(valid_y_, chunksize=n_samples)

# Specify the base score in case there are only two workers and one sample.
cls = dxgb.DaskXGBClassifier(
device=device, n_estimators=2, eval_metric=["auc", "aucpr"]
device=device, n_estimators=2, eval_metric=["auc", "aucpr"], base_score=0.5
)
cls.fit(X, y, eval_set=[(valid_X, valid_y)])

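Per the comment added above, base_score=0.5 pins the model's global intercept so the AUC/AUCPR evaluation does not depend on an intercept estimated from a near-empty partition (e.g. two workers and a single sample). A runnable sketch of the same construction on synthetic data and a local cluster, for illustration only:

from dask import array as da
from distributed import Client, LocalCluster

from xgboost import dask as dxgb


def run(client: Client) -> None:
    X = da.random.random((32, 4), chunks=(16, 4))
    y = (da.random.random(32, chunks=16) > 0.5).astype("int32")
    # Fix the base score so evaluation stays stable even when a worker
    # receives an empty or single-sample partition.
    clf = dxgb.DaskXGBClassifier(
        n_estimators=2, eval_metric=["auc", "aucpr"], base_score=0.5
    )
    clf.fit(X, y, eval_set=[(X, y)])
    print(clf.evals_result())


if __name__ == "__main__":
    with LocalCluster(n_workers=2) as cluster, Client(cluster) as client:
        run(client)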