Skip to content

Commit

Permalink
[misc][ci] fix cpu test with plugins (vllm-project#7489)
Browse files Browse the repository at this point in the history
  • Loading branch information
youkaichao authored Aug 14, 2024
1 parent 9799280 commit ea49e6a
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 8 deletions.
2 changes: 1 addition & 1 deletion .buildkite/run-cpu-test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ docker exec cpu-test-avx2 bash -c "python3 examples/offline_inference.py"
# Run basic model test
docker exec cpu-test bash -c "
pip install pytest Pillow protobuf
pytest -v -s tests/models -m \"not vlm\" --ignore=tests/models/test_embedding.py --ignore=tests/models/test_registry.py --ignore=tests/models/test_jamba.py --ignore=tests/models/test_danube3_4b.py" # Mamba and Danube3-4B on CPU is not supported
pytest -v -s tests/models -m \"not vlm\" --ignore=tests/models/test_embedding.py --ignore=tests/models/test_oot_registration.py --ignore=tests/models/test_registry.py --ignore=tests/models/test_jamba.py --ignore=tests/models/test_danube3_4b.py" # Mamba and Danube3-4B on CPU is not supported

# online inference
docker exec cpu-test bash -c "
Expand Down
1 change: 1 addition & 0 deletions .buildkite/test-pipeline.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -292,6 +292,7 @@ steps:
- pytest -v -s distributed/test_chunked_prefill_distributed.py
- pytest -v -s distributed/test_multimodal_broadcast.py
- pytest -v -s spec_decode/e2e/test_integration_dist_tp2.py
- pip install -e ./plugins/vllm_add_dummy_model
- pytest -v -s distributed/test_distributed_oot.py
- CUDA_VISIBLE_DEVICES=0,1 pytest -v -s test_sharded_state_loader.py
- CUDA_VISIBLE_DEVICES=0,1 pytest -v -s distributed/test_utils.py
Expand Down
7 changes: 3 additions & 4 deletions tests/models/test_oot_registration.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,18 @@

from vllm import LLM, SamplingParams

# NOTE: the order of the tests is important
# the first test does not load any plugins
# the second test loads the plugin
# they share the same process, so the plugin is loaded for the second test
from ..utils import fork_new_process_for_each_test


@fork_new_process_for_each_test
def test_plugin(dummy_opt_path):
    """With every plugin disabled, loading the dummy model must fail.

    Runs in a forked child process (via the decorator) so the empty
    ``VLLM_PLUGINS`` setting cannot leak into sibling tests.
    """
    # An empty plugin list means the out-of-tree dummy architecture is
    # never registered, so model construction has to raise.
    os.environ["VLLM_PLUGINS"] = ""
    with pytest.raises(Exception) as exc_info:
        LLM(model=dummy_opt_path, load_format="dummy")
    assert "are not supported for now" in str(exc_info.value)


@fork_new_process_for_each_test
def test_oot_registration(dummy_opt_path):
os.environ["VLLM_PLUGINS"] = "register_dummy_model"
prompts = ["Hello, my name is", "The text does not matter"]
Expand Down
13 changes: 10 additions & 3 deletions tests/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,17 +10,17 @@
from typing import Any, Callable, Dict, List, Optional

import openai
import ray
import requests
from transformers import AutoTokenizer
from typing_extensions import ParamSpec

from vllm.distributed import (ensure_model_parallel_initialized,
init_distributed_environment)
from vllm.entrypoints.openai.cli_args import make_arg_parser
from vllm.platforms import current_platform
from vllm.utils import FlexibleArgumentParser, get_open_port, is_hip

if is_hip():
if current_platform.is_rocm():
from amdsmi import (amdsmi_get_gpu_vram_usage,
amdsmi_get_processor_handles, amdsmi_init,
amdsmi_shut_down)
Expand All @@ -32,7 +32,7 @@ def _nvml():
yield
finally:
amdsmi_shut_down()
else:
elif current_platform.is_cuda():
from pynvml import (nvmlDeviceGetHandleByIndex, nvmlDeviceGetMemoryInfo,
nvmlInit, nvmlShutdown)

Expand All @@ -43,6 +43,11 @@ def _nvml():
yield
finally:
nvmlShutdown()
else:

@contextmanager
def _nvml():
    """No-op GPU-telemetry guard for platforms that are neither ROCm nor CUDA.

    The ROCm branch wraps amdsmi_init/amdsmi_shut_down and the CUDA branch
    wraps nvmlInit/nvmlShutdown; this fallback keeps ``with _nvml():`` valid
    everywhere else by doing nothing.
    """
    yield


VLLM_PATH = Path(__file__).parent.parent
Expand Down
Expand Up @@ -293,6 +298,8 @@ def multi_process_parallel(
pp_size: int,
test_target: Any,
) -> None:
import ray

# Using ray helps debugging the error when it failed
# as compared to multiprocessing.
# NOTE: We need to set working_dir for distributed tests,
Expand Down

0 comments on commit ea49e6a

Please sign in to comment.