From 65705a8fdc6d699c1438ad3a5ae0663b9a5a8647 Mon Sep 17 00:00:00 2001
From: Silvano Cerza <3314350+silvanocerza@users.noreply.github.com>
Date: Fri, 5 Apr 2024 17:02:26 +0200
Subject: [PATCH 1/2] Add HF_API_TOKEN env var in tests.yml (#7490)

---
 .github/workflows/tests.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 9c747899c9..7272cccffc 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -28,6 +28,7 @@ env:
   AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
   AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  HF_API_TOKEN: ${{ secrets.HUGGINGFACE_API_KEY }}
   PYTHON_VERSION: "3.8"
   HATCH_VERSION: "1.9.3"
 

From ff269db12da974c2a52a311ef6971ebcad99963e Mon Sep 17 00:00:00 2001
From: Silvano Cerza <3314350+silvanocerza@users.noreply.github.com>
Date: Fri, 5 Apr 2024 18:05:43 +0200
Subject: [PATCH 2/2] Fix unit tests failing if HF_API_TOKEN is set (#7491)

---
 .../generators/chat/test_hugging_face_local.py          | 7 ++++---
 .../rankers/test_sentence_transformers_diversity.py     | 4 +++-
 test/components/rankers/test_transformers_similarity.py | 7 ++++---
 test/components/readers/test_extractive.py              | 9 ++++++---
 test/components/routers/test_zero_shot_text_router.py   | 3 ++-
 5 files changed, 19 insertions(+), 11 deletions(-)

diff --git a/test/components/generators/chat/test_hugging_face_local.py b/test/components/generators/chat/test_hugging_face_local.py
index 2668a93862..05db610fbd 100644
--- a/test/components/generators/chat/test_hugging_face_local.py
+++ b/test/components/generators/chat/test_hugging_face_local.py
@@ -1,5 +1,4 @@
-from unittest.mock import patch, Mock
-from haystack.utils.auth import Secret
+from unittest.mock import Mock, patch
 
 import pytest
 from transformers import PreTrainedTokenizer
@@ -7,6 +6,7 @@
 from haystack.components.generators.chat import HuggingFaceLocalChatGenerator
 from haystack.dataclasses import ChatMessage, ChatRole
 from haystack.utils import ComponentDevice
+from haystack.utils.auth import Secret
 
 
 # used to test serialization of streaming_callback
@@ -160,7 +160,8 @@ def test_from_dict(self, model_info_mock):
         assert generator_2.streaming_callback is streaming_callback_handler
 
     @patch("haystack.components.generators.chat.hugging_face_local.pipeline")
-    def test_warm_up(self, pipeline_mock):
+    def test_warm_up(self, pipeline_mock, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         generator = HuggingFaceLocalChatGenerator(
             model="mistralai/Mistral-7B-Instruct-v0.2",
             task="text2text-generation",
diff --git a/test/components/rankers/test_sentence_transformers_diversity.py b/test/components/rankers/test_sentence_transformers_diversity.py
index 0ed6a579bb..fc3c8830f7 100644
--- a/test/components/rankers/test_sentence_transformers_diversity.py
+++ b/test/components/rankers/test_sentence_transformers_diversity.py
@@ -198,10 +198,12 @@ def test_run_without_warm_up(self, similarity):
         ranker.run(query="test query", documents=documents)
 
     @pytest.mark.parametrize("similarity", ["dot_product", "cosine"])
-    def test_warm_up(self, similarity):
+    def test_warm_up(self, similarity, monkeypatch):
         """
         Test that ranker loads the SentenceTransformer model correctly during warm up.
         """
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
+
         mock_model_class = MagicMock()
         mock_model_instance = MagicMock()
         mock_model_class.return_value = mock_model_instance
diff --git a/test/components/rankers/test_transformers_similarity.py b/test/components/rankers/test_transformers_similarity.py
index dfb25f119c..bfa9a4369c 100644
--- a/test/components/rankers/test_transformers_similarity.py
+++ b/test/components/rankers/test_transformers_similarity.py
@@ -1,13 +1,13 @@
+import logging
 from unittest.mock import MagicMock, patch
-from haystack.utils.auth import Secret
 
 import pytest
-import logging
 import torch
 from transformers.modeling_outputs import SequenceClassifierOutput
 
 from haystack import ComponentError, Document
 from haystack.components.rankers.transformers_similarity import TransformersSimilarityRanker
+from haystack.utils.auth import Secret
 from haystack.utils.device import ComponentDevice, DeviceMap
 
 
@@ -272,7 +272,8 @@ def test_device_map_and_device_raises(self, caplog):
 
     @patch("haystack.components.rankers.transformers_similarity.AutoTokenizer.from_pretrained")
     @patch("haystack.components.rankers.transformers_similarity.AutoModelForSequenceClassification.from_pretrained")
-    def test_device_map_dict(self, mocked_automodel, mocked_autotokenizer):
+    def test_device_map_dict(self, mocked_automodel, _mocked_autotokenizer, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         ranker = TransformersSimilarityRanker("model", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}})
 
         class MockedModel:
diff --git a/test/components/readers/test_extractive.py b/test/components/readers/test_extractive.py
index 04f2f1b287..66dcf0cf02 100644
--- a/test/components/readers/test_extractive.py
+++ b/test/components/readers/test_extractive.py
@@ -410,7 +410,8 @@ def __init__(self):
 
 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_auto(mocked_automodel, mocked_autotokenizer):
+def test_device_map_auto(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "auto"})
     auto_device = ComponentDevice.resolve_device(None)
 
@@ -427,7 +428,8 @@ def __init__(self):
 
 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_str(mocked_automodel, mocked_autotokenizer):
+def test_device_map_str(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "cpu:0"})
 
     class MockedModel:
@@ -443,7 +445,8 @@ def __init__(self):
 
 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_dict(mocked_automodel, mocked_autotokenizer):
+def test_device_map_dict(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader(
         "deepset/roberta-base-squad2", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}}
     )
diff --git a/test/components/routers/test_zero_shot_text_router.py b/test/components/routers/test_zero_shot_text_router.py
index 6472134979..0f1848538f 100644
--- a/test/components/routers/test_zero_shot_text_router.py
+++ b/test/components/routers/test_zero_shot_text_router.py
@@ -23,7 +23,8 @@ def test_to_dict(self):
             },
         }
 
-    def test_from_dict(self):
+    def test_from_dict(self, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.zero_shot_text_router.TransformersZeroShotTextRouter",
             "init_parameters": {