Skip to content

Commit

Permalink
Merge branch 'main' into docstrings-linting
Browse files Browse the repository at this point in the history
  • Loading branch information
davidsbatista authored Apr 5, 2024
2 parents 99c556e + ff269db commit d85e17b
Show file tree
Hide file tree
Showing 6 changed files with 20 additions and 11 deletions.
1 change: 1 addition & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ env:
   AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
   AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+  HF_API_TOKEN: ${{ secrets.HUGGINGFACE_API_KEY }}
   PYTHON_VERSION: "3.8"
   HATCH_VERSION: "1.9.3"

Expand Down
7 changes: 4 additions & 3 deletions test/components/generators/chat/test_hugging_face_local.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
-from unittest.mock import patch, Mock
-from haystack.utils.auth import Secret
+from unittest.mock import Mock, patch

 import pytest
 from transformers import PreTrainedTokenizer

 from haystack.components.generators.chat import HuggingFaceLocalChatGenerator
 from haystack.dataclasses import ChatMessage, ChatRole
 from haystack.utils import ComponentDevice
+from haystack.utils.auth import Secret


# used to test serialization of streaming_callback
Expand Down Expand Up @@ -160,7 +160,8 @@ def test_from_dict(self, model_info_mock):
         assert generator_2.streaming_callback is streaming_callback_handler

     @patch("haystack.components.generators.chat.hugging_face_local.pipeline")
-    def test_warm_up(self, pipeline_mock):
+    def test_warm_up(self, pipeline_mock, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         generator = HuggingFaceLocalChatGenerator(
             model="mistralai/Mistral-7B-Instruct-v0.2",
             task="text2text-generation",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -198,10 +198,12 @@ def test_run_without_warm_up(self, similarity):
         ranker.run(query="test query", documents=documents)

     @pytest.mark.parametrize("similarity", ["dot_product", "cosine"])
-    def test_warm_up(self, similarity):
+    def test_warm_up(self, similarity, monkeypatch):
         """
         Test that ranker loads the SentenceTransformer model correctly during warm up.
         """
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
+
         mock_model_class = MagicMock()
         mock_model_instance = MagicMock()
         mock_model_class.return_value = mock_model_instance
Expand Down
7 changes: 4 additions & 3 deletions test/components/rankers/test_transformers_similarity.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
+import logging
 from unittest.mock import MagicMock, patch
-from haystack.utils.auth import Secret

 import pytest
-import logging
 import torch
 from transformers.modeling_outputs import SequenceClassifierOutput

 from haystack import ComponentError, Document
 from haystack.components.rankers.transformers_similarity import TransformersSimilarityRanker
+from haystack.utils.auth import Secret
 from haystack.utils.device import ComponentDevice, DeviceMap


Expand Down Expand Up @@ -272,7 +272,8 @@ def test_device_map_and_device_raises(self, caplog):

     @patch("haystack.components.rankers.transformers_similarity.AutoTokenizer.from_pretrained")
     @patch("haystack.components.rankers.transformers_similarity.AutoModelForSequenceClassification.from_pretrained")
-    def test_device_map_dict(self, mocked_automodel, mocked_autotokenizer):
+    def test_device_map_dict(self, mocked_automodel, _mocked_autotokenizer, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         ranker = TransformersSimilarityRanker("model", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}})

class MockedModel:
Expand Down
9 changes: 6 additions & 3 deletions test/components/readers/test_extractive.py
Original file line number Diff line number Diff line change
Expand Up @@ -410,7 +410,8 @@ def __init__(self):

 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_auto(mocked_automodel, mocked_autotokenizer):
+def test_device_map_auto(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "auto"})
     auto_device = ComponentDevice.resolve_device(None)

Expand All @@ -427,7 +428,8 @@ def __init__(self):

 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_str(mocked_automodel, mocked_autotokenizer):
+def test_device_map_str(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader("deepset/roberta-base-squad2", model_kwargs={"device_map": "cpu:0"})

class MockedModel:
Expand All @@ -443,7 +445,8 @@ def __init__(self):

 @patch("haystack.components.readers.extractive.AutoTokenizer.from_pretrained")
 @patch("haystack.components.readers.extractive.AutoModelForQuestionAnswering.from_pretrained")
-def test_device_map_dict(mocked_automodel, mocked_autotokenizer):
+def test_device_map_dict(mocked_automodel, _mocked_autotokenizer, monkeypatch):
+    monkeypatch.delenv("HF_API_TOKEN", raising=False)
     reader = ExtractiveReader(
         "deepset/roberta-base-squad2", model_kwargs={"device_map": {"layer_1": 1, "classifier": "cpu"}}
     )
Expand Down
3 changes: 2 additions & 1 deletion test/components/routers/test_zero_shot_text_router.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ def test_to_dict(self):
             },
         }

-    def test_from_dict(self):
+    def test_from_dict(self, monkeypatch):
+        monkeypatch.delenv("HF_API_TOKEN", raising=False)
         data = {
             "type": "haystack.components.routers.zero_shot_text_router.TransformersZeroShotTextRouter",
             "init_parameters": {
Expand Down

0 comments on commit d85e17b

Please sign in to comment.