Commit

Add NUA REMi E2E
carlesonielfa committed Dec 19, 2024
1 parent b82ad62 commit be11db6
Showing 1 changed file with 25 additions and 0 deletions.
25 changes: 25 additions & 0 deletions nua/e2e/regional/test_predict.py
@@ -2,6 +2,7 @@
from nuclia.sdk.predict import AsyncNucliaPredict

from regional.models import ALL_ENCODERS, ALL_LLMS
from nuclia_models.predict.remi import RemiRequest


@pytest.mark.asyncio_cooperative
@@ -55,3 +56,27 @@ async def test_predict_rephrase(nua_config, model):
    # TODO: Test that custom rephrase prompt works once SDK supports it
    rephrased = await np.rephrase(question="Barcelona best coffe", model=model)

Check failure on line 57 in nua/e2e/regional/test_predict.py
GitHub Actions / JUnit Test Report
test_predict.test_predict_rephrase[europe-1.stashify.cloud-chatgpt-vision]
nuclia.exceptions.NuaAPIException: Exception calling NUA API: 412 {"detail":"Unknown LLM exception: Unknown API exception"}

Raw output:
nua_config = 'europe-1.stashify.cloud', model = 'chatgpt-vision'

    @pytest.mark.asyncio_cooperative
    @pytest.mark.parametrize("model", ALL_LLMS)
    async def test_predict_rephrase(nua_config, model):
        # Check that rephrase is working for all models
        np = AsyncNucliaPredict()
        # TODO: Test that custom rephrase prompt works once SDK supports it
>       rephrased = await np.rephrase(question="Barcelona best coffe", model=model)

nua/e2e/regional/test_predict.py:57: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/decorators.py:136: in async_wrapper_checkout_nua
    return await func(*args, **kwargs)
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/sdk/predict.py:275: in rephrase
    return (await nc.rephrase(question, user_context, context, model, prompt)).root
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:603: in rephrase
    return await self._request(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <nuclia.lib.nua.AsyncNuaClient object at 0x7f169c5d8b00>, method = 'POST'
url = 'https://europe-1.stashify.cloud/api/v1/predict/rephrase?model=chatgpt-vision'
output = <class 'nuclia.lib.nua_responses.RephraseModel'>
payload = {'question': 'Barcelona best coffe', 'user_context': None, 'user_id': 'USER'}
timeout = 60

    async def _request(
        self,
        method: str,
        url: str,
        output: Type[ConvertType],
        payload: Optional[dict[Any, Any]] = None,
        timeout: int = 60,
    ) -> ConvertType:
        resp = await self.client.request(method, url, json=payload, timeout=timeout)
        if resp.status_code != 200:
>           raise NuaAPIException(code=resp.status_code, detail=resp.content.decode())
E           nuclia.exceptions.NuaAPIException: Exception calling NUA API: 412 {"detail":"Unknown LLM exception: Unknown API exception"}

/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:412: NuaAPIException
    assert rephrased != "Barcelona best coffe" and rephrased != ""


@pytest.mark.asyncio_cooperative
async def test_predict_remi(nua_config):
    # Check that REMi answer and context evaluation works
    np = AsyncNucliaPredict()
    results = await np.remi(
        RemiRequest(
            user_id="NUA E2E",
            question="What is the capital of France?",
            answer="Paris is the capital of france!",
            contexts=[
                "Paris is the capital of France.",
                "Berlin is the capital of Germany.",
            ],
        )
    )
    assert results.answer_relevance.score >= 4

    assert results.context_relevance[0] >= 4
    assert results.groundedness[0] >= 4

    assert results.context_relevance[1] < 2
    assert results.groundedness[1] < 2
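
For anyone wanting to exercise the new REMi call outside the E2E suite, the snippet below is a minimal standalone sketch built only from the imports and response fields used in the test above. It assumes NUA credentials and a region are already configured for the nuclia SDK (the test receives this through the nua_config fixture); the user_id value and the printed fields are illustrative, not part of this commit.

import asyncio

from nuclia.sdk.predict import AsyncNucliaPredict
from nuclia_models.predict.remi import RemiRequest


async def main() -> None:
    # Assumes NUA credentials/region are already configured for the SDK
    # (the E2E suite injects this through the nua_config fixture).
    np = AsyncNucliaPredict()
    results = await np.remi(
        RemiRequest(
            user_id="local-check",  # illustrative user id, not from the commit
            question="What is the capital of France?",
            answer="Paris is the capital of france!",
            contexts=[
                "Paris is the capital of France.",
                "Berlin is the capital of Germany.",
            ],
        )
    )
    # Same response fields the test asserts on: one overall answer-relevance
    # score plus per-context relevance and groundedness scores.
    print("answer relevance:", results.answer_relevance.score)
    for i, relevance in enumerate(results.context_relevance):
        print(f"context {i}: relevance={relevance}, groundedness={results.groundedness[i]}")


if __name__ == "__main__":
    asyncio.run(main())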
