GitHub Actions / JUnit Test Report
failed
Dec 19, 2024 in 0s
252 tests run, 81 passed, 168 skipped, 3 failed.
Annotations
Check failure on line 11 in nua/e2e/regional/test_llm_generate.py
github-actions / JUnit Test Report
test_llm_generate.test_llm_generate[europe-1.stashify.cloud-chatgpt-vision]
pydantic_core._pydantic_core.ValidationError: 1 validation error for GenerativeChunk
chunk
Field required [type=missing, input_value={'detail': 'Unknown LLM e... Unknown API exception'}, input_type=dict]
For further information visit https://errors.pydantic.dev/2.10/v/missing
Raw output
nua_config = 'europe-1.stashify.cloud', model = 'chatgpt-vision'
    @pytest.mark.asyncio_cooperative
    @pytest.mark.parametrize("model", ALL_LLMS)
    async def test_llm_generate(nua_config, model):
        np = AsyncNucliaPredict()
>       generated = await np.generate("Which is the capital of Catalonia?", model=model)
nua/e2e/regional/test_llm_generate.py:11:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/decorators.py:136: in async_wrapper_checkout_nua
    return await func(*args, **kwargs)
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/sdk/predict.py:215: in generate
    return await nc.generate(body, model)
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:523: in generate
    async for chunk in self._stream(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <nuclia.lib.nua.AsyncNuaClient object at 0x7f169e731100>, method = 'POST'
url = 'https://europe-1.stashify.cloud/api/v1/predict/chat?model=chatgpt-vision'
payload = {'chat_history': [], 'citations': False, 'context': [], 'generative_model': None, ...}
timeout = 300
    async def _stream(
        self,
        method: str,
        url: str,
        payload: Optional[dict[Any, Any]] = None,
        timeout: int = 60,
    ) -> AsyncIterator[GenerativeChunk]:
        async with self.stream_client.stream(
            method,
            url,
            json=payload,
            timeout=timeout,
        ) as response:
            async for json_body in response.aiter_lines():
>               yield GenerativeChunk.model_validate_json(json_body) # type: ignore
E               pydantic_core._pydantic_core.ValidationError: 1 validation error for GenerativeChunk
E               chunk
E                 Field required [type=missing, input_value={'detail': 'Unknown LLM e... Unknown API exception'}, input_type=dict]
E                   For further information visit https://errors.pydantic.dev/2.10/v/missing
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:433: ValidationError
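Analysis: the streaming /api/v1/predict/chat endpoint emitted an error body ({"detail": "Unknown LLM exception: Unknown API exception"}) as a stream line, which does not match the GenerativeChunk schema, so model_validate_json raises the "chunk: Field required" error instead of surfacing the backend failure. A minimal sketch that reproduces the same pydantic error, assuming a simplified schema with a required "chunk" field (the real GenerativeChunk lives in nuclia.lib.nua_responses):

from pydantic import BaseModel, ValidationError

class GenerativeChunkSketch(BaseModel):
    chunk: str  # assumption: the real GenerativeChunk requires a "chunk" field

# The stream yielded an error payload instead of a chunk; validating it
# raises the same "Field required [type=missing]" error shown above.
error_line = '{"detail": "Unknown LLM exception: Unknown API exception"}'
try:
    GenerativeChunkSketch.model_validate_json(error_line)
except ValidationError as exc:
    print(exc)  # 1 validation error for GenerativeChunkSketch: chunk -> Field required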
Check failure on line 11 in nua/e2e/regional/test_llm_rag.py
github-actions / JUnit Test Report
test_llm_rag.test_llm_rag[europe-1.stashify.cloud-chatgpt-vision]
nuclia.exceptions.NuaAPIException: Exception calling NUA API: 412 {"detail":"Unknown LLM exception: Unknown API exception"}
Raw output
nua_config = 'europe-1.stashify.cloud', model = 'chatgpt-vision'
    @pytest.mark.asyncio_cooperative
    @pytest.mark.parametrize("model", ALL_LLMS)
    async def test_llm_rag(nua_config, model):
        np = AsyncNucliaPredict()
>       generated = await np.rag(
            question="Which is the CEO of Nuclia?",
            context=[
                "Nuclia CTO is Ramon Navarro",
                "Eudald Camprubí is CEO at the same company as Ramon Navarro",
            ],
            model=model,
        )
nua/e2e/regional/test_llm_rag.py:11:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/decorators.py:136: in async_wrapper_checkout_nua
    return await func(*args, **kwargs)
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/sdk/predict.py:282: in rag
    return await nc.generate_retrieval(question, context, model)
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:634: in generate_retrieval
    return await self._request(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <nuclia.lib.nua.AsyncNuaClient object at 0x7f169c369fa0>, method = 'POST'
url = 'https://europe-1.stashify.cloud/api/v1/predict/chat?model=chatgpt-vision'
output = <class 'nuclia.lib.nua_responses.ChatResponse'>
payload = {'chat_history': [], 'citations': False, 'context': [], 'generative_model': None, ...}
timeout = 60
    async def _request(
        self,
        method: str,
        url: str,
        output: Type[ConvertType],
        payload: Optional[dict[Any, Any]] = None,
        timeout: int = 60,
    ) -> ConvertType:
        resp = await self.client.request(method, url, json=payload, timeout=timeout)
        if resp.status_code != 200:
>           raise NuaAPIException(code=resp.status_code, detail=resp.content.decode())
E           nuclia.exceptions.NuaAPIException: Exception calling NUA API: 412 {"detail":"Unknown LLM exception: Unknown API exception"}
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:412: NuaAPIException
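Analysis: here the same backend failure surfaces earlier because the non-streaming path checks the status code: the chat endpoint answered HTTP 412 and _request converts any non-200 response into NuaAPIException carrying the raw body. A hedged sketch of that guard pattern, assuming httpx-style client semantics (NuaAPIErrorSketch and request_chat are illustrative names, not SDK API):

import httpx

class NuaAPIErrorSketch(Exception):
    def __init__(self, code: int, detail: str):
        super().__init__(f"Exception calling NUA API: {code} {detail}")

async def request_chat(client: httpx.AsyncClient, url: str, payload: dict) -> dict:
    # Mirrors the failing call: POST the chat payload and refuse anything but 200.
    resp = await client.request("POST", url, json=payload, timeout=60)
    if resp.status_code != 200:
        # e.g. 412 {"detail":"Unknown LLM exception: Unknown API exception"}
        raise NuaAPIErrorSketch(resp.status_code, resp.content.decode())
    return resp.json()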
Check failure on line 57 in nua/e2e/regional/test_predict.py
github-actions / JUnit Test Report
test_predict.test_predict_rephrase[europe-1.stashify.cloud-chatgpt-vision]
nuclia.exceptions.NuaAPIException: Exception calling NUA API: 412 {"detail":"Unknown LLM exception: Unknown API exception"}
Raw output
nua_config = 'europe-1.stashify.cloud', model = 'chatgpt-vision'
    @pytest.mark.asyncio_cooperative
    @pytest.mark.parametrize("model", ALL_LLMS)
    async def test_predict_rephrase(nua_config, model):
        # Check that rephrase is working for all models
        np = AsyncNucliaPredict()
        # TODO: Test that custom rephrase prompt works once SDK supports it
>       rephrased = await np.rephrase(question="Barcelona best coffe", model=model)
nua/e2e/regional/test_predict.py:57:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/decorators.py:136: in async_wrapper_checkout_nua
    return await func(*args, **kwargs)
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/sdk/predict.py:275: in rephrase
    return (await nc.rephrase(question, user_context, context, model, prompt)).root
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:603: in rephrase
    return await self._request(
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <nuclia.lib.nua.AsyncNuaClient object at 0x7f169c5d8b00>, method = 'POST'
url = 'https://europe-1.stashify.cloud/api/v1/predict/rephrase?model=chatgpt-vision'
output = <class 'nuclia.lib.nua_responses.RephraseModel'>
payload = {'question': 'Barcelona best coffe', 'user_context': None, 'user_id': 'USER'}
timeout = 60
    async def _request(
        self,
        method: str,
        url: str,
        output: Type[ConvertType],
        payload: Optional[dict[Any, Any]] = None,
        timeout: int = 60,
    ) -> ConvertType:
        resp = await self.client.request(method, url, json=payload, timeout=timeout)
        if resp.status_code != 200:
>           raise NuaAPIException(code=resp.status_code, detail=resp.content.decode())
E           nuclia.exceptions.NuaAPIException: Exception calling NUA API: 412 {"detail":"Unknown LLM exception: Unknown API exception"}
/opt/hostedtoolcache/Python/3.12.8/x64/lib/python3.12/site-packages/nuclia/lib/nua.py:412: NuaAPIException
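Analysis: all three failures come from the same parametrized model (chatgpt-vision) on europe-1.stashify.cloud and the same upstream 412, which points at the provider rather than the SDK or the tests. One possible triage (a sketch, not the repo's current policy) is to mark the known-broken model as xfail in the ALL_LLMS parametrization so regressions in other models stay visible:

import pytest

KNOWN_BROKEN_MODELS = {"chatgpt-vision"}  # assumption based on this report only

def parametrize_models(all_llms):
    # Wrap broken models in pytest.param(..., marks=xfail) so their failures are
    # reported as expected rather than masking new regressions elsewhere.
    return [
        pytest.param(m, marks=pytest.mark.xfail(reason="provider returns 412: Unknown LLM exception"))
        if m in KNOWN_BROKEN_MODELS
        else m
        for m in all_llms
    ]

# Usage in the e2e modules (ALL_LLMS assumed from the test files):
# @pytest.mark.parametrize("model", parametrize_models(ALL_LLMS))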