
Commit 55477e0

Add functional test for /conversation/custom endpoint (Azure-Samples#504)
* Add functional test for /conversation/custom endpoint
  - Allow the disabling of app insights and of loading config from blob storage, to make testing easier
  - Switch to `Thread` from `Process` for running the app
    - This is to stop pytest calling the session-scoped fixture multiple times
    - It also fixes an issue where we were not seeing app logs
  - Set up the mock server as https, due to the azure libraries enforcing it
    - This introduced additional fixtures and the `trustme` dependency
  - Commit the encoding file for tiktoken
    - This is to stop it making an HTTP call over the internet to retrieve it
  - Required by Azure-Samples#420
* Parameterize tests with pytest.mark.parametrize
* Improve test logging
* Extract boolean env var behaviour
* Add note about tiktoken issue
* Allow tests to be run from anywhere
1 parent 2e5cdac commit 55477e0

File tree

12 files changed: +100,558 −65 lines


code/backend/batch/utilities/helpers/ConfigHelper.py

+13 −7

@@ -70,13 +70,19 @@ def __init__(self, logging: dict):
 class ConfigHelper:
     @staticmethod
     def get_active_config_or_default():
-        try:
-            blob_client = AzureBlobStorageClient(container_name=CONFIG_CONTAINER_NAME)
-            config = blob_client.download_file("active.json")
-            config = Config(json.loads(config))
-        except Exception:
-            print("Returning default config")
-            config = ConfigHelper.get_default_config()
+        env_helper = EnvHelper()
+        config = ConfigHelper.get_default_config()
+
+        if env_helper.LOAD_CONFIG_FROM_BLOB_STORAGE:
+            try:
+                blob_client = AzureBlobStorageClient(
+                    container_name=CONFIG_CONTAINER_NAME
+                )
+                config_file = blob_client.download_file("active.json")
+                config = Config(json.loads(config_file))
+            except Exception:
+                print("Returning default config")
+
         return config
 
     @staticmethod
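For reference, a standalone sketch of the new control flow (names mirror the diff; the real logic lives in ConfigHelper.get_active_config_or_default, and the two callables below stand in for the blob download and ConfigHelper.get_default_config). Because EnvHelper reads the flag with a default of "True", production deployments keep loading active.json from blob storage, while the functional tests set it to "false" to stay offline:

import os


def get_active_config_or_default(download_active_config, get_default_config):
    # Start from the default config and only try blob storage when the flag allows it.
    config = get_default_config()

    if os.getenv("LOAD_CONFIG_FROM_BLOB_STORAGE", "True").lower() == "true":
        try:
            config = download_active_config()
        except Exception:
            print("Returning default config")

    return config


# With the flag set to "false" (as in the functional tests) the download is never
# attempted, so even a failing downloader leaves the default config in place.
os.environ["LOAD_CONFIG_FROM_BLOB_STORAGE"] = "false"
assert get_active_config_or_default(lambda: 1 / 0, lambda: "default") == "default"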

code/backend/batch/utilities/helpers/EnvHelper.py

+9

@@ -128,6 +128,8 @@ def __init__(self, **kwargs) -> None:
             "AZURE_FORM_RECOGNIZER_KEY"
         )
         # Azure App Insights
+        self.APPINSIGHTS_ENABLED = self.get_env_var_bool("APPINSIGHTS_ENABLED")
+
         self.APPINSIGHTS_CONNECTION_STRING = os.getenv(
             "APPINSIGHTS_CONNECTION_STRING", ""
         )
@@ -150,6 +152,10 @@ def __init__(self, **kwargs) -> None:
         # Speech Service
         self.AZURE_SPEECH_SERVICE_REGION = os.getenv("AZURE_SPEECH_SERVICE_REGION")
 
+        self.LOAD_CONFIG_FROM_BLOB_STORAGE = self.get_env_var_bool(
+            "LOAD_CONFIG_FROM_BLOB_STORAGE"
+        )
+
     def should_use_data(self) -> bool:
         if (
             self.AZURE_SEARCH_SERVICE
@@ -164,6 +170,9 @@ def is_chat_model(self):
             return True
         return False
 
+    def get_env_var_bool(self, var_name: str, default: str = "True") -> bool:
+        return os.getenv(var_name, default).lower() == "true"
+
     @staticmethod
     def check_env():
         for attr, value in EnvHelper().__dict__.items():
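A quick illustration of the get_env_var_bool helper added above: the default is the string "True" and the comparison is case-insensitive, so an unset variable counts as enabled and "False"/"false" both disable a feature (which is why APPINSIGHTS_ENABLED and LOAD_CONFIG_FROM_BLOB_STORAGE can use either casing in the test config):

import os


def get_env_var_bool(var_name: str, default: str = "True") -> bool:
    # Same one-liner as the method added to EnvHelper above.
    return os.getenv(var_name, default).lower() == "true"


# Assuming the variable is not already set, the default "True" applies.
assert get_env_var_bool("APPINSIGHTS_ENABLED") is True

os.environ["APPINSIGHTS_ENABLED"] = "False"
os.environ["LOAD_CONFIG_FROM_BLOB_STORAGE"] = "false"
assert get_env_var_bool("APPINSIGHTS_ENABLED") is False
assert get_env_var_bool("LOAD_CONFIG_FROM_BLOB_STORAGE") is False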

code/backend/batch/utilities/loggers/TokenLogger.py

+6 −3

@@ -7,9 +7,12 @@ class TokenLogger:
     def __init__(self, name: str = __name__):
         env_helper: EnvHelper = EnvHelper()
         self.logger = logging.getLogger(name)
-        self.logger.addHandler(
-            AzureLogHandler(connection_string=env_helper.APPINSIGHTS_CONNECTION_STRING)
-        )
+        if env_helper.APPINSIGHTS_ENABLED:
+            self.logger.addHandler(
+                AzureLogHandler(
+                    connection_string=env_helper.APPINSIGHTS_CONNECTION_STRING
+                )
+            )
         self.logger.setLevel(logging.INFO)
 
     def get_logger(self):

code/tests/functional/backend_api/app_config.py

+11 −3

@@ -1,3 +1,4 @@
+import logging
 import os
 from typing import Any, Dict
 
@@ -6,6 +7,13 @@ class AppConfig:
     config: Dict[str, Any] = {
         "AZURE_SPEECH_SERVICE_KEY": "some-azure-speech-service-key",
         "AZURE_SPEECH_SERVICE_REGION": "some-azure-speech-service-region",
+        "APPINSIGHTS_ENABLED": "False",
+        "AZURE_OPENAI_API_KEY": "some-azure-openai-api-key",
+        "AZURE_SEARCH_KEY": "some-azure-search-key",
+        "AZURE_OPENAI_EMBEDDING_MODEL": "some-embedding-model",
+        "AZURE_OPENAI_MODEL": "some-openai-model",
+        "LOAD_CONFIG_FROM_BLOB_STORAGE": "false",
+        "TIKTOKEN_CACHE_DIR": f"{os.path.dirname(os.path.realpath(__file__))}/resources",
     }
 
     def __init__(self, config_overrides: Dict[str, Any] = {}) -> None:
@@ -23,13 +31,13 @@ def get_all(self) -> Dict[str, Any]:
     def apply_to_environment(self) -> None:
         for key, value in self.config.items():
             if value is not None:
-                print(f"Applying env var: {key}={value}")
+                logging.info(f"Applying env var: {key}={value}")
                 os.environ[key] = value
             else:
-                print(f"Removing env var: {key}")
+                logging.info(f"Removing env var: {key}")
                 os.environ.pop(key, None)
 
     def remove_from_environment(self) -> None:
         for key in self.config.keys():
-            print(f"Removing env var: {key}")
+            logging.info(f"Removing env var: {key}")
             os.environ.pop(key, None)
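A hedged usage sketch of AppConfig (module path as imported in conftest.py below; it assumes constructor overrides replace the matching defaults, which is how the session fixture uses the class). Note that TIKTOKEN_CACHE_DIR points tiktoken at the encoding file committed under resources/, so the tests avoid the outbound HTTP fetch mentioned in the commit message:

from tests.functional.backend_api.app_config import AppConfig

# "my-test-deployment" is a made-up override; unspecified keys keep the defaults
# from the class-level config dict above.
app_config = AppConfig({"AZURE_OPENAI_MODEL": "my-test-deployment"})

assert app_config.get("APPINSIGHTS_ENABLED") == "False"
assert app_config.get("AZURE_OPENAI_MODEL") == "my-test-deployment"

app_config.apply_to_environment()     # exports every key as an environment variable
app_config.remove_from_environment()  # pops the same keys back out of os.environ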

code/tests/functional/backend_api/conftest.py

+141 −39

@@ -1,69 +1,171 @@
-from multiprocessing import Process
+import logging
 import socket
+import ssl
+import threading
 import time
 import pytest
 from pytest_httpserver import HTTPServer
 import requests
-from app import app
 from tests.functional.backend_api.app_config import AppConfig
+from threading import Thread
+import trustme
+import importlib
+from app import app as flask_app
+import app
 
 
-@pytest.fixture(scope="module")
-def app_port() -> int:
-    print("Getting free port")
-    return get_free_port()
+@pytest.fixture(scope="session")
+def ca():
+    return trustme.CA()
 
 
-@pytest.fixture(scope="module")
-def app_url(app_port: int) -> int:
-    return f"http://localhost:{app_port}"
+@pytest.fixture(scope="session")
+def httpserver_ssl_context(ca):
+    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+    localhost_cert = ca.issue_cert("localhost")
+    localhost_cert.configure_cert(context)
+    return context
 
 
-@pytest.fixture(scope="module")
-def mock_httpserver(make_httpserver):
-    """
-    This is required as the default pytest httpserver fixture is scoped at the function level
-    """
-    print("Starting HTTP Mock Server")
-    server = make_httpserver
-    yield server
-    print("Stopping HTTP Mock Server")
-    server.clear()
+@pytest.fixture(scope="session")
+def httpclient_ssl_context(ca):
+    with ca.cert_pem.tempfile() as ca_temp_path:
+        return ssl.create_default_context(cafile=ca_temp_path)
+
+
+@pytest.fixture(scope="session")
+def app_port() -> int:
+    logging.info("Getting free port")
+    return get_free_port()
 
 
-@pytest.fixture(scope="module")
-def app_config(mock_httpserver: HTTPServer) -> AppConfig:
-    return AppConfig({"AZURE_OPENAI_ENDPOINT": mock_httpserver.url_for("/")})
+@pytest.fixture(scope="session")
+def app_url(app_port: int) -> int:
+    return f"http://localhost:{app_port}"
 
 
-@pytest.fixture(scope="module", autouse=True)
+@pytest.fixture(scope="session")
+def app_config(make_httpserver, ca):
+    logging.info("Creating APP CONFIG")
+    with ca.cert_pem.tempfile() as ca_temp_path:
+        app_config = AppConfig(
+            {
+                "AZURE_OPENAI_ENDPOINT": f"https://localhost:{make_httpserver.port}",
+                "AZURE_SEARCH_SERVICE": f"https://localhost:{make_httpserver.port}",
+                "AZURE_CONTENT_SAFETY_ENDPOINT": f"https://localhost:{make_httpserver.port}",
+                "SSL_CERT_FILE": ca_temp_path,
+                "CURL_CA_BUNDLE": ca_temp_path,
+            }
+        )
+        logging.info(f"Created app config: {app_config.get_all()}")
+        yield app_config
+
+
+@pytest.fixture(scope="session", autouse=True)
 def manage_app(app_port: int, app_config: AppConfig):
     app_config.apply_to_environment()
-    app_process = start_app(app_port)
+    start_app(app_port)
     yield
-    stop_app(app_process)
     app_config.remove_from_environment()
 
 
-def start_app(port: int) -> Process:
-    print(f"Starting application on port {port}")
-    proc = Process(target=app.run, kwargs={"port": port, "debug": True})
-    proc.start()
-    wait_for_app(port)
-    print("Application started")
-    return proc
+@pytest.fixture(scope="function", autouse=True)
+def setup_default_mocking(httpserver: HTTPServer, app_config: AppConfig):
+    httpserver.expect_request(
+        f"/openai/deployments/{app_config.get('AZURE_OPENAI_EMBEDDING_MODEL')}/embeddings",
+        query_string="api-version=2023-12-01-preview",
+        method="POST",
+    ).respond_with_json(
+        {
+            "object": "list",
+            "data": [
+                {
+                    "object": "embedding",
+                    "embedding": [0.018990106880664825, -0.0073809814639389515],
+                    "index": 0,
+                }
+            ],
+            "model": "text-embedding-ada-002",
+        }
+    )
+
+    httpserver.expect_request(
+        "/indexes('conversations')",
+        query_string="api-version=2023-11-01",
+        method="GET",
+    ).respond_with_json({})
+
+    httpserver.expect_request(
+        "/contentsafety/text:analyze",
+        query_string="api-version=2023-10-01",
+        method="POST",
+    ).respond_with_json(
+        {
+            "blocklistsMatch": [],
+            "categoriesAnalysis": [],
+        }
+    )
+
+    httpserver.expect_request(
+        f"/openai/deployments/{app_config.get('AZURE_OPENAI_MODEL')}/chat/completions",
+        query_string="api-version=2023-12-01-preview",
+        method="POST",
+    ).respond_with_json(
+        {
+            "id": "chatcmpl-6v7mkQj980V1yBec6ETrKPRqFjNw9",
+            "object": "chat.completion",
+            "created": 1679072642,
+            "model": "gpt-35-turbo",
+            "usage": {
+                "prompt_tokens": 58,
+                "completion_tokens": 68,
+                "total_tokens": 126,
+            },
+            "choices": [
+                {
+                    "message": {
+                        "role": "assistant",
+                        "content": "42 is the meaning of life",
+                    },
+                    "finish_reason": "stop",
+                    "index": 0,
+                }
+            ],
+        }
+    )
+
+    httpserver.expect_request(
+        "/indexes('conversations')/docs/search.index",
+        query_string="api-version=2023-11-01",
+        method="POST",
+    ).respond_with_json(
+        {
+            "value": [
+                {"key": "1", "status": True, "errorMessage": None, "statusCode": 201}
+            ]
+        }
+    )
 
+    yield
 
-def stop_app(proc: Process):
-    print("Shutting down application")
-    proc.terminate()
-    proc.join()  # Wait until the process is fully shut down
-    print("Application shut down")
+    httpserver.check()
 
 
-def wait_for_app(port: int):
-    attempts = 0
+def start_app(app_port: int) -> Thread:
+    logging.info(f"Starting application on port {app_port}")
+    # ensure app is reloaded now that new environment variables are set
+    importlib.reload(app)
+    app_process = threading.Thread(target=lambda: flask_app.run(port=app_port))
+    app_process.daemon = True
+    app_process.start()
+    wait_for_app(app_port)
+    logging.info("Application started")
+    return app_process
+
 
+def wait_for_app(port: int, initial_check_delay: int = 10):
+    attempts = 0
+    time.sleep(initial_check_delay)
     while attempts < 10:
         try:
             response = requests.get(f"http://localhost:{port}/api/config")