2
2
Test ai API endpoints in the impress core app.
3
3
"""
4
4
5
- from unittest .mock import MagicMock , patch
5
+ from unittest .mock import patch
6
6
7
7
from django .core .exceptions import ImproperlyConfigured
8
- from django .test .utils import override_settings
9
8
10
9
import pytest
11
10
from openai import OpenAIError
15
14
pytestmark = pytest .mark .django_db
16
15
17
16
17
@pytest.fixture(autouse=True)
def ai_settings(settings):
    """Configure every AI-related Django setting needed by the tests in this module.

    Applied automatically (autouse) so each test starts from a fully
    configured AI service; individual tests override single settings as needed.
    """
    # Assign from a mapping so the full set of required settings reads as one unit.
    configured = {
        "AI_MODEL": "llama",
        "AI_BASE_URL": "http://example.com",
        "AI_API_KEY": "test-key",
        "AI_FEATURE_ENABLED": True,
    }
    for name, value in configured.items():
        setattr(settings, name, value)
18
26
@pytest .mark .parametrize (
19
27
"setting_name, setting_value" ,
20
28
[
23
31
("AI_MODEL" , None ),
24
32
],
25
33
)
26
def test_services_ai_setting_missing(setting_name, setting_value, settings):
    """Instantiating AIService must fail when a required AI setting is blank.

    The autouse fixture configures all AI settings; here exactly one of them
    (parametrized) is cleared, and construction must raise ImproperlyConfigured.
    """
    # Blank out only the setting under test.
    setattr(settings, setting_name, setting_value)

    expectation = pytest.raises(
        ImproperlyConfigured,
        match="AI configuration not set",
    )
    with expectation:
        AIService()
35
43
36
44
37
- @override_settings (
38
- AI_BASE_URL = "http://example.com" , AI_API_KEY = "test-key" , AI_MODEL = "test-model"
39
- )
40
45
@patch ("openai.resources.chat.completions.Completions.create" )
41
- def test_api_ai__client_error (mock_create ):
46
+ def test_services_ai_proxy_client_error (mock_create ):
42
47
"""Fail when the client raises an error"""
43
48
44
49
mock_create .side_effect = OpenAIError ("Mocked client error" )
@@ -50,15 +55,11 @@ def test_api_ai__client_error(mock_create):
50
55
AIService ().proxy ({"messages" : [{"role" : "user" , "content" : "hello" }]})
51
56
52
57
53
- @override_settings (
54
- AI_BASE_URL = "http://example.com" , AI_API_KEY = "test-key" , AI_MODEL = "test-model"
55
- )
56
58
@patch ("openai.resources.chat.completions.Completions.create" )
57
- def test_api_ai__success (mock_create ):
59
+ def test_services_ai_proxy_success (mock_create ):
58
60
"""The AI request should work as expect when called with valid arguments."""
59
61
60
- mock_response = MagicMock ()
61
- mock_response .model_dump .return_value = {
62
+ mock_create .return_value = {
62
63
"id" : "chatcmpl-test" ,
63
64
"object" : "chat.completion" ,
64
65
"created" : 1234567890 ,
@@ -71,7 +72,6 @@ def test_api_ai__success(mock_create):
71
72
}
72
73
],
73
74
}
74
- mock_create .return_value = mock_response
75
75
76
76
response = AIService ().proxy ({"messages" : [{"role" : "user" , "content" : "hello" }]})
77
77
@@ -89,3 +89,47 @@ def test_api_ai__success(mock_create):
89
89
],
90
90
}
91
91
assert response == expected_response
92
+ mock_create .assert_called_once_with (
93
+ messages = [{"role" : "user" , "content" : "hello" }], stream = False
94
+ )
95
+
96
+
97
+ @patch ("openai.resources.chat.completions.Completions.create" )
98
+ def test_services_ai_proxy_with_stream (mock_create ):
99
+ """The AI request should work as expect when called with valid arguments."""
100
+
101
+ mock_create .return_value = {
102
+ "id" : "chatcmpl-test" ,
103
+ "object" : "chat.completion" ,
104
+ "created" : 1234567890 ,
105
+ "model" : "test-model" ,
106
+ "choices" : [
107
+ {
108
+ "index" : 0 ,
109
+ "message" : {"role" : "assistant" , "content" : "Salut" },
110
+ "finish_reason" : "stop" ,
111
+ }
112
+ ],
113
+ }
114
+
115
+ response = AIService ().proxy (
116
+ {"messages" : [{"role" : "user" , "content" : "hello" }]}, stream = True
117
+ )
118
+
119
+ expected_response = {
120
+ "id" : "chatcmpl-test" ,
121
+ "object" : "chat.completion" ,
122
+ "created" : 1234567890 ,
123
+ "model" : "test-model" ,
124
+ "choices" : [
125
+ {
126
+ "index" : 0 ,
127
+ "message" : {"role" : "assistant" , "content" : "Salut" },
128
+ "finish_reason" : "stop" ,
129
+ }
130
+ ],
131
+ }
132
+ assert response == expected_response
133
+ mock_create .assert_called_once_with (
134
+ messages = [{"role" : "user" , "content" : "hello" }], stream = True
135
+ )
0 commit comments