diff --git a/crowdin_api/api_resources/__init__.py b/crowdin_api/api_resources/__init__.py index 78e6102..11a5888 100644 --- a/crowdin_api/api_resources/__init__.py +++ b/crowdin_api/api_resources/__init__.py @@ -1,3 +1,4 @@ +from .ai.resource import AIResource, EnterpriseAIResource from .application.resource import ApplicationResource from .bundles.resource import BundlesResource from .dictionaries.resource import DictionariesResource @@ -28,6 +29,8 @@ from .workflows.resource import WorkflowsResource __all__ = [ + "AIResource", + "EnterpriseAIResource", "ApplicationResource", "BundlesResource", "DictionariesResource", diff --git a/crowdin_api/api_resources/ai/__init__.py b/crowdin_api/api_resources/ai/__init__.py new file mode 100644 index 0000000..9f77d63 --- /dev/null +++ b/crowdin_api/api_resources/ai/__init__.py @@ -0,0 +1 @@ +__pdoc__ = {'tests': False} diff --git a/crowdin_api/api_resources/ai/enums.py b/crowdin_api/api_resources/ai/enums.py new file mode 100644 index 0000000..c4c16c9 --- /dev/null +++ b/crowdin_api/api_resources/ai/enums.py @@ -0,0 +1,39 @@ +from enum import Enum + + +class AIPromptAction(Enum): + ASSIST = "assist" + PRE_TRANSLATE = "pre_translate" + + +class AIPromptOperation(Enum): + REPLACE = "replace" + TEST = "test" + + +class EditAIPromptPath(Enum): + NAME = "/name" + ACTION = "/action" + AI_PROVIDER_ID = "/aiProviderId" + AI_MODEL_ID = "/aiModelId" + IS_ENABLED = "/isEnabled" + ENABLED_PROJECT_IDS = "/enabledProjectIds" + CONFIG = "/config" + + +class AIProviderType(Enum): + OPEN_AI = "open_ai" + AZUER_OPEN_AI = "azure_open_ai" + GOOGLE_GEMINI = "google_gemini" + MISTRAL_AI = "mistral_ai" + ANTHROPIC = "anthropic" + CUSTOM_AI = "custom_ai" + + +class EditAIProviderPath(Enum): + NAME = "/name" + TYPE = "/type" + CREDENTIALS = "/credentials" + CONFIG = "/config" + IS_ENABLED = "/isEnabled" + USE_SYSTEM_CREDENTIALS = "/useSystemCredentials" diff --git a/crowdin_api/api_resources/ai/resource.py b/crowdin_api/api_resources/ai/resource.py new file mode 100644 index 0000000..47c2ba2 --- /dev/null +++ b/crowdin_api/api_resources/ai/resource.py @@ -0,0 +1,408 @@ +from typing import Iterable, Optional, Union + +from crowdin_api.api_resources.abstract.resources import BaseResource +from crowdin_api.api_resources.ai.enums import AIPromptAction +from crowdin_api.api_resources.ai.types import ( + AddAIPromptRequestScheme, + AddAIProviderReqeustScheme, + EditAIPromptScheme, + EditAIProviderRequestScheme, + GoogleGeminiChatProxy, + OtherChatProxy, +) + + +class AIResource(BaseResource): + """ + Resource for AI. 
+ + Link to documentation: + https://developer.crowdin.com/api/v2/#tag/AI + """ + + def get_ai_path(self, userId: int, aiPromptId: Optional[int] = None): + if aiPromptId is not None: + return f"users/{userId}/ai/prompts/{aiPromptId}" + return f"users/{userId}/ai/prompts" + + def get_ai_provider_path(self, userId: int, aiProviderId: Optional[int] = None): + if aiProviderId is not None: + return f"users/{userId}/ai/providers/{aiProviderId}" + return f"users/{userId}/ai/providers" + + def list_ai_prompts( + self, + userId: int, + projectId: Optional[int] = None, + action: Optional[AIPromptAction] = None, + limit: Optional[int] = None, + offset: Optional[int] = None, + ): + """ + List AI Prompts + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.ai.prompts.getMany + """ + params = {"projectId": projectId, "action": action} + params.update(self.get_page_params(limit=limit, offset=offset)) + + return self.requester.request( + method="get", path=self.get_ai_path(userId=userId), params=params + ) + + def add_ai_prompt(self, userId: int, request_data: AddAIPromptRequestScheme): + """ + Add AI Prompt + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.prompts.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_path(userId=userId), + request_data=request_data, + ) + + def get_ai_prompt(self, userId: int, aiPromptId: int): + """ + Get AI Prompt + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.prompts.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_path(userId=userId, aiPromptId=aiPromptId), + ) + + def delete_ai_prompt(self, userId: int, aiPromptId: int): + """ + Delete AI Prompt + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.prompts.delete + """ + + return self.requester.request( + method="delete", + path=self.get_ai_path(userId=userId, aiPromptId=aiPromptId), + ) + + def edit_ai_prompt( + self, userId: int, aiPromptId: int, request_data: Iterable[EditAIPromptScheme] + ): + """ + Edit AI Prompt + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.prompts.patch + """ + + return self.requester.request( + method="patch", + path=self.get_ai_path(userId=userId, aiPromptId=aiPromptId), + request_data=request_data, + ) + + def list_ai_providers( + self, + userId: int, + limit: Optional[int] = None, + offset: Optional[int] = None, + ): + """ + List AI Providers + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.ai.providers.getMany + """ + params = self.get_page_params(limit=limit, offset=offset) + return self.requester.request( + method="get", path=self.get_ai_provider_path(userId=userId), params=params + ) + + def add_ai_provider(self, userId: int, request_data: AddAIProviderReqeustScheme): + """ + Add AI Provider + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.providers.post + """ + return self.requester.request( + method="post", + path=self.get_ai_provider_path(userId=userId), + request_data=request_data, + ) + + def get_ai_provider(self, userId: int, aiProviderId: int): + """ + Get AI Provider + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.providers.get + """ + return self.requester.request( + method="get", + path=self.get_ai_provider_path(userId=userId, aiProviderId=aiProviderId), + ) + + def delete_ai_provider(self, userId: int, aiProviderId: 
int): + """ + Delete AI Provider + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.providers.delete + """ + return self.requester.request( + method="delete", + path=self.get_ai_provider_path(userId=userId, aiProviderId=aiProviderId), + ) + + def edit_ai_provider( + self, userId: int, aiProviderId: int, request_data: EditAIProviderRequestScheme + ): + """ + Edit AI Provider + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.providers.patch + """ + return self.requester.request( + method="patch", + path=self.get_ai_provider_path(userId=userId, aiProviderId=aiProviderId), + request_data=request_data, + ) + + def list_ai_provider_models(self, userId: int, aiProviderId: int): + """ + List AI Provider Models + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.ai.providers.models.getMany + """ + return self.requester.request( + method="get", + path=self.get_ai_provider_path(userId=userId, aiProviderId=aiProviderId) + + "/models", + ) + + def create_ai_proxy_chat_completion( + self, + userId: int, + aiProviderId: int, + request_data: Union[GoogleGeminiChatProxy, OtherChatProxy], + ): + """ + Create AI Proxy Chat Completion + + This API method serves as an intermediary, forwarding your requests directly to the selected provider. + Please refer to the documentation for the specific provider you use to determine the required payload format. + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.users.ai.providers.chat.completions.post + """ + return self.requester.request( + method="post", + path=self.get_ai_provider_path(userId=userId, aiProviderId=aiProviderId) + + "/chat/completions", + request_data=request_data, + ) + + +class EnterpriseAIResource(BaseResource): + """ + Enterprise Resource for AI. 
+ + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#tag/AI + """ + + def get_ai_path(self, aiPromptId: Optional[int] = None): + if aiPromptId is not None: + return f"ai/prompts/{aiPromptId}" + return "ai/prompts" + + def get_ai_provider_path(self, aiProviderId: Optional[int] = None): + if aiProviderId is not None: + return f"ai/providers/{aiProviderId}" + return "ai/providers" + + def list_ai_prompts( + self, + projectId: Optional[int] = None, + action: Optional[AIPromptAction] = None, + limit: Optional[int] = None, + offset: Optional[int] = None, + ): + """ + List AI Prompts + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.prompts.getMany + """ + params = {"projectId": projectId, "action": action} + params.update(self.get_page_params(limit=limit, offset=offset)) + + return self.requester.request( + method="get", path=self.get_ai_path(), params=params + ) + + def add_ai_prompt(self, request_data: AddAIPromptRequestScheme): + """ + Add AI Prompt + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.prompts.post + """ + + return self.requester.request( + method="post", + path=self.get_ai_path(), + request_data=request_data, + ) + + def get_ai_prompt(self, aiPromptId: int): + """ + Get AI Prompt + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.prompts.get + """ + + return self.requester.request( + method="get", + path=self.get_ai_path(aiPromptId=aiPromptId), + ) + + def delete_ai_prompt(self, aiPromptId: int): + """ + Delete AI Prompt + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.prompts.delete + """ + + return self.requester.request( + method="delete", + path=self.get_ai_path(aiPromptId=aiPromptId), + ) + + def edit_ai_prompt( + self, aiPromptId: int, request_data: Iterable[EditAIPromptScheme] + ): + """ + Edit AI Prompt + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.prompts.patch + """ + + return self.requester.request( + method="patch", + path=self.get_ai_path(aiPromptId=aiPromptId), + request_data=request_data, + ) + + def list_ai_providers( + self, + limit: Optional[int] = None, + offset: Optional[int] = None, + ): + """ + List AI Providers + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.providers.getMany + """ + params = self.get_page_params(limit=limit, offset=offset) + return self.requester.request( + method="get", path=self.get_ai_provider_path(), params=params + ) + + def add_ai_provider(self, request_data: AddAIProviderReqeustScheme): + """ + Add AI Provider + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.providers.post + """ + return self.requester.request( + method="post", + path=self.get_ai_provider_path(), + request_data=request_data, + ) + + def get_ai_provider(self, aiProviderId: int): + """ + Get AI Provider + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.providers.get + """ + return self.requester.request( + method="get", + path=self.get_ai_provider_path(aiProviderId=aiProviderId), + ) + + def delete_ai_provider(self, aiProviderId: int): + """ + Delete AI Provider + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.providers.delete + """ + return self.requester.request( + method="delete", + 
path=self.get_ai_provider_path(aiProviderId=aiProviderId), + ) + + def edit_ai_provider( + self, aiProviderId: int, request_data: EditAIProviderRequestScheme + ): + """ + Edit AI Provider + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.providers.patch + """ + return self.requester.request( + method="patch", + path=self.get_ai_provider_path(aiProviderId=aiProviderId), + request_data=request_data, + ) + + def list_ai_provider_models(self, aiProviderId: int): + """ + List AI Provider Models + + Link to documentation: + https://developer.crowdin.com/api/v2/#operation/api.ai.providers.models.getMany + """ + return self.requester.request( + method="get", + path=self.get_ai_provider_path(aiProviderId=aiProviderId) + "/models", + ) + + def create_ai_proxy_chat_completion( + self, + aiProviderId: int, + request_data: Union[GoogleGeminiChatProxy, OtherChatProxy], + ): + """ + Create AI Proxy Chat Completion + + This API method serves as an intermediary, forwarding your requests directly to the selected provider. + Please refer to the documentation for the specific provider you use to determine the required payload format. + + Link to documentation: + https://developer.crowdin.com/enterprise/api/v2/#operation/api.ai.providers.chat.completions.post + """ + return self.requester.request( + method="post", + path=self.get_ai_provider_path(aiProviderId=aiProviderId) + + "/chat/completions", + request_data=request_data, + ) diff --git a/crowdin_api/api_resources/ai/tests/test_ai_resources.py b/crowdin_api/api_resources/ai/tests/test_ai_resources.py new file mode 100644 index 0000000..f43e8c6 --- /dev/null +++ b/crowdin_api/api_resources/ai/tests/test_ai_resources.py @@ -0,0 +1,772 @@ +from unittest import mock + +import pytest +from crowdin_api.api_resources.ai.enums import AIPromptAction, AIProviderType +from crowdin_api.api_resources.ai.resource import AIResource, EnterpriseAIResource +from crowdin_api.api_resources.ai.types import AIPromptOperation, EditAIPromptPath +from crowdin_api.requester import APIRequester + + +class TestAIResources: + resource_class = AIResource + + def get_resource(self, base_absolut_url): + return self.resource_class(requester=APIRequester(base_url=base_absolut_url)) + + def test_resource_with_id(self, base_absolut_url): + project_id = 1 + resource = self.resource_class( + requester=APIRequester(base_url=base_absolut_url), project_id=project_id + ) + assert resource.get_project_id() == project_id + + @pytest.mark.parametrize( + "in_params, path", + ( + ({"userId": 1}, "users/1/ai/prompts"), + ({"userId": 1, "aiPromptId": 2}, "users/1/ai/prompts/2"), + ), + ) + def test_get_ai_path(self, in_params, path, base_absolut_url): + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_path(**in_params) == path + + @pytest.mark.parametrize( + "in_params, path", + ( + ({"userId": 1}, "users/1/ai/providers"), + ({"userId": 1, "aiProviderId": 2}, "users/1/ai/providers/2"), + ), + ) + def test_get_ai_provider_path(self, in_params, path, base_absolut_url): + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_provider_path(**in_params) == path + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + {}, + { + "projectId": None, + "action": None, + "limit": 25, + "offset": 0, + }, + ), + ( + { + "projectId": 1, + "action": AIPromptAction.ASSIST, + "limit": 20, + "offset": 2, + }, + { + "projectId": 1, + "action": AIPromptAction.ASSIST, + "limit": 20, + "offset": 2, + }, + ), + ), + ) + 
@mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_prompts( + self, m_request, incoming_data, request_params, base_absolut_url + ): + m_request.return_value = "response" + + userId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_prompts(userId=userId, **incoming_data) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_path(userId=userId), + params=request_params, + ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + ), + ( + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "isEnabled": False, + "enabledProjectIds": [1, 2, 3], + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "isEnabled": False, + "enabledProjectIds": [1, 2, 3], + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_add_ai_prompt( + self, m_request, incoming_data, request_data, base_absolut_url + ): + m_request.return_value = "response" + + userId = 1 + resource = self.get_resource(base_absolut_url) + assert ( + resource.add_ai_prompt(userId=userId, request_data=incoming_data) + == "response" + ) + m_request.assert_called_once_with( + method="post", + path=resource.get_ai_path(userId=userId), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiPromptId = 2 + resource = self.get_resource(base_absolut_url) + assert ( + resource.get_ai_prompt(userId=userId, aiPromptId=aiPromptId) == "response" + ) + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_path(userId=userId, aiPromptId=aiPromptId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_delete_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiPromptId = 2 + resource = self.get_resource(base_absolut_url) + assert ( + resource.delete_ai_prompt(userId=userId, aiPromptId=aiPromptId) + == "response" + ) + m_request.assert_called_once_with( + method="delete", + path=resource.get_ai_path(userId=userId, aiPromptId=aiPromptId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiPromptId = 2 + request_data = [ + { + "op": AIPromptOperation.REPLACE, + "path": EditAIPromptPath.NAME, + "value": "test", + } + ] + resource = self.get_resource(base_absolut_url) + assert ( + resource.edit_ai_prompt( + userId=userId, aiPromptId=aiPromptId, request_data=request_data + ) + == "response" + ) + m_request.assert_called_once_with( + method="patch", + path=resource.get_ai_path(userId=userId, aiPromptId=aiPromptId), + request_data=request_data, + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + {}, + { + 
"limit": 25, + "offset": 0, + }, + ), + ( + { + "limit": 20, + "offset": 2, + }, + { + "limit": 20, + "offset": 2, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_providers( + self, m_request, incoming_data, request_params, base_absolut_url + ): + m_request.return_value = "response" + + userId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_providers(userId=userId, **incoming_data) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(userId=userId), + params=request_params, + ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + {"name": "basic", "type": AIProviderType.OPEN_AI}, + {"name": "basic", "type": AIProviderType.OPEN_AI}, + ), + ( + { + "name": "basic", + "type": AIProviderType.OPEN_AI, + "credentials": {"apiKey": "test-api-key"}, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "enabledProjectIds": [1, 2, 3], + "config": { + "actionRules": [ + { + "action": AIPromptAction.PRE_TRANSLATE, + "availableAiModelIds": ["gpt-3.5-turbo-instruct"], + } + ] + }, + "isEnabled": True, + "useSystemCredentials": False, + }, + { + "name": "basic", + "type": AIProviderType.OPEN_AI, + "credentials": {"apiKey": "test-api-key"}, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "enabledProjectIds": [1, 2, 3], + "config": { + "actionRules": [ + { + "action": AIPromptAction.PRE_TRANSLATE, + "availableAiModelIds": ["gpt-3.5-turbo-instruct"], + } + ] + }, + "isEnabled": True, + "useSystemCredentials": False, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_add_ai_provider( + self, m_request, incoming_data, request_data, base_absolut_url + ): + m_request.return_value = "response" + + userId = 1 + resource = self.get_resource(base_absolut_url) + assert ( + resource.add_ai_provider(userId=userId, request_data=incoming_data) + == "response" + ) + m_request.assert_called_once_with( + method="post", + path=resource.get_ai_provider_path(userId=userId), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiProviderId = 2 + resource = self.get_resource(base_absolut_url) + assert ( + resource.get_ai_provider(userId=userId, aiProviderId=aiProviderId) + == "response" + ) + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path( + userId=userId, aiProviderId=aiProviderId + ), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_delete_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiProviderId = 2 + resource = self.get_resource(base_absolut_url) + assert ( + resource.delete_ai_provider(userId=userId, aiProviderId=aiProviderId) + == "response" + ) + m_request.assert_called_once_with( + method="delete", + path=resource.get_ai_provider_path( + userId=userId, aiProviderId=aiProviderId + ), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiProviderId = 2 + request_data = [ + { + "op": AIPromptOperation.REPLACE, + "path": EditAIPromptPath.NAME, + "value": "test", + } + ] + resource = self.get_resource(base_absolut_url) + assert ( + resource.edit_ai_provider( + userId=userId, 
aiProviderId=aiProviderId, request_data=request_data + ) + == "response" + ) + m_request.assert_called_once_with( + method="patch", + path=resource.get_ai_provider_path( + userId=userId, aiProviderId=aiProviderId + ), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_provider_models(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiProviderId = 2 + resource = self.get_resource(base_absolut_url) + assert ( + resource.list_ai_provider_models(userId=userId, aiProviderId=aiProviderId) + == "response" + ) + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(userId=userId, aiProviderId=aiProviderId) + + "/models", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_create_ai_proxy_chat_completion(self, m_request, base_absolut_url): + m_request.return_value = "response" + + userId = 1 + aiProviderId = 2 + request_data = {"model": "string", "stream": True} + resource = self.get_resource(base_absolut_url) + assert ( + resource.create_ai_proxy_chat_completion( + userId=userId, aiProviderId=aiProviderId, request_data=request_data + ) + == "response" + ) + m_request.assert_called_once_with( + method="post", + path=resource.get_ai_provider_path(userId=userId, aiProviderId=aiProviderId) + + "/chat/completions", + request_data=request_data, + ) + + +class TestEnterpriseAIResources: + resource_class = EnterpriseAIResource + + def get_resource(self, base_absolut_url): + return self.resource_class(requester=APIRequester(base_url=base_absolut_url)) + + def test_resource_with_id(self, base_absolut_url): + project_id = 1 + resource = self.resource_class( + requester=APIRequester(base_url=base_absolut_url), project_id=project_id + ) + assert resource.get_project_id() == project_id + + @pytest.mark.parametrize( + "in_params, path", + ( + ({}, "ai/prompts"), + ({"aiPromptId": 1}, "ai/prompts/1"), + ), + ) + def test_get_ai_path(self, in_params, path, base_absolut_url): + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_path(**in_params) == path + + @pytest.mark.parametrize( + "in_params, path", + ( + ({}, "ai/providers"), + ({"aiProviderId": 1}, "ai/providers/1"), + ), + ) + def test_get_ai_provider_path(self, in_params, path, base_absolut_url): + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_provider_path(**in_params) == path + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + {}, + { + "projectId": None, + "action": None, + "limit": 25, + "offset": 0, + }, + ), + ( + { + "projectId": 1, + "action": AIPromptAction.ASSIST, + "limit": 20, + "offset": 2, + }, + { + "projectId": 1, + "action": AIPromptAction.ASSIST, + "limit": 20, + "offset": 2, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_prompts( + self, m_request, incoming_data, request_params, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_prompts(**incoming_data) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_path(), + params=request_params, + ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + { + "name": "basic", + "action": 
AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "config": {"mode": "advanced", "prompt": "test prompt"}, + }, + ), + ( + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "isEnabled": False, + "enabledProjectIds": [1, 2, 3], + "config": { + "mode": "advanced", + "prompt": "test prompt", + "screenshot": True, + }, + }, + { + "name": "basic", + "action": AIPromptAction.ASSIST, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "isEnabled": False, + "enabledProjectIds": [1, 2, 3], + "config": { + "mode": "advanced", + "prompt": "test prompt", + "screenshot": True, + }, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_add_ai_prompt( + self, m_request, incoming_data, request_data, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.add_ai_prompt(request_data=incoming_data) == "response" + m_request.assert_called_once_with( + method="post", + path=resource.get_ai_path(), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiPromptId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_prompt(aiPromptId=aiPromptId) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_path(aiPromptId=aiPromptId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_delete_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiPromptId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.delete_ai_prompt(aiPromptId=aiPromptId) == "response" + m_request.assert_called_once_with( + method="delete", + path=resource.get_ai_path(aiPromptId=aiPromptId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_prompt(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiPromptId = 1 + request_data = [ + { + "op": AIPromptOperation.REPLACE, + "path": EditAIPromptPath.NAME, + "value": "test", + } + ] + resource = self.get_resource(base_absolut_url) + assert ( + resource.edit_ai_prompt(aiPromptId=aiPromptId, request_data=request_data) + == "response" + ) + m_request.assert_called_once_with( + method="patch", + path=resource.get_ai_path(aiPromptId=aiPromptId), + request_data=request_data, + ) + + @pytest.mark.parametrize( + "incoming_data, request_params", + ( + ( + {}, + { + "limit": 25, + "offset": 0, + }, + ), + ( + { + "limit": 20, + "offset": 2, + }, + { + "limit": 20, + "offset": 2, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_providers( + self, m_request, incoming_data, request_params, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_providers(**incoming_data) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(), + params=request_params, + ) + + @pytest.mark.parametrize( + "incoming_data, request_data", + ( + ( + {"name": "basic", "type": AIProviderType.OPEN_AI}, + {"name": "basic", "type": AIProviderType.OPEN_AI}, + ), + ( + { + "name": "basic", + "type": AIProviderType.OPEN_AI, + "credentials": {"apiKey": "test-api-key"}, + "aiProviderId": 1, + "aiModelId": 
"gpt-3.5-turbo-instruct", + "enabledProjectIds": [1, 2, 3], + "config": { + "actionRules": [ + { + "action": AIPromptAction.PRE_TRANSLATE, + "availableAiModelIds": ["gpt-3.5-turbo-instruct"], + } + ] + }, + "isEnabled": True, + "useSystemCredentials": False, + }, + { + "name": "basic", + "type": AIProviderType.OPEN_AI, + "credentials": {"apiKey": "test-api-key"}, + "aiProviderId": 1, + "aiModelId": "gpt-3.5-turbo-instruct", + "enabledProjectIds": [1, 2, 3], + "config": { + "actionRules": [ + { + "action": AIPromptAction.PRE_TRANSLATE, + "availableAiModelIds": ["gpt-3.5-turbo-instruct"], + } + ] + }, + "isEnabled": True, + "useSystemCredentials": False, + }, + ), + ), + ) + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_add_ai_provider( + self, m_request, incoming_data, request_data, base_absolut_url + ): + m_request.return_value = "response" + + resource = self.get_resource(base_absolut_url) + assert resource.add_ai_provider(request_data=incoming_data) == "response" + m_request.assert_called_once_with( + method="post", + path=resource.get_ai_provider_path(), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_get_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.get_ai_provider(aiProviderId=aiProviderId) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_delete_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.delete_ai_provider(aiProviderId=aiProviderId) == "response" + m_request.assert_called_once_with( + method="delete", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_edit_ai_provider(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + request_data = [ + { + "op": AIPromptOperation.REPLACE, + "path": EditAIPromptPath.NAME, + "value": "test", + } + ] + resource = self.get_resource(base_absolut_url) + assert ( + resource.edit_ai_provider( + aiProviderId=aiProviderId, request_data=request_data + ) + == "response" + ) + m_request.assert_called_once_with( + method="patch", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId), + request_data=request_data, + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_list_ai_provider_models(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + resource = self.get_resource(base_absolut_url) + assert resource.list_ai_provider_models(aiProviderId=aiProviderId) == "response" + m_request.assert_called_once_with( + method="get", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId) + "/models", + ) + + @mock.patch("crowdin_api.requester.APIRequester.request") + def test_create_ai_proxy_chat_completion(self, m_request, base_absolut_url): + m_request.return_value = "response" + + aiProviderId = 1 + request_data = {"model": "string", "stream": True} + resource = self.get_resource(base_absolut_url) + assert ( + resource.create_ai_proxy_chat_completion( + aiProviderId=aiProviderId, request_data=request_data + ) + == "response" + ) + m_request.assert_called_once_with( 
+ method="post", + path=resource.get_ai_provider_path(aiProviderId=aiProviderId) + + "/chat/completions", + request_data=request_data, + ) diff --git a/crowdin_api/api_resources/ai/types.py b/crowdin_api/api_resources/ai/types.py new file mode 100644 index 0000000..5bcd532 --- /dev/null +++ b/crowdin_api/api_resources/ai/types.py @@ -0,0 +1,150 @@ +from typing import Any, Dict, Iterable, Optional, Union + +from crowdin_api.api_resources.ai.enums import ( + AIPromptAction, + AIPromptOperation, + AIProviderType, + EditAIPromptPath, + EditAIProviderPath, +) +from crowdin_api.typing import TypedDict + + +class OtherLanguageTranslation(TypedDict): + isEnabled: Optional[bool] + languageIds: Optional[Iterable[int]] + + +class BasicModePreTranslateActionCondfig(TypedDict): + mode: str + companyDescription: Optional[bool] + projectDescription: Optional[bool] + audienceDescription: Optional[bool] + otherLanguageTranslation: Optional[OtherLanguageTranslation] + glossaryTerms: Optional[bool] + tmSuggestions: Optional[bool] + fileContent: Optional[bool] + fileContext: Optional[bool] + screenshots: Optional[bool] + publicProjectDescription: Optional[bool] + + +class BasicModeAssistActionCondfig(TypedDict): + mode: str + companyDescription: Optional[bool] + projectDescription: Optional[bool] + audienceDescription: Optional[bool] + otherLanguageTranslation: Optional[OtherLanguageTranslation] + glossaryTerms: Optional[bool] + tmSuggestions: Optional[bool] + fileContext: Optional[bool] + screenshots: Optional[bool] + publicProjectDescription: Optional[bool] + siblingsStrings: Optional[bool] + filteredStrings: Optional[bool] + + +class AdvancedModeConfig(TypedDict): + mode: str + screenshots: Optional[bool] + prompt: str + + +class ExternalMode(TypedDict): + name: str + identifier: str + key: str + options: Dict + + +class AddAIPromptRequestScheme(TypedDict): + name: str + action: AIPromptAction + aiProviderId: int + aiModelId: str + isEnabled: Optional[bool] + enabledProjectIds: Optional[Iterable[int]] + config: Union[ + BasicModePreTranslateActionCondfig, + BasicModeAssistActionCondfig, + AdvancedModeConfig, + ExternalMode, + ] + + +class EditAIPromptScheme(TypedDict): + op: AIPromptOperation + path: EditAIPromptPath + value: Any + + +class OpenAICredential(TypedDict): + apiKey: str + + +class AzureOpenAICredential(TypedDict): + resourceName: str + apiKey: str + deploymentName: str + apiVersion: str + + +class GoogleGeminiCredential(TypedDict): + project: str + region: str + serviceAccountKey: Dict + + +class MistralAICredential(TypedDict): + apiKey: str + + +class AnthropicCredential(TypedDict): + apiKey: str + + +class CustomAICredential(TypedDict): + identifier: str + key: str + + +class ActionRule(TypedDict): + action: AIPromptAction + availableAiModelIds: Iterable[int] + + +class ActionRules(TypedDict): + actionRules: Iterable[ActionRule] + + +class AddAIProviderReqeustScheme(TypedDict): + name: str + type: AIProviderType + credentials: Optional[ + Union[ + OpenAICredential, + AzureOpenAICredential, + GoogleGeminiCredential, + MistralAICredential, + AnthropicCredential, + CustomAICredential, + ] + ] + config: Optional[ActionRules] + isEnabled: Optional[bool] + useSystemCredentials: Optional[bool] + + +class EditAIProviderRequestScheme(TypedDict): + op: AIPromptOperation + path: EditAIProviderPath + value: Union[str, Dict, bool] + + +class GoogleGeminiChatProxy(TypedDict): + model: str + stream: Optional[bool] + + +class OtherChatProxy(TypedDict): + stream: Optional[bool] diff --git 
a/crowdin_api/client.py b/crowdin_api/client.py index b58be06..c121322 100644 --- a/crowdin_api/client.py +++ b/crowdin_api/client.py @@ -94,6 +94,21 @@ def get_api_requestor(self) -> APIRequester: return self._api_requestor + @property + def ai(self) -> Union[api_resources.AIResource, api_resources.EnterpriseAIResource]: + if self._is_enterprise_platform: + ai_class = api_resources.EnterpriseAIResource + else: + ai_class = api_resources.AIResource + + if self.PROJECT_ID: + return ai_class( + requester=self.get_api_requestor(), + project_id=self.PROJECT_ID, + page_size=self.PAGE_SIZE, + ) + return ai_class(requester=self.get_api_requestor(), page_size=self.PAGE_SIZE) + @property def applications(self) -> api_resources.ApplicationResource: return api_resources.ApplicationResource( diff --git a/crowdin_api/tests/test_client.py b/crowdin_api/tests/test_client.py index 47aafa4..836291d 100644 --- a/crowdin_api/tests/test_client.py +++ b/crowdin_api/tests/test_client.py @@ -160,6 +160,7 @@ def test_api_requestor_custom_values(self, m_APIRequester): @pytest.mark.parametrize( "property_name, class_name", ( + ("ai", "AIResource"), ("bundles", "BundlesResource"), ("dictionaries", "DictionariesResource"), ("distributions", "DistributionsResource"), @@ -214,6 +215,7 @@ class TestCrowdinClientEnterprise: @pytest.mark.parametrize( "property_name, class_name", ( + ("ai", "EnterpriseAIResource"), ("bundles", "BundlesResource"), ("dictionaries", "DictionariesResource"), ("distributions", "DistributionsResource"),
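
For context, here is a minimal usage sketch of the new `client.ai` property and the `AIResource` methods introduced above. It is not part of the diff: the token, user ID, API key, and prompt text are placeholders, the client construction follows the project README (`CrowdinClient(token=...)`), and the `["data"]["id"]` access assumes the usual Crowdin API v2 response envelope. Enterprise accounts get the same methods through `EnterpriseAIResource`, just without the `userId` argument.

```python
from crowdin_api import CrowdinClient
from crowdin_api.api_resources.ai.enums import AIPromptAction, AIProviderType

# Hypothetical credentials and IDs, for illustration only.
client = CrowdinClient(token="__token__")
user_id = 1

# Register an OpenAI provider for the user (credentials schema from types.py).
provider = client.ai.add_ai_provider(
    userId=user_id,
    request_data={
        "name": "OpenAI",
        "type": AIProviderType.OPEN_AI,
        "credentials": {"apiKey": "__api_key__"},
    },
)

# Create an "assist" prompt bound to that provider, using the advanced-mode config
# shape exercised in the tests. Assumes the standard {"data": {...}} envelope.
prompt = client.ai.add_ai_prompt(
    userId=user_id,
    request_data={
        "name": "basic",
        "action": AIPromptAction.ASSIST,
        "aiProviderId": provider["data"]["id"],
        "aiModelId": "gpt-3.5-turbo-instruct",
        "config": {"mode": "advanced", "prompt": "test prompt"},
    },
)

# Listing is paginated with limit/offset, like the other resources.
prompts = client.ai.list_ai_prompts(userId=user_id, limit=10)
```

The same pattern applies to providers (`list_ai_providers`, `list_ai_provider_models`) and to `create_ai_proxy_chat_completion`, which simply forwards the given payload to the selected provider.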