diff --git a/.flake8 b/.flake8 index 5d383e28..c016a01d 100644 --- a/.flake8 +++ b/.flake8 @@ -4,10 +4,12 @@ filename = ./scripts/*.py, ./src/*.py, ./tests/*.py +# Todo: remove "src/apify_client/consts.py: F401" once consts are removed from the package per-file-ignores = docs/*: D scripts/*: D tests/*: D + src/apify_client/consts.py: F401 # Google docstring convention + D204 & D401 docstring-convention = all diff --git a/.isort.cfg b/.isort.cfg index 5883453e..f7c00600 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -4,4 +4,4 @@ line_length = 150 use_parentheses = True multi_line_output = 3 sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER -known_first_party = apify_client +known_first_party = apify_client, apify_shared diff --git a/docs/docs.md b/docs/docs.md index 5fbf88e8..1e04f712 100644 --- a/docs/docs.md +++ b/docs/docs.md @@ -6155,258 +6155,3 @@ List all webhook dispatches of a user. * **Return type** [`ListPage`](#listpage) - -*** - -### [](#listpage) ListPage - -A single page of items returned from a list() method. - -#### Instance attributes - -Name | Type | Description ----- | ---- | ----------- -`items` | `list` | List of returned objects on this page -`offset` | `int` | The limit on the number of returned objects offset specified in the API call -`limit` | `int` | The offset of the first object specified in the API call -`count` | `int` | Count of the returned objects on this page -`total` | `int` | Total number of objects matching the API call criteria -`desc` | `bool` | Whether the listing is descending or not - -*** - -### [](#actorjobstatus) ActorJobStatus - -Available statuses for actor jobs (runs or builds). - -* [READY](#actorjobstatus-ready) -* [RUNNING](#actorjobstatus-running) -* [SUCCEEDED](#actorjobstatus-succeeded) -* [FAILED](#actorjobstatus-failed) -* [TIMING\_OUT](#actorjobstatus-timing\_out) -* [TIMED\_OUT](#actorjobstatus-timed\_out) -* [ABORTING](#actorjobstatus-aborting) -* [ABORTED](#actorjobstatus-aborted) - -*** - -#### [](#actorjobstatus-ready) `ActorJobStatus.READY` - -Actor job initialized but not started yet - -*** - -#### [](#actorjobstatus-running) `ActorJobStatus.RUNNING` - -Actor job in progress - -*** - -#### [](#actorjobstatus-succeeded) `ActorJobStatus.SUCCEEDED` - -Actor job finished successfully - -*** - -#### [](#actorjobstatus-failed) `ActorJobStatus.FAILED` - -Actor job or build failed - -*** - -#### [](#actorjobstatus-timing_out) `ActorJobStatus.TIMING_OUT` - -Actor job currently timing out - -*** - -#### [](#actorjobstatus-timed_out) `ActorJobStatus.TIMED_OUT` - -Actor job timed out - -*** - -#### [](#actorjobstatus-aborting) `ActorJobStatus.ABORTING` - -Actor job currently being aborted by user - -*** - -#### [](#actorjobstatus-aborted) `ActorJobStatus.ABORTED` - -Actor job aborted by user - -*** - -### [](#actorsourcetype) ActorSourceType - -Available source types for actors. 
- -* [SOURCE\_FILES](#actorsourcetype-source\_files) -* [GIT\_REPO](#actorsourcetype-git\_repo) -* [TARBALL](#actorsourcetype-tarball) -* [GITHUB\_GIST](#actorsourcetype-github\_gist) - -*** - -#### [](#actorsourcetype-source_files) `ActorSourceType.SOURCE_FILES` - -Actor source code is comprised of multiple files - -*** - -#### [](#actorsourcetype-git_repo) `ActorSourceType.GIT_REPO` - -Actor source code is cloned from a Git repository - -*** - -#### [](#actorsourcetype-tarball) `ActorSourceType.TARBALL` - -Actor source code is downloaded using a tarball or Zip file - -*** - -#### [](#actorsourcetype-github_gist) `ActorSourceType.GITHUB_GIST` - -Actor source code is taken from a GitHub Gist - -*** - -### [](#webhookeventtype) WebhookEventType - -Events that can trigger a webhook. - -* [ACTOR\_RUN\_CREATED](#webhookeventtype-actor\_run\_created) -* [ACTOR\_RUN\_SUCCEEDED](#webhookeventtype-actor\_run\_succeeded) -* [ACTOR\_RUN\_FAILED](#webhookeventtype-actor\_run\_failed) -* [ACTOR\_RUN\_TIMED\_OUT](#webhookeventtype-actor\_run\_timed\_out) -* [ACTOR\_RUN\_ABORTED](#webhookeventtype-actor\_run\_aborted) -* [ACTOR\_RUN\_RESURRECTED](#webhookeventtype-actor\_run\_resurrected) -* [ACTOR\_BUILD\_CREATED](#webhookeventtype-actor\_build\_created) -* [ACTOR\_BUILD\_SUCCEEDED](#webhookeventtype-actor\_build\_succeeded) -* [ACTOR\_BUILD\_FAILED](#webhookeventtype-actor\_build\_failed) -* [ACTOR\_BUILD\_TIMED\_OUT](#webhookeventtype-actor\_build\_timed\_out) -* [ACTOR\_BUILD\_ABORTED](#webhookeventtype-actor\_build\_aborted) - -*** - -#### [](#webhookeventtype-actor_run_created) `WebhookEventType.ACTOR_RUN_CREATED` - -The actor run was created - -*** - -#### [](#webhookeventtype-actor_run_succeeded) `WebhookEventType.ACTOR_RUN_SUCCEEDED` - -The actor run has succeeded - -*** - -#### [](#webhookeventtype-actor_run_failed) `WebhookEventType.ACTOR_RUN_FAILED` - -The actor run has failed - -*** - -#### [](#webhookeventtype-actor_run_timed_out) `WebhookEventType.ACTOR_RUN_TIMED_OUT` - -The actor run has timed out - -*** - -#### [](#webhookeventtype-actor_run_aborted) `WebhookEventType.ACTOR_RUN_ABORTED` - -The actor run was aborted - -*** - -#### [](#webhookeventtype-actor_run_resurrected) `WebhookEventType.ACTOR_RUN_RESURRECTED` - -The actor run was resurrected - -*** - -#### [](#webhookeventtype-actor_build_created) `WebhookEventType.ACTOR_BUILD_CREATED` - -The actor build was created - -*** - -#### [](#webhookeventtype-actor_build_succeeded) `WebhookEventType.ACTOR_BUILD_SUCCEEDED` - -The actor build has succeeded - -*** - -#### [](#webhookeventtype-actor_build_failed) `WebhookEventType.ACTOR_BUILD_FAILED` - -The actor build has failed - -*** - -#### [](#webhookeventtype-actor_build_timed_out) `WebhookEventType.ACTOR_BUILD_TIMED_OUT` - -The actor build has timed out - -*** - -#### [](#webhookeventtype-actor_build_aborted) `WebhookEventType.ACTOR_BUILD_ABORTED` - -The actor build was aborted - -*** - -### [](#metaorigin) MetaOrigin - -Possible origins for actor runs, i.e. how were the jobs started. 
- -* [DEVELOPMENT](#metaorigin-development) -* [WEB](#metaorigin-web) -* [API](#metaorigin-api) -* [SCHEDULER](#metaorigin-scheduler) -* [TEST](#metaorigin-test) -* [WEBHOOK](#metaorigin-webhook) -* [ACTOR](#metaorigin-actor) - -*** - -#### [](#metaorigin-development) `MetaOrigin.DEVELOPMENT` - -Job started from Developer console in Source section of actor - -*** - -#### [](#metaorigin-web) `MetaOrigin.WEB` - -Job started from other place on the website (either console or task detail page) - -*** - -#### [](#metaorigin-api) `MetaOrigin.API` - -Job started through API - -*** - -#### [](#metaorigin-scheduler) `MetaOrigin.SCHEDULER` - -Job started through Scheduler - -*** - -#### [](#metaorigin-test) `MetaOrigin.TEST` - -Job started through test actor page - -*** - -#### [](#metaorigin-webhook) `MetaOrigin.WEBHOOK` - -Job started by the webhook - -*** - -#### [](#metaorigin-actor) `MetaOrigin.ACTOR` - -Job started by another actor run diff --git a/docs/res/sphinx-config/index.rst b/docs/res/sphinx-config/index.rst index d0577405..e03c6c76 100644 --- a/docs/res/sphinx-config/index.rst +++ b/docs/res/sphinx-config/index.rst @@ -7,7 +7,3 @@ :members: .. automodule:: apify_client.clients.resource_clients :members: -.. autoclass:: apify_client._utils.ListPage - :members: -.. automodule:: apify_client.consts - :members: diff --git a/pyproject.toml b/pyproject.toml index a36a1b86..7bfac550 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ description = "Apify API client for Python" readme = "README.md" license = {text = "Apache Software License"} authors = [ - {name = "Apify Technologies s.r.o.", email = "support@apify.com" } + {name = "Apify Technologies s.r.o.", email = "support@apify.com"}, ] keywords = ["apify", "api", "client", "scraping", "automation"] @@ -23,7 +23,8 @@ classifiers = [ requires-python = ">=3.8" dependencies = [ - "httpx >= 0.24.1" + "apify-shared ~= 1.0.0", + "httpx >= 0.24.1", ] [project.optional-dependencies] diff --git a/src/apify_client/_errors.py b/src/apify_client/_errors.py index b31fadb2..6def61b1 100644 --- a/src/apify_client/_errors.py +++ b/src/apify_client/_errors.py @@ -2,7 +2,7 @@ import httpx -from ._utils import ignore_docs +from apify_shared.utils import ignore_docs class ApifyClientError(Exception): diff --git a/src/apify_client/_http_client.py b/src/apify_client/_http_client.py index 4274ceb2..e83709d9 100644 --- a/src/apify_client/_http_client.py +++ b/src/apify_client/_http_client.py @@ -9,17 +9,12 @@ import httpx +from apify_shared.types import JSONSerializable +from apify_shared.utils import ignore_docs, is_content_type_json, is_content_type_text, is_content_type_xml + from ._errors import ApifyApiError, InvalidResponseBodyError, _is_retryable_error from ._logging import logger_name -from ._types import JSONSerializable -from ._utils import ( - _is_content_type_json, - _is_content_type_text, - _is_content_type_xml, - _retry_with_exp_backoff, - _retry_with_exp_backoff_async, - ignore_docs, -) +from ._utils import _retry_with_exp_backoff, _retry_with_exp_backoff_async DEFAULT_BACKOFF_EXPONENTIAL_FACTOR = 2 DEFAULT_BACKOFF_RANDOM_FACTOR = 1 @@ -70,9 +65,9 @@ def _maybe_parse_response(response: httpx.Response) -> Any: content_type = response.headers['content-type'].split(';')[0].strip() try: - if _is_content_type_json(content_type): + if is_content_type_json(content_type): return response.json() - elif _is_content_type_xml(content_type) or _is_content_type_text(content_type): + elif is_content_type_xml(content_type) or 
is_content_type_text(content_type): return response.text else: return response.content diff --git a/src/apify_client/_types.py b/src/apify_client/_types.py deleted file mode 100644 index 66a6a0b9..00000000 --- a/src/apify_client/_types.py +++ /dev/null @@ -1,6 +0,0 @@ -from typing import Any, Dict, List, Union - -# Type for representing json-serializable values -# It's close enough to the real thing supported by json.parse, and the best we can do until mypy supports recursive types -# It was suggested in a discussion with (and approved by) Guido van Rossum, so I'd consider it correct enough -JSONSerializable = Union[str, int, float, bool, None, Dict[str, Any], List[Any]] diff --git a/src/apify_client/_utils.py b/src/apify_client/_utils.py index 328715e0..2d384571 100644 --- a/src/apify_client/_utils.py +++ b/src/apify_client/_utils.py @@ -1,14 +1,12 @@ import asyncio import base64 -import io import json import random -import re import time -from datetime import datetime, timezone -from enum import Enum from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, Generic, List, Optional, Tuple, TypeVar, cast +from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, List, Optional, Tuple, TypeVar, cast + +from apify_shared.utils import is_file_or_bytes, maybe_extract_enum_member_value if TYPE_CHECKING: from ._errors import ApifyApiError @@ -18,6 +16,9 @@ RECORD_NOT_FOUND_EXCEPTION_TYPES = ['record-not-found', 'record-or-token-not-found'] +T = TypeVar('T') +StopRetryingType = Callable[[], None] + def _to_safe_id(id: str) -> str: # Identificators of resources in the API are either in the format `resource_id` or `username/resource_id`. @@ -26,35 +27,6 @@ def _to_safe_id(id: str) -> str: return id.replace('/', '~') -ListOrDict = TypeVar('ListOrDict', List, Dict) - - -def _parse_date_fields(data: ListOrDict, max_depth: int = PARSE_DATE_FIELDS_MAX_DEPTH) -> ListOrDict: - if max_depth < 0: - return data - - if isinstance(data, list): - return [_parse_date_fields(item, max_depth - 1) for item in data] - - if isinstance(data, dict): - def parse(key: str, value: object) -> object: - parsed_value = value - if key.endswith(PARSE_DATE_FIELDS_KEY_SUFFIX) and isinstance(value, str): - try: - parsed_value = datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=timezone.utc) - except ValueError: - pass - elif isinstance(value, dict): - parsed_value = _parse_date_fields(value, max_depth - 1) - elif isinstance(value, list): - parsed_value = _parse_date_fields(value, max_depth) - return parsed_value - - return {key: parse(key, value) for (key, value) in data.items()} - - return data - - def _pluck_data(parsed_response: Any) -> Dict: if isinstance(parsed_response, dict) and 'data' in parsed_response: return cast(Dict, parsed_response['data']) @@ -69,29 +41,6 @@ def _pluck_data_as_list(parsed_response: Any) -> List: raise ValueError('The "data" property is missing in the response.') -def _is_content_type_json(content_type: str) -> bool: - return bool(re.search(r'^application/json', content_type, flags=re.IGNORECASE)) - - -def _is_content_type_xml(content_type: str) -> bool: - return bool(re.search(r'^application/.*xml$', content_type, flags=re.IGNORECASE)) - - -def _is_content_type_text(content_type: str) -> bool: - return bool(re.search(r'^text/', content_type, flags=re.IGNORECASE)) - - -def _is_file_or_bytes(value: Any) -> bool: - # The check for IOBase is not ideal, it would be better to use duck typing, - # but then the check would be super complex, 
judging from how the 'requests' library does it. - # This way should be good enough for the vast majority of use cases, if it causes issues, we can improve it later. - return isinstance(value, (bytes, bytearray, io.IOBase)) - - -T = TypeVar('T') -StopRetryingType = Callable[[], None] - - def _retry_with_exp_backoff( func: Callable[[StopRetryingType, int], T], *, @@ -174,7 +123,7 @@ def _encode_webhook_list_to_base64(webhooks: List[Dict]) -> str: data = [] for webhook in webhooks: webhook_representation = { - 'eventTypes': [_maybe_extract_enum_member_value(event_type) for event_type in webhook['event_types']], + 'eventTypes': [maybe_extract_enum_member_value(event_type) for event_type in webhook['event_types']], 'requestUrl': webhook['request_url'], } if 'payload_template' in webhook: @@ -184,72 +133,16 @@ def _encode_webhook_list_to_base64(webhooks: List[Dict]) -> str: return base64.b64encode(json.dumps(data).encode('utf-8')).decode('ascii') -def _filter_out_none_values_recursively(dictionary: Dict) -> Dict: - """Return copy of the dictionary, recursively omitting all keys for which values are None.""" - return cast(dict, _filter_out_none_values_recursively_internal(dictionary)) - - -# Unfortunately, it's necessary to have an internal function for the correct result typing, without having to create complicated overloads -def _filter_out_none_values_recursively_internal(dictionary: Dict, remove_empty_dicts: Optional[bool] = None) -> Optional[Dict]: - result = {} - for k, v in dictionary.items(): - if isinstance(v, dict): - v = _filter_out_none_values_recursively_internal(v, remove_empty_dicts is True or remove_empty_dicts is None) - if v is not None: - result[k] = v - if not result and remove_empty_dicts: - return None - return result - - def _encode_key_value_store_record_value(value: Any, content_type: Optional[str] = None) -> Tuple[Any, str]: if not content_type: - if _is_file_or_bytes(value): + if is_file_or_bytes(value): content_type = 'application/octet-stream' elif isinstance(value, str): content_type = 'text/plain; charset=utf-8' else: content_type = 'application/json; charset=utf-8' - if 'application/json' in content_type and not _is_file_or_bytes(value) and not isinstance(value, str): + if 'application/json' in content_type and not is_file_or_bytes(value) and not isinstance(value, str): value = json.dumps(value, ensure_ascii=False, indent=2, allow_nan=False, default=str).encode('utf-8') return (value, content_type) - - -def _maybe_extract_enum_member_value(maybe_enum_member: Any) -> Any: - if isinstance(maybe_enum_member, Enum): - return maybe_enum_member.value - return maybe_enum_member - - -def ignore_docs(method: T) -> T: - """Mark that a method's documentation should not be rendered. 
Functionally, this decorator is a noop.""" - return method - - -class ListPage(Generic[T]): - """A single page of items returned from a list() method.""" - - #: list: List of returned objects on this page - items: List[T] - #: int: Count of the returned objects on this page - count: int - #: int: The limit on the number of returned objects offset specified in the API call - offset: int - #: int: The offset of the first object specified in the API call - limit: int - #: int: Total number of objects matching the API call criteria - total: int - #: bool: Whether the listing is descending or not - desc: bool - - @ignore_docs - def __init__(self, data: Dict) -> None: - """Initialize a ListPage instance from the API response data.""" - self.items = data['items'] if 'items' in data else [] - self.offset = data['offset'] if 'offset' in data else 0 - self.limit = data['limit'] if 'limit' in data else 0 - self.count = data['count'] if 'count' in data else len(self.items) - self.total = data['total'] if 'total' in data else self.offset + self.count - self.desc = data['desc'] if 'desc' in data else False diff --git a/src/apify_client/client.py b/src/apify_client/client.py index be978669..f3f83b26 100644 --- a/src/apify_client/client.py +++ b/src/apify_client/client.py @@ -1,7 +1,8 @@ from typing import Dict, Optional, Union +from apify_shared.utils import ignore_docs + from ._http_client import _HTTPClient, _HTTPClientAsync -from ._utils import ignore_docs from .clients import ( ActorClient, ActorClientAsync, diff --git a/src/apify_client/clients/base/actor_job_base_client.py b/src/apify_client/clients/base/actor_job_base_client.py index 454d0d4b..0e7e62e7 100644 --- a/src/apify_client/clients/base/actor_job_base_client.py +++ b/src/apify_client/clients/base/actor_job_base_client.py @@ -4,9 +4,11 @@ from datetime import datetime, timezone from typing import Dict, Optional +from apify_shared.consts import ActorJobStatus +from apify_shared.utils import ignore_docs, parse_date_fields + from ..._errors import ApifyApiError -from ..._utils import _catch_not_found_or_throw, _parse_date_fields, _pluck_data, ignore_docs -from ...consts import ActorJobStatus +from ..._utils import _catch_not_found_or_throw, _pluck_data from .resource_client import ResourceClient, ResourceClientAsync DEFAULT_WAIT_FOR_FINISH_SEC = 999999 @@ -36,7 +38,7 @@ def _wait_for_finish(self, wait_secs: Optional[int] = None) -> Optional[Dict]: method='GET', params=self._params(waitForFinish=wait_for_finish), ) - job = _parse_date_fields(_pluck_data(response.json())) + job = parse_date_fields(_pluck_data(response.json())) seconds_elapsed = math.floor(((datetime.now(timezone.utc) - started_at).total_seconds())) if ( @@ -69,7 +71,7 @@ def _abort(self, gracefully: Optional[bool] = None) -> Dict: gracefully=gracefully, ), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) @ignore_docs @@ -93,7 +95,7 @@ async def _wait_for_finish(self, wait_secs: Optional[int] = None) -> Optional[Di method='GET', params=self._params(waitForFinish=wait_for_finish), ) - job = _parse_date_fields(_pluck_data(response.json())) + job = parse_date_fields(_pluck_data(response.json())) seconds_elapsed = math.floor(((datetime.now(timezone.utc) - started_at).total_seconds())) if ( @@ -126,4 +128,4 @@ async def _abort(self, gracefully: Optional[bool] = None) -> Dict: gracefully=gracefully, ), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) 
diff --git a/src/apify_client/clients/base/base_client.py b/src/apify_client/clients/base/base_client.py index f3e33c09..c2b928a8 100644 --- a/src/apify_client/clients/base/base_client.py +++ b/src/apify_client/clients/base/base_client.py @@ -2,9 +2,11 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Union +from apify_shared.utils import ignore_docs + from ..._http_client import _HTTPClient, _HTTPClientAsync from ..._logging import _WithLogDetailsClient -from ..._utils import _to_safe_id, ignore_docs +from ..._utils import _to_safe_id # Conditional import only executed when type checking, otherwise we'd get circular dependency issues if TYPE_CHECKING: diff --git a/src/apify_client/clients/base/resource_client.py b/src/apify_client/clients/base/resource_client.py index 1f8d8036..5004a940 100644 --- a/src/apify_client/clients/base/resource_client.py +++ b/src/apify_client/clients/base/resource_client.py @@ -1,7 +1,9 @@ from typing import Dict, Optional +from apify_shared.utils import ignore_docs, parse_date_fields + from ..._errors import ApifyApiError -from ..._utils import _catch_not_found_or_throw, _parse_date_fields, _pluck_data, ignore_docs +from ..._utils import _catch_not_found_or_throw, _pluck_data from .base_client import BaseClient, BaseClientAsync @@ -17,7 +19,7 @@ def _get(self) -> Optional[Dict]: params=self._params(), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) except ApifyApiError as exc: _catch_not_found_or_throw(exc) @@ -32,7 +34,7 @@ def _update(self, updated_fields: Dict) -> Dict: json=updated_fields, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def _delete(self) -> None: try: @@ -58,7 +60,7 @@ async def _get(self) -> Optional[Dict]: params=self._params(), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) except ApifyApiError as exc: _catch_not_found_or_throw(exc) @@ -73,7 +75,7 @@ async def _update(self, updated_fields: Dict) -> Dict: json=updated_fields, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def _delete(self) -> None: try: diff --git a/src/apify_client/clients/base/resource_collection_client.py b/src/apify_client/clients/base/resource_collection_client.py index effb848d..408bbf92 100644 --- a/src/apify_client/clients/base/resource_collection_client.py +++ b/src/apify_client/clients/base/resource_collection_client.py @@ -1,6 +1,9 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, _parse_date_fields, _pluck_data, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import ignore_docs, parse_date_fields + +from ..._utils import _pluck_data from .base_client import BaseClient, BaseClientAsync @@ -15,7 +18,7 @@ def _list(self, **kwargs: Any) -> ListPage: params=self._params(**kwargs), ) - return ListPage(_parse_date_fields(_pluck_data(response.json()))) + return ListPage(parse_date_fields(_pluck_data(response.json()))) def _create(self, resource: Dict) -> Dict: response = self.http_client.call( @@ -25,7 +28,7 @@ def _create(self, resource: Dict) -> Dict: json=resource, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def _get_or_create(self, name: Optional[str] = None, resource: Optional[Dict] = None) -> Dict: response = self.http_client.call( @@ -35,7 +38,7 
@@ def _get_or_create(self, name: Optional[str] = None, resource: Optional[Dict] = json=resource, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) @ignore_docs @@ -49,7 +52,7 @@ async def _list(self, **kwargs: Any) -> ListPage: params=self._params(**kwargs), ) - return ListPage(_parse_date_fields(_pluck_data(response.json()))) + return ListPage(parse_date_fields(_pluck_data(response.json()))) async def _create(self, resource: Dict) -> Dict: response = await self.http_client.call( @@ -59,7 +62,7 @@ async def _create(self, resource: Dict) -> Dict: json=resource, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def _get_or_create(self, name: Optional[str] = None, resource: Optional[Dict] = None) -> Dict: response = await self.http_client.call( @@ -69,4 +72,4 @@ async def _get_or_create(self, name: Optional[str] = None, resource: Optional[Di json=resource, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) diff --git a/src/apify_client/clients/resource_clients/actor.py b/src/apify_client/clients/resource_clients/actor.py index 6bbd2f1b..59961b5f 100644 --- a/src/apify_client/clients/resource_clients/actor.py +++ b/src/apify_client/clients/resource_clients/actor.py @@ -1,15 +1,9 @@ from typing import Any, Dict, List, Optional -from ..._utils import ( - _encode_key_value_store_record_value, - _encode_webhook_list_to_base64, - _filter_out_none_values_recursively, - _maybe_extract_enum_member_value, - _parse_date_fields, - _pluck_data, - ignore_docs, -) -from ...consts import ActorJobStatus, MetaOrigin +from apify_shared.consts import ActorJobStatus, MetaOrigin +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, maybe_extract_enum_member_value, parse_date_fields + +from ..._utils import _encode_key_value_store_record_value, _encode_webhook_list_to_base64, _pluck_data from ..base import ResourceClient, ResourceClientAsync from .actor_version import ActorVersionClient, ActorVersionClientAsync from .actor_version_collection import ActorVersionCollectionClient, ActorVersionCollectionClientAsync @@ -145,7 +139,7 @@ def update( example_run_input_content_type=example_run_input_content_type, ) - return self._update(_filter_out_none_values_recursively(actor_representation)) + return self._update(filter_out_none_values_recursively(actor_representation)) def delete(self) -> None: """Delete the actor. 
@@ -210,7 +204,7 @@ def start( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def call( self, @@ -299,7 +293,7 @@ def build( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def builds(self) -> BuildCollectionClient: """Retrieve a client for the builds of this actor.""" @@ -325,8 +319,8 @@ def last_run(self, *, status: Optional[ActorJobStatus] = None, origin: Optional[ resource_id='last', resource_path='runs', params=self._params( - status=_maybe_extract_enum_member_value(status), - origin=_maybe_extract_enum_member_value(origin), + status=maybe_extract_enum_member_value(status), + origin=maybe_extract_enum_member_value(origin), ), )) @@ -433,7 +427,7 @@ async def update( example_run_input_content_type=example_run_input_content_type, ) - return await self._update(_filter_out_none_values_recursively(actor_representation)) + return await self._update(filter_out_none_values_recursively(actor_representation)) async def delete(self) -> None: """Delete the actor. @@ -498,7 +492,7 @@ async def start( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def call( self, @@ -587,7 +581,7 @@ async def build( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def builds(self) -> BuildCollectionClientAsync: """Retrieve a client for the builds of this actor.""" @@ -613,8 +607,8 @@ def last_run(self, *, status: Optional[ActorJobStatus] = None, origin: Optional[ resource_id='last', resource_path='runs', params=self._params( - status=_maybe_extract_enum_member_value(status), - origin=_maybe_extract_enum_member_value(origin), + status=maybe_extract_enum_member_value(status), + origin=maybe_extract_enum_member_value(origin), ), )) diff --git a/src/apify_client/clients/resource_clients/actor_collection.py b/src/apify_client/clients/resource_clients/actor_collection.py index 451be94a..fe259807 100644 --- a/src/apify_client/clients/resource_clients/actor_collection.py +++ b/src/apify_client/clients/resource_clients/actor_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, List, Optional -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync from .actor import _get_actor_representation @@ -101,7 +103,7 @@ def create( example_run_input_content_type=example_run_input_content_type, ) - return self._create(_filter_out_none_values_recursively(actor_representation)) + return self._create(filter_out_none_values_recursively(actor_representation)) class ActorCollectionClientAsync(ResourceCollectionClientAsync): @@ -200,4 +202,4 @@ async def create( example_run_input_content_type=example_run_input_content_type, ) - return await self._create(_filter_out_none_values_recursively(actor_representation)) + return await self._create(filter_out_none_values_recursively(actor_representation)) diff --git a/src/apify_client/clients/resource_clients/actor_env_var.py b/src/apify_client/clients/resource_clients/actor_env_var.py index 57761f4a..601c9b7f 100644 --- a/src/apify_client/clients/resource_clients/actor_env_var.py +++ 
b/src/apify_client/clients/resource_clients/actor_env_var.py @@ -1,6 +1,7 @@ from typing import Any, Dict, Optional -from ..._utils import _filter_out_none_values_recursively, ignore_docs +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceClient, ResourceClientAsync @@ -61,7 +62,7 @@ def update( value=value, ) - return self._update(_filter_out_none_values_recursively(actor_env_var_representation)) + return self._update(filter_out_none_values_recursively(actor_env_var_representation)) def delete(self) -> None: """Delete the actor environment variable. @@ -115,7 +116,7 @@ async def update( value=value, ) - return await self._update(_filter_out_none_values_recursively(actor_env_var_representation)) + return await self._update(filter_out_none_values_recursively(actor_env_var_representation)) async def delete(self) -> None: """Delete the actor environment variable. diff --git a/src/apify_client/clients/resource_clients/actor_env_var_collection.py b/src/apify_client/clients/resource_clients/actor_env_var_collection.py index 6ffc41ca..ed41d08b 100644 --- a/src/apify_client/clients/resource_clients/actor_env_var_collection.py +++ b/src/apify_client/clients/resource_clients/actor_env_var_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync from .actor_env_var import _get_actor_env_var_representation @@ -49,7 +51,7 @@ def create( value=value, ) - return self._create(_filter_out_none_values_recursively(actor_env_var_representation)) + return self._create(filter_out_none_values_recursively(actor_env_var_representation)) class ActorEnvVarCollectionClientAsync(ResourceCollectionClientAsync): @@ -96,4 +98,4 @@ async def create( value=value, ) - return await self._create(_filter_out_none_values_recursively(actor_env_var_representation)) + return await self._create(filter_out_none_values_recursively(actor_env_var_representation)) diff --git a/src/apify_client/clients/resource_clients/actor_version.py b/src/apify_client/clients/resource_clients/actor_version.py index afdbe652..947fcbd3 100644 --- a/src/apify_client/clients/resource_clients/actor_version.py +++ b/src/apify_client/clients/resource_clients/actor_version.py @@ -1,7 +1,8 @@ from typing import Any, Dict, List, Optional -from ..._utils import _filter_out_none_values_recursively, _maybe_extract_enum_member_value, ignore_docs -from ...consts import ActorSourceType +from apify_shared.consts import ActorSourceType +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, maybe_extract_enum_member_value + from ..base import ResourceClient, ResourceClientAsync from .actor_env_var import ActorEnvVarClient, ActorEnvVarClientAsync from .actor_env_var_collection import ActorEnvVarCollectionClient, ActorEnvVarCollectionClientAsync @@ -24,7 +25,7 @@ def _get_actor_version_representation( 'buildTag': build_tag, 'envVars': env_vars, 'applyEnvVarsToBuild': apply_env_vars_to_build, - 'sourceType': _maybe_extract_enum_member_value(source_type), + 'sourceType': maybe_extract_enum_member_value(source_type), 'sourceFiles': source_files, 'gitRepoUrl': git_repo_url, 'tarballUrl': tarball_url, @@ -97,7 +98,7 @@ def update( github_gist_url=github_gist_url, ) - return 
self._update(_filter_out_none_values_recursively(actor_version_representation)) + return self._update(filter_out_none_values_recursively(actor_version_representation)) def delete(self) -> None: """Delete the actor version. @@ -187,7 +188,7 @@ async def update( github_gist_url=github_gist_url, ) - return await self._update(_filter_out_none_values_recursively(actor_version_representation)) + return await self._update(filter_out_none_values_recursively(actor_version_representation)) async def delete(self) -> None: """Delete the actor version. diff --git a/src/apify_client/clients/resource_clients/actor_version_collection.py b/src/apify_client/clients/resource_clients/actor_version_collection.py index 25377cf7..106d66b1 100644 --- a/src/apify_client/clients/resource_clients/actor_version_collection.py +++ b/src/apify_client/clients/resource_clients/actor_version_collection.py @@ -1,7 +1,9 @@ from typing import Any, Dict, List, Optional -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs -from ...consts import ActorSourceType +from apify_shared.consts import ActorSourceType +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync from .actor_version import _get_actor_version_representation @@ -74,7 +76,7 @@ def create( github_gist_url=github_gist_url, ) - return self._create(_filter_out_none_values_recursively(actor_version_representation)) + return self._create(filter_out_none_values_recursively(actor_version_representation)) class ActorVersionCollectionClientAsync(ResourceCollectionClientAsync): @@ -145,4 +147,4 @@ async def create( github_gist_url=github_gist_url, ) - return await self._create(_filter_out_none_values_recursively(actor_version_representation)) + return await self._create(filter_out_none_values_recursively(actor_version_representation)) diff --git a/src/apify_client/clients/resource_clients/build.py b/src/apify_client/clients/resource_clients/build.py index 8d57c598..eacdc7e0 100644 --- a/src/apify_client/clients/resource_clients/build.py +++ b/src/apify_client/clients/resource_clients/build.py @@ -1,6 +1,7 @@ from typing import Any, Dict, Optional -from ..._utils import ignore_docs +from apify_shared.utils import ignore_docs + from ..base import ActorJobBaseClient, ActorJobBaseClientAsync diff --git a/src/apify_client/clients/resource_clients/build_collection.py b/src/apify_client/clients/resource_clients/build_collection.py index d4852c8c..26a9557a 100644 --- a/src/apify_client/clients/resource_clients/build_collection.py +++ b/src/apify_client/clients/resource_clients/build_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync diff --git a/src/apify_client/clients/resource_clients/dataset.py b/src/apify_client/clients/resource_clients/dataset.py index d5223b84..e718ffb4 100644 --- a/src/apify_client/clients/resource_clients/dataset.py +++ b/src/apify_client/clients/resource_clients/dataset.py @@ -4,8 +4,10 @@ import httpx -from ..._types import JSONSerializable -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs +from apify_shared.models import ListPage +from apify_shared.types import JSONSerializable +from apify_shared.utils import 
filter_out_none_values_recursively, ignore_docs + from ..base import ResourceClient, ResourceClientAsync @@ -43,7 +45,7 @@ def update(self, *, name: Optional[str] = None) -> Dict: 'name': name, } - return self._update(_filter_out_none_values_recursively(updated_fields)) + return self._update(filter_out_none_values_recursively(updated_fields)) def delete(self) -> None: """Delete the dataset. @@ -536,7 +538,7 @@ async def update(self, *, name: Optional[str] = None) -> Dict: 'name': name, } - return await self._update(_filter_out_none_values_recursively(updated_fields)) + return await self._update(filter_out_none_values_recursively(updated_fields)) async def delete(self) -> None: """Delete the dataset. diff --git a/src/apify_client/clients/resource_clients/dataset_collection.py b/src/apify_client/clients/resource_clients/dataset_collection.py index 0db82229..5bdba1dd 100644 --- a/src/apify_client/clients/resource_clients/dataset_collection.py +++ b/src/apify_client/clients/resource_clients/dataset_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync @@ -48,7 +50,7 @@ def get_or_create(self, *, name: Optional[str] = None, schema: Optional[Dict] = Returns: dict: The retrieved or newly-created dataset. """ - return self._get_or_create(name=name, resource=_filter_out_none_values_recursively({'schema': schema})) + return self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) class DatasetCollectionClientAsync(ResourceCollectionClientAsync): @@ -95,4 +97,4 @@ async def get_or_create(self, *, name: Optional[str] = None, schema: Optional[Di Returns: dict: The retrieved or newly-created dataset. """ - return await self._get_or_create(name=name, resource=_filter_out_none_values_recursively({'schema': schema})) + return await self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) diff --git a/src/apify_client/clients/resource_clients/key_value_store.py b/src/apify_client/clients/resource_clients/key_value_store.py index 946706d1..555c87eb 100644 --- a/src/apify_client/clients/resource_clients/key_value_store.py +++ b/src/apify_client/clients/resource_clients/key_value_store.py @@ -2,15 +2,10 @@ from contextlib import asynccontextmanager, contextmanager from typing import Any, AsyncIterator, Dict, Iterator, Optional +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, parse_date_fields + from ..._errors import ApifyApiError -from ..._utils import ( - _catch_not_found_or_throw, - _encode_key_value_store_record_value, - _filter_out_none_values_recursively, - _parse_date_fields, - _pluck_data, - ignore_docs, -) +from ..._utils import _catch_not_found_or_throw, _encode_key_value_store_record_value, _pluck_data from ..base import ResourceClient, ResourceClientAsync @@ -48,7 +43,7 @@ def update(self, *, name: Optional[str] = None) -> Dict: 'name': name, } - return self._update(_filter_out_none_values_recursively(updated_fields)) + return self._update(filter_out_none_values_recursively(updated_fields)) def delete(self) -> None: """Delete the key-value store. 
@@ -80,7 +75,7 @@ def list_keys(self, *, limit: Optional[int] = None, exclusive_start_key: Optiona params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def get_record(self, key: str, *, as_bytes: bool = False, as_file: bool = False) -> Optional[Dict]: """Retrieve the given record from the key-value store. @@ -277,7 +272,7 @@ async def update(self, *, name: Optional[str] = None) -> Dict: 'name': name, } - return await self._update(_filter_out_none_values_recursively(updated_fields)) + return await self._update(filter_out_none_values_recursively(updated_fields)) async def delete(self) -> None: """Delete the key-value store. @@ -309,7 +304,7 @@ async def list_keys(self, *, limit: Optional[int] = None, exclusive_start_key: O params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def get_record(self, key: str) -> Optional[Dict]: """Retrieve the given record from the key-value store. diff --git a/src/apify_client/clients/resource_clients/key_value_store_collection.py b/src/apify_client/clients/resource_clients/key_value_store_collection.py index 71e76dba..adae7db2 100644 --- a/src/apify_client/clients/resource_clients/key_value_store_collection.py +++ b/src/apify_client/clients/resource_clients/key_value_store_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync @@ -48,7 +50,7 @@ def get_or_create(self, *, name: Optional[str] = None, schema: Optional[Dict] = Returns: dict: The retrieved or newly-created key-value store. """ - return self._get_or_create(name=name, resource=_filter_out_none_values_recursively({'schema': schema})) + return self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) class KeyValueStoreCollectionClientAsync(ResourceCollectionClientAsync): @@ -95,4 +97,4 @@ async def get_or_create(self, *, name: Optional[str] = None, schema: Optional[Di Returns: dict: The retrieved or newly-created key-value store. 
""" - return await self._get_or_create(name=name, resource=_filter_out_none_values_recursively({'schema': schema})) + return await self._get_or_create(name=name, resource=filter_out_none_values_recursively({'schema': schema})) diff --git a/src/apify_client/clients/resource_clients/log.py b/src/apify_client/clients/resource_clients/log.py index efcc5355..3a15aca4 100644 --- a/src/apify_client/clients/resource_clients/log.py +++ b/src/apify_client/clients/resource_clients/log.py @@ -3,8 +3,10 @@ import httpx +from apify_shared.utils import ignore_docs + from ..._errors import ApifyApiError -from ..._utils import _catch_not_found_or_throw, ignore_docs +from ..._utils import _catch_not_found_or_throw from ..base import ResourceClient, ResourceClientAsync diff --git a/src/apify_client/clients/resource_clients/request_queue.py b/src/apify_client/clients/resource_clients/request_queue.py index 6b6ea16f..329e26c3 100644 --- a/src/apify_client/clients/resource_clients/request_queue.py +++ b/src/apify_client/clients/resource_clients/request_queue.py @@ -1,7 +1,9 @@ from typing import Any, Dict, List, Optional +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, parse_date_fields + from ..._errors import ApifyApiError -from ..._utils import _catch_not_found_or_throw, _filter_out_none_values_recursively, _parse_date_fields, _pluck_data, ignore_docs +from ..._utils import _catch_not_found_or_throw, _pluck_data from ..base import ResourceClient, ResourceClientAsync @@ -44,7 +46,7 @@ def update(self, *, name: Optional[str] = None) -> Dict: 'name': name, } - return self._update(_filter_out_none_values_recursively(updated_fields)) + return self._update(filter_out_none_values_recursively(updated_fields)) def delete(self) -> None: """Delete the request queue. @@ -72,7 +74,7 @@ def list_head(self, *, limit: Optional[int] = None) -> Dict: params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def list_and_lock_head(self, *, lock_secs: int, limit: Optional[int] = None) -> Dict: """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time. @@ -95,7 +97,7 @@ def list_and_lock_head(self, *, lock_secs: int, limit: Optional[int] = None) -> params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def add_request(self, request: Dict, *, forefront: Optional[bool] = None) -> Dict: """Add a request to the queue. @@ -121,7 +123,7 @@ def add_request(self, request: Dict, *, forefront: Optional[bool] = None) -> Dic params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def get_request(self, request_id: str) -> Optional[Dict]: """Retrieve a request from the queue. @@ -140,7 +142,7 @@ def get_request(self, request_id: str) -> Optional[Dict]: method='GET', params=self._params(), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) except ApifyApiError as exc: _catch_not_found_or_throw(exc) @@ -173,7 +175,7 @@ def update_request(self, request: Dict, *, forefront: Optional[bool] = None) -> params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def delete_request(self, request_id: str) -> None: """Delete a request from the queue. 
@@ -215,7 +217,7 @@ def prolong_request_lock(self, request_id: str, *, forefront: Optional[bool] = N params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def delete_request_lock(self, request_id: str, *, forefront: Optional[bool] = None) -> None: """Delete the lock on a request. @@ -257,7 +259,7 @@ def batch_add_requests(self, requests: List[Dict[str, Any]], *, forefront: Optio params=request_params, json=requests, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def batch_delete_requests(self, requests: List[Dict[str, Any]]) -> Dict: """Delete given requests from the queue. @@ -278,7 +280,7 @@ def batch_delete_requests(self, requests: List[Dict[str, Any]]) -> Dict: json=requests, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def list_requests(self, *, limit: Optional[int] = None, exclusive_start_id: Optional[str] = None) -> Dict: """List requests in the queue. @@ -297,7 +299,7 @@ def list_requests(self, *, limit: Optional[int] = None, exclusive_start_id: Opti params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) class RequestQueueClientAsync(ResourceClientAsync): @@ -339,7 +341,7 @@ async def update(self, *, name: Optional[str] = None) -> Dict: 'name': name, } - return await self._update(_filter_out_none_values_recursively(updated_fields)) + return await self._update(filter_out_none_values_recursively(updated_fields)) async def delete(self) -> None: """Delete the request queue. @@ -367,7 +369,7 @@ async def list_head(self, *, limit: Optional[int] = None) -> Dict: params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def list_and_lock_head(self, *, lock_secs: int, limit: Optional[int] = None) -> Dict: """Retrieve a given number of unlocked requests from the beginning of the queue and lock them for a given time. @@ -390,7 +392,7 @@ async def list_and_lock_head(self, *, lock_secs: int, limit: Optional[int] = Non params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def add_request(self, request: Dict, *, forefront: Optional[bool] = None) -> Dict: """Add a request to the queue. @@ -416,7 +418,7 @@ async def add_request(self, request: Dict, *, forefront: Optional[bool] = None) params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def get_request(self, request_id: str) -> Optional[Dict]: """Retrieve a request from the queue. @@ -435,7 +437,7 @@ async def get_request(self, request_id: str) -> Optional[Dict]: method='GET', params=self._params(), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) except ApifyApiError as exc: _catch_not_found_or_throw(exc) @@ -468,7 +470,7 @@ async def update_request(self, request: Dict, *, forefront: Optional[bool] = Non params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def delete_request(self, request_id: str) -> None: """Delete a request from the queue. 
@@ -510,7 +512,7 @@ async def prolong_request_lock(self, request_id: str, *, forefront: Optional[boo params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def delete_request_lock(self, request_id: str, *, forefront: Optional[bool] = None) -> None: """Delete the lock on a request. @@ -552,7 +554,7 @@ async def batch_add_requests(self, requests: List[Dict[str, Any]], *, forefront: params=request_params, json=requests, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def batch_delete_requests(self, requests: List[Dict[str, Any]]) -> Dict: """Delete given requests from the queue. @@ -572,7 +574,7 @@ async def batch_delete_requests(self, requests: List[Dict[str, Any]]) -> Dict: params=request_params, json=requests, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def list_requests(self, *, limit: Optional[int] = None, exclusive_start_id: Optional[str] = None) -> Dict: """List requests in the queue. @@ -591,4 +593,4 @@ async def list_requests(self, *, limit: Optional[int] = None, exclusive_start_id params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) diff --git a/src/apify_client/clients/resource_clients/request_queue_collection.py b/src/apify_client/clients/resource_clients/request_queue_collection.py index 1441718d..ccc99436 100644 --- a/src/apify_client/clients/resource_clients/request_queue_collection.py +++ b/src/apify_client/clients/resource_clients/request_queue_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync diff --git a/src/apify_client/clients/resource_clients/run.py b/src/apify_client/clients/resource_clients/run.py index 76b322ae..3cf29b58 100644 --- a/src/apify_client/clients/resource_clients/run.py +++ b/src/apify_client/clients/resource_clients/run.py @@ -1,13 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ( - _encode_key_value_store_record_value, - _filter_out_none_values_recursively, - _parse_date_fields, - _pluck_data, - _to_safe_id, - ignore_docs, -) +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, parse_date_fields + +from ..._utils import _encode_key_value_store_record_value, _pluck_data, _to_safe_id from ..base import ActorJobBaseClient, ActorJobBaseClientAsync from .dataset import DatasetClient, DatasetClientAsync from .key_value_store import KeyValueStoreClient, KeyValueStoreClientAsync @@ -51,7 +46,7 @@ def update(self, *, status_message: Optional[str] = None, is_status_message_term 'isStatusMessageTerminal': is_status_message_terminal, } - return self._update(_filter_out_none_values_recursively(updated_fields)) + return self._update(filter_out_none_values_recursively(updated_fields)) def abort(self, *, gracefully: Optional[bool] = None) -> Dict: """Abort the actor run which is starting or currently running and return its details. 
@@ -119,7 +114,7 @@ def metamorph( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def resurrect( self, @@ -158,7 +153,7 @@ def resurrect( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def dataset(self) -> DatasetClient: """Get the client for the default dataset of the actor run. @@ -245,7 +240,7 @@ async def update(self, *, status_message: Optional[str] = None, is_status_messag 'isStatusMessageTerminal': is_status_message_terminal, } - return await self._update(_filter_out_none_values_recursively(updated_fields)) + return await self._update(filter_out_none_values_recursively(updated_fields)) async def abort(self, *, gracefully: Optional[bool] = None) -> Dict: """Abort the actor run which is starting or currently running and return its details. @@ -313,7 +308,7 @@ async def metamorph( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def resurrect( self, @@ -352,7 +347,7 @@ async def resurrect( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def dataset(self) -> DatasetClientAsync: """Get the client for the default dataset of the actor run. diff --git a/src/apify_client/clients/resource_clients/run_collection.py b/src/apify_client/clients/resource_clients/run_collection.py index 84841ebf..d99af668 100644 --- a/src/apify_client/clients/resource_clients/run_collection.py +++ b/src/apify_client/clients/resource_clients/run_collection.py @@ -1,7 +1,9 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, _maybe_extract_enum_member_value, ignore_docs -from ...consts import ActorJobStatus +from apify_shared.consts import ActorJobStatus +from apify_shared.models import ListPage +from apify_shared.utils import ignore_docs, maybe_extract_enum_member_value + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync @@ -41,7 +43,7 @@ def list( limit=limit, offset=offset, desc=desc, - status=_maybe_extract_enum_member_value(status), + status=maybe_extract_enum_member_value(status), ) @@ -81,5 +83,5 @@ async def list( limit=limit, offset=offset, desc=desc, - status=_maybe_extract_enum_member_value(status), + status=maybe_extract_enum_member_value(status), ) diff --git a/src/apify_client/clients/resource_clients/schedule.py b/src/apify_client/clients/resource_clients/schedule.py index e5c9d2af..3e18ac16 100644 --- a/src/apify_client/clients/resource_clients/schedule.py +++ b/src/apify_client/clients/resource_clients/schedule.py @@ -1,7 +1,9 @@ from typing import Any, Dict, List, Optional +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..._errors import ApifyApiError -from ..._utils import _catch_not_found_or_throw, _filter_out_none_values_recursively, _pluck_data_as_list, ignore_docs +from ..._utils import _catch_not_found_or_throw, _pluck_data_as_list from ..base import ResourceClient, ResourceClientAsync @@ -87,7 +89,7 @@ def update( title=title, ) - return self._update(_filter_out_none_values_recursively(schedule_representation)) + return self._update(filter_out_none_values_recursively(schedule_representation)) def delete(self) -> None: """Delete the schedule. 
@@ -177,7 +179,7 @@ async def update( title=title, ) - return await self._update(_filter_out_none_values_recursively(schedule_representation)) + return await self._update(filter_out_none_values_recursively(schedule_representation)) async def delete(self) -> None: """Delete the schedule. diff --git a/src/apify_client/clients/resource_clients/schedule_collection.py b/src/apify_client/clients/resource_clients/schedule_collection.py index aa0ba58e..15931bea 100644 --- a/src/apify_client/clients/resource_clients/schedule_collection.py +++ b/src/apify_client/clients/resource_clients/schedule_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, List, Optional -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync from .schedule import _get_schedule_representation @@ -78,7 +80,7 @@ def create( title=title, ) - return self._create(_filter_out_none_values_recursively(schedule_representation)) + return self._create(filter_out_none_values_recursively(schedule_representation)) class ScheduleCollectionClientAsync(ResourceCollectionClientAsync): @@ -154,4 +156,4 @@ async def create( title=title, ) - return await self._create(_filter_out_none_values_recursively(schedule_representation)) + return await self._create(filter_out_none_values_recursively(schedule_representation)) diff --git a/src/apify_client/clients/resource_clients/task.py b/src/apify_client/clients/resource_clients/task.py index 8922c308..9f4a828a 100644 --- a/src/apify_client/clients/resource_clients/task.py +++ b/src/apify_client/clients/resource_clients/task.py @@ -1,16 +1,10 @@ from typing import Any, Dict, List, Optional, cast +from apify_shared.consts import ActorJobStatus, MetaOrigin +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, maybe_extract_enum_member_value, parse_date_fields + from ..._errors import ApifyApiError -from ..._utils import ( - _catch_not_found_or_throw, - _encode_webhook_list_to_base64, - _filter_out_none_values_recursively, - _maybe_extract_enum_member_value, - _parse_date_fields, - _pluck_data, - ignore_docs, -) -from ...consts import ActorJobStatus, MetaOrigin +from ..._utils import _catch_not_found_or_throw, _encode_webhook_list_to_base64, _pluck_data from ..base import ResourceClient, ResourceClientAsync from .run import RunClient, RunClientAsync from .run_collection import RunCollectionClient, RunCollectionClientAsync @@ -94,7 +88,7 @@ def update( title=title, ) - return self._update(_filter_out_none_values_recursively(task_representation)) + return self._update(filter_out_none_values_recursively(task_representation)) def delete(self) -> None: """Delete the task. 
@@ -154,7 +148,7 @@ def start( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) def call( self, @@ -252,8 +246,8 @@ def last_run(self, *, status: Optional[ActorJobStatus] = None, origin: Optional[ resource_id='last', resource_path='runs', params=self._params( - status=_maybe_extract_enum_member_value(status), - origin=_maybe_extract_enum_member_value(origin), + status=maybe_extract_enum_member_value(status), + origin=maybe_extract_enum_member_value(origin), ), )) @@ -317,7 +311,7 @@ async def update( title=title, ) - return await self._update(_filter_out_none_values_recursively(task_representation)) + return await self._update(filter_out_none_values_recursively(task_representation)) async def delete(self) -> None: """Delete the task. @@ -377,7 +371,7 @@ async def start( params=request_params, ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) async def call( self, @@ -475,8 +469,8 @@ def last_run(self, *, status: Optional[ActorJobStatus] = None, origin: Optional[ resource_id='last', resource_path='runs', params=self._params( - status=_maybe_extract_enum_member_value(status), - origin=_maybe_extract_enum_member_value(origin), + status=maybe_extract_enum_member_value(status), + origin=maybe_extract_enum_member_value(origin), ), )) diff --git a/src/apify_client/clients/resource_clients/task_collection.py b/src/apify_client/clients/resource_clients/task_collection.py index d6965022..8f0d21e2 100644 --- a/src/apify_client/clients/resource_clients/task_collection.py +++ b/src/apify_client/clients/resource_clients/task_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync from .task import _get_task_representation @@ -74,7 +76,7 @@ def create( title=title, ) - return self._create(_filter_out_none_values_recursively(task_representation)) + return self._create(filter_out_none_values_recursively(task_representation)) class TaskCollectionClientAsync(ResourceCollectionClientAsync): @@ -146,4 +148,4 @@ async def create( title=title, ) - return await self._create(_filter_out_none_values_recursively(task_representation)) + return await self._create(filter_out_none_values_recursively(task_representation)) diff --git a/src/apify_client/clients/resource_clients/user.py b/src/apify_client/clients/resource_clients/user.py index 11da5742..9324a0be 100644 --- a/src/apify_client/clients/resource_clients/user.py +++ b/src/apify_client/clients/resource_clients/user.py @@ -1,6 +1,7 @@ from typing import Any, Dict, Optional -from ..._utils import ignore_docs +from apify_shared.utils import ignore_docs + from ..base import ResourceClient, ResourceClientAsync diff --git a/src/apify_client/clients/resource_clients/webhook.py b/src/apify_client/clients/resource_clients/webhook.py index ebd00d6c..f11adaae 100644 --- a/src/apify_client/clients/resource_clients/webhook.py +++ b/src/apify_client/clients/resource_clients/webhook.py @@ -1,15 +1,10 @@ from typing import Any, Dict, List, Optional +from apify_shared.consts import WebhookEventType +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs, maybe_extract_enum_member_value, parse_date_fields + from 
..._errors import ApifyApiError -from ..._utils import ( - _catch_not_found_or_throw, - _filter_out_none_values_recursively, - _maybe_extract_enum_member_value, - _parse_date_fields, - _pluck_data, - ignore_docs, -) -from ...consts import WebhookEventType +from ..._utils import _catch_not_found_or_throw, _pluck_data from ..base import ResourceClient, ResourceClientAsync from .webhook_dispatch_collection import WebhookDispatchCollectionClient, WebhookDispatchCollectionClientAsync @@ -46,7 +41,7 @@ def _get_webhook_representation( webhook['isAdHoc'] = True if event_types is not None: - webhook['eventTypes'] = [_maybe_extract_enum_member_value(event_type) for event_type in event_types] + webhook['eventTypes'] = [maybe_extract_enum_member_value(event_type) for event_type in event_types] return webhook @@ -115,7 +110,7 @@ def update( is_ad_hoc=is_ad_hoc, ) - return self._update(_filter_out_none_values_recursively(webhook_representation)) + return self._update(filter_out_none_values_recursively(webhook_representation)) def delete(self) -> None: """Delete the webhook. @@ -141,7 +136,7 @@ def test(self) -> Optional[Dict]: params=self._params(), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) except ApifyApiError as exc: _catch_not_found_or_throw(exc) @@ -225,7 +220,7 @@ async def update( is_ad_hoc=is_ad_hoc, ) - return await self._update(_filter_out_none_values_recursively(webhook_representation)) + return await self._update(filter_out_none_values_recursively(webhook_representation)) async def delete(self) -> None: """Delete the webhook. @@ -251,7 +246,7 @@ async def test(self) -> Optional[Dict]: params=self._params(), ) - return _parse_date_fields(_pluck_data(response.json())) + return parse_date_fields(_pluck_data(response.json())) except ApifyApiError as exc: _catch_not_found_or_throw(exc) diff --git a/src/apify_client/clients/resource_clients/webhook_collection.py b/src/apify_client/clients/resource_clients/webhook_collection.py index 5db69376..4ad0e935 100644 --- a/src/apify_client/clients/resource_clients/webhook_collection.py +++ b/src/apify_client/clients/resource_clients/webhook_collection.py @@ -1,8 +1,9 @@ from typing import Any, Dict, List, Optional -from apify_client.consts import WebhookEventType +from apify_shared.consts import WebhookEventType +from apify_shared.models import ListPage +from apify_shared.utils import filter_out_none_values_recursively, ignore_docs -from ..._utils import ListPage, _filter_out_none_values_recursively, ignore_docs from ..base import ResourceCollectionClient, ResourceCollectionClientAsync from .webhook import _get_webhook_representation @@ -88,7 +89,7 @@ def create( is_ad_hoc=is_ad_hoc, ) - return self._create(_filter_out_none_values_recursively(webhook_representation)) + return self._create(filter_out_none_values_recursively(webhook_representation)) class WebhookCollectionClientAsync(ResourceCollectionClientAsync): @@ -172,4 +173,4 @@ async def create( is_ad_hoc=is_ad_hoc, ) - return await self._create(_filter_out_none_values_recursively(webhook_representation)) + return await self._create(filter_out_none_values_recursively(webhook_representation)) diff --git a/src/apify_client/clients/resource_clients/webhook_dispatch.py b/src/apify_client/clients/resource_clients/webhook_dispatch.py index 76492b0e..f6f8169c 100644 --- a/src/apify_client/clients/resource_clients/webhook_dispatch.py +++ b/src/apify_client/clients/resource_clients/webhook_dispatch.py @@ -1,6 +1,7 @@ from typing import 
Any, Dict, Optional -from ..._utils import ignore_docs +from apify_shared.utils import ignore_docs + from ..base import ResourceClient, ResourceClientAsync diff --git a/src/apify_client/clients/resource_clients/webhook_dispatch_collection.py b/src/apify_client/clients/resource_clients/webhook_dispatch_collection.py index e879ea88..7b16b996 100644 --- a/src/apify_client/clients/resource_clients/webhook_dispatch_collection.py +++ b/src/apify_client/clients/resource_clients/webhook_dispatch_collection.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional -from ..._utils import ListPage, ignore_docs +from apify_shared.models import ListPage +from apify_shared.utils import ignore_docs + from ..base import ResourceCollectionClient, ResourceCollectionClientAsync diff --git a/src/apify_client/consts.py b/src/apify_client/consts.py index 16a1c95b..5bfab5bb 100644 --- a/src/apify_client/consts.py +++ b/src/apify_client/consts.py @@ -1,87 +1,31 @@ -from enum import Enum - - -class ActorJobStatus(str, Enum): - """Available statuses for actor jobs (runs or builds).""" - - #: Actor job initialized but not started yet - READY = 'READY' - #: Actor job in progress - RUNNING = 'RUNNING' - #: Actor job finished successfully - SUCCEEDED = 'SUCCEEDED' - #: Actor job or build failed - FAILED = 'FAILED' - #: Actor job currently timing out - TIMING_OUT = 'TIMING-OUT' - #: Actor job timed out - TIMED_OUT = 'TIMED-OUT' - #: Actor job currently being aborted by user - ABORTING = 'ABORTING' - #: Actor job aborted by user - ABORTED = 'ABORTED' - - @property - def _is_terminal(self) -> bool: - """Whether this actor job status is terminal.""" - return self in (ActorJobStatus.SUCCEEDED, ActorJobStatus.FAILED, ActorJobStatus.TIMED_OUT, ActorJobStatus.ABORTED) - - -class ActorSourceType(str, Enum): - """Available source types for actors.""" - - #: Actor source code is comprised of multiple files - SOURCE_FILES = 'SOURCE_FILES' - #: Actor source code is cloned from a Git repository - GIT_REPO = 'GIT_REPO' - #: Actor source code is downloaded using a tarball or Zip file - TARBALL = 'TARBALL' - #: Actor source code is taken from a GitHub Gist - GITHUB_GIST = 'GITHUB_GIST' - - -class WebhookEventType(str, Enum): - """Events that can trigger a webhook.""" - - #: The actor run was created - ACTOR_RUN_CREATED = 'ACTOR.RUN.CREATED' - #: The actor run has succeeded - ACTOR_RUN_SUCCEEDED = 'ACTOR.RUN.SUCCEEDED' - #: The actor run has failed - ACTOR_RUN_FAILED = 'ACTOR.RUN.FAILED' - #: The actor run has timed out - ACTOR_RUN_TIMED_OUT = 'ACTOR.RUN.TIMED_OUT' - #: The actor run was aborted - ACTOR_RUN_ABORTED = 'ACTOR.RUN.ABORTED' - #: The actor run was resurrected - ACTOR_RUN_RESURRECTED = 'ACTOR.RUN.RESURRECTED' - - #: The actor build was created - ACTOR_BUILD_CREATED = 'ACTOR.BUILD.CREATED' - #: The actor build has succeeded - ACTOR_BUILD_SUCCEEDED = 'ACTOR.BUILD.SUCCEEDED' - #: The actor build has failed - ACTOR_BUILD_FAILED = 'ACTOR.BUILD.FAILED' - #: The actor build has timed out - ACTOR_BUILD_TIMED_OUT = 'ACTOR.BUILD.TIMED_OUT' - #: The actor build was aborted - ACTOR_BUILD_ABORTED = 'ACTOR.BUILD.ABORTED' - - -class MetaOrigin(str, Enum): - """Possible origins for actor runs, i.e. 
how were the jobs started.""" - - #: Job started from Developer console in Source section of actor - DEVELOPMENT = 'DEVELOPMENT' - #: Job started from other place on the website (either console or task detail page) - WEB = 'WEB' - #: Job started through API - API = 'API' - #: Job started through Scheduler - SCHEDULER = 'SCHEDULER' - #: Job started through test actor page - TEST = 'TEST' - #: Job started by the webhook - WEBHOOK = 'WEBHOOK' - #: Job started by another actor run - ACTOR = 'ACTOR' +import warnings +from typing import Any + +from apify_shared.consts import ActorJobStatus as _ActorJobStatus +from apify_shared.consts import ActorSourceType as _ActorSourceType +from apify_shared.consts import MetaOrigin as _MetaOrigin +from apify_shared.consts import WebhookEventType as _WebhookEventType + +DEPRECATED_NAMES = [ + 'ActorJobStatus', + 'ActorSourceType', + 'MetaOrigin', + 'WebhookEventType', +] + + +# The following piece of code is highly inspired by the example in https://peps.python.org/pep-0562. +# The else branch is missing intentionally! Check the following discussion for details: +# https://github.com/apify/apify-client-python/pull/132#discussion_r1277294315. +def __getattr__(name: str) -> Any: + if name in DEPRECATED_NAMES: + warnings.warn( + ( + f'Importing "{name}" from "apify_client.consts" is deprecated and will be removed in the future. ' + 'Please use "apify_shared" library instead.' + ), + category=DeprecationWarning, + stacklevel=2, + ) + return globals()[f'_{name}'] + raise AttributeError(f'module {__name__!r} has no attribute {name!r}') diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 9474d485..d68a5f1a 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -1,28 +1,10 @@ -import io import time -from datetime import datetime, timezone -from enum import Enum from typing import Any, Callable import pytest -from apify_client._utils import ( - _encode_webhook_list_to_base64, - _filter_out_none_values_recursively, - _filter_out_none_values_recursively_internal, - _is_content_type_json, - _is_content_type_text, - _is_content_type_xml, - _is_file_or_bytes, - _maybe_extract_enum_member_value, - _parse_date_fields, - _pluck_data, - _retry_with_exp_backoff, - _retry_with_exp_backoff_async, - _to_safe_id, - ignore_docs, -) -from apify_client.consts import WebhookEventType +from apify_client._utils import _encode_webhook_list_to_base64, _pluck_data, _retry_with_exp_backoff, _retry_with_exp_backoff_async, _to_safe_id +from apify_shared.consts import WebhookEventType def test__to_safe_id() -> None: @@ -31,35 +13,6 @@ def test__to_safe_id() -> None: assert _to_safe_id('abc~def') == 'abc~def' -def test__parse_date_fields() -> None: - # works correctly on empty dicts - assert _parse_date_fields({}) == {} - - # correctly parses dates on fields ending with -At - expected_datetime = datetime(2016, 11, 14, 11, 10, 52, 425000, timezone.utc) - assert _parse_date_fields({'createdAt': '2016-11-14T11:10:52.425Z'}) == {'createdAt': expected_datetime} - - # doesn't parse dates on fields not ending with -At - assert _parse_date_fields({'saveUntil': '2016-11-14T11:10:52.425Z'}) == {'saveUntil': '2016-11-14T11:10:52.425Z'} - - # parses dates in dicts in lists - expected_datetime = datetime(2016, 11, 14, 11, 10, 52, 425000, timezone.utc) - assert _parse_date_fields([{'createdAt': '2016-11-14T11:10:52.425Z'}]) == [{'createdAt': expected_datetime}] - - # parses nested dates - expected_datetime = datetime(2020, 2, 29, 10, 9, 8, 100000, timezone.utc) - assert 
_parse_date_fields({'a': {'b': {'c': {'createdAt': '2020-02-29T10:09:08.100Z'}}}}) \ - == {'a': {'b': {'c': {'createdAt': expected_datetime}}}} - - # doesn't parse dates nested too deep - expected_datetime = datetime(2020, 2, 29, 10, 9, 8, 100000, timezone.utc) - assert _parse_date_fields({'a': {'b': {'c': {'d': {'createdAt': '2020-02-29T10:09:08.100Z'}}}}}) \ - == {'a': {'b': {'c': {'d': {'createdAt': '2020-02-29T10:09:08.100Z'}}}}} - - # doesn't die when the date can't be parsed - assert _parse_date_fields({'createdAt': 'NOT_A_DATE'}) == {'createdAt': 'NOT_A_DATE'} - - def test__pluck_data() -> None: # works correctly when data is present assert _pluck_data({'data': {}}) == {} @@ -74,46 +27,6 @@ def test__pluck_data() -> None: _pluck_data('{"a": "b"}') -def test__is_content_type_json() -> None: - # returns True for the right content types - assert _is_content_type_json('application/json') is True - assert _is_content_type_json('application/jsonc') is True - # returns False for bad content types - assert _is_content_type_json('application/xml') is False - assert _is_content_type_json('application/ld+json') is False - - -def test__is_content_type_xml() -> None: - # returns True for the right content types - assert _is_content_type_xml('application/xml') is True - assert _is_content_type_xml('application/xhtml+xml') is True - # returns False for bad content types - assert _is_content_type_xml('application/json') is False - assert _is_content_type_xml('text/html') is False - - -def test__is_content_type_text() -> None: - # returns True for the right content types - assert _is_content_type_text('text/html') is True - assert _is_content_type_text('text/plain') is True - # returns False for bad content types - assert _is_content_type_text('application/json') is False - assert _is_content_type_text('application/text') is False - - -def test__is_file_or_bytes() -> None: - # returns True for the right value types - assert _is_file_or_bytes(b'abc') is True - assert _is_file_or_bytes(bytearray.fromhex('F0F1F2')) is True - assert _is_file_or_bytes(io.BytesIO(b'\x00\x01\x02')) is True - - # returns False for bad value types - assert _is_file_or_bytes('abc') is False - assert _is_file_or_bytes(['a', 'b', 'c']) is False - assert _is_file_or_bytes({'a': 'b'}) is False - assert _is_file_or_bytes(None) is False - - def test__retry_with_exp_backoff() -> None: attempt_counter = 0 @@ -225,39 +138,3 @@ def test__encode_webhook_list_to_base64() -> None: 'payload_template': '{"hello": "world", "resource":{{resource}}}', }, ]) == 'W3siZXZlbnRUeXBlcyI6IFsiQUNUT1IuUlVOLkNSRUFURUQiXSwgInJlcXVlc3RVcmwiOiAiaHR0cHM6Ly9leGFtcGxlLmNvbS9ydW4tY3JlYXRlZCJ9LCB7ImV2ZW50VHlwZXMiOiBbIkFDVE9SLlJVTi5TVUNDRUVERUQiXSwgInJlcXVlc3RVcmwiOiAiaHR0cHM6Ly9leGFtcGxlLmNvbS9ydW4tc3VjY2VlZGVkIiwgInBheWxvYWRUZW1wbGF0ZSI6ICJ7XCJoZWxsb1wiOiBcIndvcmxkXCIsIFwicmVzb3VyY2VcIjp7e3Jlc291cmNlfX19In1d' # noqa: E501 - - -def test__maybe_extract_enum_member_value() -> None: - class TestEnum(Enum): - A = 'A' - B = 'B' - - assert _maybe_extract_enum_member_value(TestEnum.A) == 'A' - assert _maybe_extract_enum_member_value(TestEnum.B) == 'B' - assert _maybe_extract_enum_member_value('C') == 'C' - assert _maybe_extract_enum_member_value(1) == 1 - assert _maybe_extract_enum_member_value(None) is None - - -def test__filter_out_none_values_recursively() -> None: - assert _filter_out_none_values_recursively({'k1': 'v1'}) == {'k1': 'v1'} - assert _filter_out_none_values_recursively({'k1': None}) == {} - assert _filter_out_none_values_recursively({'k1': 'v1', 'k2': 
None, 'k3': {'k4': 'v4', 'k5': None}, 'k6': {'k7': None}}) \ - == {'k1': 'v1', 'k3': {'k4': 'v4'}} - - -def test__filter_out_none_values_recursively_internal() -> None: - assert _filter_out_none_values_recursively_internal({}) == {} - assert _filter_out_none_values_recursively_internal({'k1': {}}) == {} - assert _filter_out_none_values_recursively_internal({}, False) == {} - assert _filter_out_none_values_recursively_internal({'k1': {}}, False) == {'k1': {}} - assert _filter_out_none_values_recursively_internal({}, True) is None - assert _filter_out_none_values_recursively_internal({'k1': {}}, True) is None - - -def test_ignore_docs() -> None: - def testing_function(_a: str, _b: str) -> str: - """Dummy docstring""" - return 'dummy' - - assert testing_function is ignore_docs(testing_function)
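Aside on the rewritten `src/apify_client/consts.py` above: the module keeps the old import path alive through a module-level `__getattr__` (PEP 562) — the deprecated names still resolve, but to the `apify_shared` enums, and each access emits a `DeprecationWarning`. A small illustration of what callers of the legacy path will observe; this is a usage sketch, not part of the diff:

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    # Resolved via apify_client.consts.__getattr__, which re-exports the
    # apify_shared enum and emits a DeprecationWarning.
    from apify_client.consts import ActorJobStatus

from apify_shared.consts import ActorJobStatus as SharedActorJobStatus

assert ActorJobStatus is SharedActorJobStatus  # same enum object, just re-exported
assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# Names outside DEPRECATED_NAMES fall through to AttributeError, so e.g.
# `from apify_client.consts import Nonexistent` still fails with ImportError.
```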