Commit e3d010c (1 parent: 8b39757). This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Showing 128 changed files with 14,924 additions and 8,287 deletions.
.gitignore
|
@@ -29,6 +29,7 @@ wheels/ | |
|
||
# Other | ||
test.py | ||
do_release.py | ||
.env | ||
|
||
# Coverage | ||
|
cognite/client/__init__.py
@@ -1,4 +1,3 @@ | ||
from cognite.client.cognite_client import CogniteClient | ||
from cognite.client.exceptions import APIError | ||
from cognite.client._cognite_client import CogniteClient | ||
|
||
__version__ = "0.13.3" | ||
__version__ = "1.0.0a20" |
@@ -0,0 +1 @@ | ||
from typing import * |
cognite/client/_api/events.py
@@ -0,0 +1,282 @@ | ||
from typing import * | ||
|
||
from cognite.client._api_client import APIClient | ||
from cognite.client.data_classes import Event, EventFilter, EventList, EventUpdate | ||
from cognite.client.utils import _utils as utils | ||
|
||
|
||
class EventsAPI(APIClient): | ||
_RESOURCE_PATH = "/events" | ||
_LIST_CLASS = EventList | ||
|
||
def __call__( | ||
self, | ||
chunk_size: int = None, | ||
start_time: Dict[str, Any] = None, | ||
end_time: Dict[str, Any] = None, | ||
type: str = None, | ||
subtype: str = None, | ||
metadata: Dict[str, Any] = None, | ||
asset_ids: List[int] = None, | ||
source: str = None, | ||
created_time: Dict[str, Any] = None, | ||
last_updated_time: Dict[str, Any] = None, | ||
external_id_prefix: str = None, | ||
) -> Generator[Union[Event, EventList], None, None]: | ||
"""Iterate over events | ||
Fetches events as they are iterated over, so you keep a limited number of events in memory. | ||
Args: | ||
chunk_size (int, optional): Number of events to return in each chunk. Defaults to yielding one event at a time. | ||
start_time (Dict[str, Any]): Range between two timestamps | ||
end_time (Dict[str, Any]): Range between two timestamps | ||
type (str): Type of the event, e.g. 'failure'. | ||
subtype (str): Subtype of the event, e.g. 'electrical'. | ||
metadata (Dict[str, Any]): Customizable extra data about the event. String key -> String value. | ||
asset_ids (List[int]): Asset IDs of related equipment that this event relates to. | ||
source (str): The source of this event. | ||
created_time (Dict[str, Any]): Range between two timestamps | ||
last_updated_time (Dict[str, Any]): Range between two timestamps | ||
external_id_prefix (str): External Id provided by client. Should be unique within the project | ||
Yields: | ||
Union[Event, EventList]: yields Event one by one if chunk_size is not specified, else EventList objects. | ||
""" | ||
filter = EventFilter( | ||
start_time=start_time, | ||
end_time=end_time, | ||
metadata=metadata, | ||
asset_ids=asset_ids, | ||
source=source, | ||
created_time=created_time, | ||
last_updated_time=last_updated_time, | ||
external_id_prefix=external_id_prefix, | ||
type=type, | ||
subtype=subtype, | ||
).dump(camel_case=True) | ||
return self._list_generator(method="POST", chunk_size=chunk_size, filter=filter) | ||
|
||
def __iter__(self) -> Generator[Event, None, None]: | ||
"""Iterate over events | ||
Fetches events as they are iterated over, so you keep a limited number of events in memory. | ||
Yields: | ||
Event: yields Events one by one. | ||
""" | ||
return self.__call__() | ||
|
||
def retrieve(self, id: Optional[int] = None, external_id: Optional[str] = None) -> Optional[Event]: | ||
"""Retrieve a single event by id. | ||
Args: | ||
id (int, optional): ID | ||
external_id (str, optional): External ID | ||
Returns: | ||
Optional[Event]: Requested event or None if it does not exist. | ||
Examples: | ||
Get event by id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.events.retrieve(id=1) | ||
Get event by external id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.events.retrieve(external_id="1") | ||
""" | ||
utils.assert_exactly_one_of_id_or_external_id(id, external_id) | ||
return self._retrieve_multiple(ids=id, external_ids=external_id, wrap_ids=True) | ||
|
||
def retrieve_multiple(self, ids: Optional[List[int]] = None, external_ids: Optional[List[str]] = None) -> EventList: | ||
"""Retrieve multiple events by id. | ||
Args: | ||
ids (List[int], optional): IDs | ||
external_ids (List[str], optional): External IDs | ||
Returns: | ||
EventList: The requested events. | ||
Examples: | ||
Get events by id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.events.retrieve_multiple(ids=[1, 2, 3]) | ||
Get events by external id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.events.retrieve_multiple(external_ids=["abc", "def"]) | ||
""" | ||
utils.assert_type(ids, "id", [List], allow_none=True) | ||
utils.assert_type(external_ids, "external_id", [List], allow_none=True) | ||
return self._retrieve_multiple(ids=ids, external_ids=external_ids, wrap_ids=True) | ||
|
||
def list( | ||
self, | ||
start_time: Dict[str, Any] = None, | ||
end_time: Dict[str, Any] = None, | ||
type: str = None, | ||
subtype: str = None, | ||
metadata: Dict[str, Any] = None, | ||
asset_ids: List[int] = None, | ||
source: str = None, | ||
created_time: Dict[str, Any] = None, | ||
last_updated_time: Dict[str, Any] = None, | ||
external_id_prefix: str = None, | ||
limit: int = 25, | ||
) -> EventList: | ||
"""List events | ||
Args: | ||
start_time (Dict[str, Any]): Range between two timestamps | ||
end_time (Dict[str, Any]): Range between two timestamps | ||
type (str): Type of the event, e.g. 'failure'. | ||
subtype (str): Subtype of the event, e.g. 'electrical'. | ||
metadata (Dict[str, Any]): Customizable extra data about the event. String key -> String value. | ||
asset_ids (List[int]): Asset IDs of related equipment that this event relates to. | ||
source (str): The source of this event. | ||
created_time (Dict[str, Any]): Range between two timestamps | ||
last_updated_time (Dict[str, Any]): Range between two timestamps | ||
external_id_prefix (str): External Id provided by client. Should be unique within the project | ||
limit (int, optional): Maximum number of events to return. Defaults to 25. Set to -1, float("inf") or None | ||
to return all items. | ||
Returns: | ||
EventList: List of requested events | ||
Examples: | ||
List events and filter on max start time:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> event_list = c.events.list(limit=5, start_time={"max": 1500000000}) | ||
Iterate over events:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> for event in c.events: | ||
... event # do something with the event | ||
Iterate over chunks of events to reduce memory load:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> for event_list in c.events(chunk_size=2500): | ||
... event_list # do something with the events | ||
""" | ||
filter = EventFilter( | ||
start_time=start_time, | ||
end_time=end_time, | ||
metadata=metadata, | ||
asset_ids=asset_ids, | ||
source=source, | ||
created_time=created_time, | ||
last_updated_time=last_updated_time, | ||
external_id_prefix=external_id_prefix, | ||
type=type, | ||
subtype=subtype, | ||
).dump(camel_case=True) | ||
return self._list(method="POST", limit=limit, filter=filter) | ||
|
||
def create(self, event: Union[Event, List[Event]]) -> Union[Event, EventList]: | ||
"""Create one or more events. | ||
Args: | ||
event (Union[Event, List[Event]]): Event or list of events to create. | ||
Returns: | ||
Union[Event, EventList]: Created event(s) | ||
Examples: | ||
Create new events:: | ||
>>> from cognite.client import CogniteClient | ||
>>> from cognite.client.data_classes import Event | ||
>>> c = CogniteClient() | ||
>>> events = [Event(start_time=0, end_time=1), Event(start_time=2, end_time=3)] | ||
>>> res = c.events.create(events) | ||
""" | ||
return self._create_multiple(items=event) | ||
|
||
def delete(self, id: Union[int, List[int]] = None, external_id: Union[str, List[str]] = None) -> None: | ||
"""Delete one or more events | ||
Args: | ||
id (Union[int, List[int]]): ID or list of IDs | ||
external_id (Union[str, List[str]]): External ID or list of external IDs | ||
Returns: | ||
None | ||
Examples: | ||
Delete events by id or external id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.events.delete(id=[1,2,3], external_id="3") | ||
""" | ||
self._delete_multiple(ids=id, external_ids=external_id, wrap_ids=True) | ||
|
||
def update(self, item: Union[Event, EventUpdate, List[Union[Event, EventUpdate]]]) -> Union[Event, EventList]: | ||
"""Update one or more events | ||
Args: | ||
item (Union[Event, EventUpdate, List[Union[Event, EventUpdate]]]): Event(s) to update | ||
Returns: | ||
Union[Event, EventList]: Updated event(s) | ||
Examples: | ||
Update an event that you have fetched. This will perform a full update of the event:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> event = c.events.retrieve(id=1) | ||
>>> event.description = "New description" | ||
>>> res = c.events.update(event) | ||
Perform a partial update on an event, updating the description and adding a new field to metadata:: | ||
>>> from cognite.client import CogniteClient | ||
>>> from cognite.client.data_classes import EventUpdate | ||
>>> c = CogniteClient() | ||
>>> my_update = EventUpdate(id=1).description.set("New description").metadata.add({"key": "value"}) | ||
>>> res = c.events.update(my_update) | ||
""" | ||
return self._update_multiple(items=item) | ||
|
||
def search(self, description: str = None, filter: Union[EventFilter, Dict] = None, limit: int = None) -> EventList: | ||
"""Search for events | ||
Args: | ||
description (str): Fuzzy match on description. | ||
filter (Union[EventFilter, Dict]): Filter to apply. Performs exact match on these fields. | ||
limit (int): Maximum number of results to return. | ||
Returns: | ||
EventList: List of requested events | ||
Examples: | ||
Search for events:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.events.search(description="some description") | ||
""" | ||
return self._search(search={"description": description}, filter=filter, limit=limit) |
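Taken together, the methods above compose naturally. A minimal usage sketch, assuming credentials come from the COGNITE_API_KEY and COGNITE_CLIENT_NAME environment variables and using placeholder filter values:

>>> from cognite.client import CogniteClient
>>> from cognite.client.data_classes import Event, EventFilter
>>> c = CogniteClient()
>>> # create an event, then list and search using the same kind of filter values
>>> created = c.events.create(Event(start_time=0, end_time=1))
>>> listed = c.events.list(source="some source", limit=10)
>>> found = c.events.search(description="pump", filter=EventFilter(source="some source"))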
cognite/client/_api/iam.py
@@ -0,0 +1,227 @@ | ||
import numbers | ||
from typing import * | ||
|
||
from cognite.client._api_client import APIClient | ||
from cognite.client.data_classes import ( | ||
APIKey, | ||
APIKeyList, | ||
Group, | ||
GroupList, | ||
SecurityCategory, | ||
SecurityCategoryList, | ||
ServiceAccount, | ||
ServiceAccountList, | ||
) | ||
from cognite.client.utils import _utils as utils | ||
|
||
|
||
class IAMAPI(APIClient): | ||
def __init__(self, *args, **kwargs): | ||
super().__init__(*args, **kwargs) | ||
self.service_accounts = ServiceAccountsAPI(*args, **kwargs) | ||
self.api_keys = APIKeysAPI(*args, **kwargs) | ||
self.groups = GroupsAPI(*args, **kwargs) | ||
self.security_categories = SecurityCategoriesAPI(*args, **kwargs) | ||
|
||
|
||
class ServiceAccountsAPI(APIClient): | ||
_RESOURCE_PATH = "/serviceaccounts" | ||
_LIST_CLASS = ServiceAccountList | ||
|
||
def list(self) -> ServiceAccountList: | ||
"""List service accounts. | ||
Returns: | ||
ServiceAccountList: List of service accounts. | ||
""" | ||
return ServiceAccountList._load(self._get(url_path=self._RESOURCE_PATH).json()["items"]) | ||
|
||
def create( | ||
self, service_account: Union[ServiceAccount, List[ServiceAccount]] | ||
) -> Union[ServiceAccount, ServiceAccountList]: | ||
"""Create one or more new service accounts. | ||
Args: | ||
service_account (Union[ServiceAccount, List[ServiceAccount]]): The service account(s) to create. | ||
Returns: | ||
Union[ServiceAccount, ServiceAccountList]: The created service account(s). | ||
""" | ||
return self._create_multiple(items=service_account) | ||
|
||
def delete(self, id: Union[int, List[int]]) -> None: | ||
"""Delete one or more service accounts. | ||
Args: | ||
id (Union[int, List[int]]): ID or list of IDs to delete. | ||
Returns: | ||
None | ||
""" | ||
self._delete_multiple(ids=id, wrap_ids=False) | ||
|
||
|
||
class APIKeysAPI(APIClient): | ||
_RESOURCE_PATH = "/apikeys" | ||
_LIST_CLASS = APIKeyList | ||
|
||
def list(self, include_deleted: bool = False, all: bool = False, service_account_id: int = None) -> APIKeyList: | ||
"""List api keys. | ||
Args: | ||
include_deleted (bool): Whether or not to include deleted api keys. Defaults to False. | ||
all (bool): Whether or not to return all api keys for this project. Requires users:list acl. Defaults to False. | ||
service_account_id (int): Get api keys for this service account only. Only available to admin users. | ||
Returns: | ||
APIKeyList: List of api keys. | ||
""" | ||
res = self._get( | ||
self._RESOURCE_PATH, | ||
params={"all": all, "serviceAccountId": service_account_id, "includeDeleted": include_deleted}, | ||
) | ||
return APIKeyList._load(res.json()["items"]) | ||
|
||
def create(self, service_account_id: Union[int, List[int]]) -> Union[APIKey, APIKeyList]: | ||
"""Create a new api key for one or more service accounts. | ||
Args: | ||
service_account_id (Union[int, List[int]]): ID or list of IDs of service accounts to create an api key for. | ||
Returns: | ||
Union[APIKey, APIKeyList]: API key or list of api keys. | ||
""" | ||
utils.assert_type(service_account_id, "service_account_id", [numbers.Integral, list]) | ||
if isinstance(service_account_id, numbers.Integral): | ||
items = {"serviceAccountId": service_account_id} | ||
else: | ||
items = [{"serviceAccountId": sa_id} for sa_id in service_account_id] | ||
return self._create_multiple(items=items) | ||
|
||
def delete(self, id: Union[int, List[int]]) -> None: | ||
"""Delete one or more api keys. | ||
Args: | ||
id (Union[int, List[int]]): ID or list of IDs of api keys to delete. | ||
Returns: | ||
None | ||
""" | ||
self._delete_multiple(ids=id, wrap_ids=False) | ||
|
||
|
||
class GroupsAPI(APIClient): | ||
_RESOURCE_PATH = "/groups" | ||
_LIST_CLASS = GroupList | ||
|
||
def list(self, all: bool = False) -> GroupList: | ||
"""List groups. | ||
Args: | ||
all (bool): Whether to get all groups, only available with the groups:list acl. | ||
Returns: | ||
GroupList: List of groups. | ||
""" | ||
res = self._get(self._RESOURCE_PATH, params={"all": all}) | ||
return GroupList._load(res.json()["items"]) | ||
|
||
def create(self, group: Union[Group, List[Group]]) -> Union[Group, GroupList]: | ||
"""Create one or more groups. | ||
Args: | ||
group (Union[Group, List[Group]]): Group or list of groups to create. | ||
Returns: | ||
Union[Group, GroupList]: The created group(s). | ||
""" | ||
return self._create_multiple(group) | ||
|
||
def delete(self, id: Union[int, List[int]]) -> None: | ||
"""Delete one or more groups. | ||
Args: | ||
id (Union[int, List[int]]): ID or list of IDs of groups to delete. | ||
Returns: | ||
None | ||
""" | ||
self._delete_multiple(ids=id, wrap_ids=False) | ||
|
||
def list_service_accounts(self, id: int) -> ServiceAccountList: | ||
"""List service accounts in a group. | ||
Args: | ||
id (int): List service accounts which are a member of this group. | ||
Returns: | ||
ServiceAccountList: List of service accounts. | ||
""" | ||
resource_path = self._RESOURCE_PATH + "/{}/serviceaccounts".format(id) | ||
return ServiceAccountList._load(self._get(resource_path).json()["items"]) | ||
|
||
def add_service_account(self, id: int, service_account_id: Union[int, List[int]]) -> None: | ||
"""Add one or more service accounts to a group. | ||
Args: | ||
id (int): Add service accounts to the group with this id. | ||
service_account_id (Union[int, List[int]]): Add these service accounts to the specified group. | ||
Returns: | ||
None | ||
""" | ||
resource_path = self._RESOURCE_PATH + "/{}/serviceaccounts".format(id) | ||
self._create_multiple(cls=ServiceAccountList, resource_path=resource_path, items=service_account_id) | ||
|
||
def remove_service_account(self, id: int, service_account_id: Union[int, List[int]]) -> None: | ||
"""Remove one or more service accounts from a group. | ||
Args: | ||
id (int): Remove service accounts from the group with this id. | ||
service_account_id (Union[int, List[int]]): Remove these service accounts from the specified group. | ||
Returns: | ||
None | ||
""" | ||
url_path = self._RESOURCE_PATH + "/{}/serviceaccounts/remove".format(id) | ||
all_ids = self._process_ids(service_account_id, None, False) | ||
self._post(url_path, json={"items": all_ids}) | ||
|
||
|
||
class SecurityCategoriesAPI(APIClient): | ||
_RESOURCE_PATH = "/securitycategories" | ||
_LIST_CLASS = SecurityCategoryList | ||
|
||
def list(self, limit: int = 25) -> SecurityCategoryList: | ||
"""List security categories. | ||
Args: | ||
limit (int): Max number of security categories to return. Defaults to 25. | ||
Returns: | ||
SecurityCategoryList: List of security categories | ||
""" | ||
return self._list(method="GET", limit=limit) | ||
|
||
def create( | ||
self, security_category: Union[SecurityCategory, List[SecurityCategory]] | ||
) -> Union[SecurityCategory, SecurityCategoryList]: | ||
"""Create one or more security categories. | ||
Args: | ||
security_category (Union[SecurityCategory, List[SecurityCategory]]): Security category or list of categories to create. | ||
Returns: | ||
Union[SecurityCategory, SecurityCategoryList]: The created security category or categories. | ||
""" | ||
return self._create_multiple(security_category) | ||
|
||
def delete(self, id: Union[int, List[int]]) -> None: | ||
"""Delete one or more security categories. | ||
Args: | ||
id (Union[int, List[int]]): ID or list of IDs of security categories to delete. | ||
Returns: | ||
None | ||
""" | ||
self._delete_multiple(ids=id, wrap_ids=False) |
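The IAM sub-APIs above ship without docstring examples; a minimal sketch of how they fit together, with placeholder ids, could look like this:

>>> from cognite.client import CogniteClient
>>> c = CogniteClient()
>>> accounts = c.iam.service_accounts.list()
>>> # create an api key for an existing service account (123 is a placeholder id)
>>> key = c.iam.api_keys.create(service_account_id=123)
>>> # add the same service account to a group (456 is a placeholder id) and list its members
>>> c.iam.groups.add_service_account(id=456, service_account_id=123)
>>> members = c.iam.groups.list_service_accounts(id=456)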
cognite/client/_api/login.py
@@ -0,0 +1,23 @@ | ||
from cognite.client._api_client import APIClient | ||
from cognite.client.data_classes.login import LoginStatus | ||
|
||
|
||
class LoginAPI(APIClient): | ||
_RESOURCE_PATH = "/login" | ||
|
||
def status(self) -> LoginStatus: | ||
"""Check login status | ||
Returns: | ||
LoginStatus: The login status of the current api key. | ||
Examples: | ||
Check the current login status and get the project:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> login_status = c.login.status() | ||
>>> project = login_status.project | ||
""" | ||
return LoginStatus._load(self._get(self._RESOURCE_PATH + "/status").json()) |
cognite/client/_api/time_series.py
@@ -0,0 +1,241 @@ | ||
import numbers | ||
from typing import * | ||
|
||
from cognite.client._api_client import APIClient | ||
from cognite.client.data_classes import TimeSeries, TimeSeriesFilter, TimeSeriesList, TimeSeriesUpdate | ||
from cognite.client.utils import _utils as utils | ||
|
||
|
||
class TimeSeriesAPI(APIClient): | ||
_RESOURCE_PATH = "/timeseries" | ||
_LIST_CLASS = TimeSeriesList | ||
|
||
def __call__( | ||
self, chunk_size: int = None, include_metadata: bool = False, asset_ids: List[int] = None | ||
) -> Generator[Union[TimeSeries, TimeSeriesList], None, None]: | ||
"""Iterate over time series | ||
Fetches time series as they are iterated over, so you keep a limited number of objects in memory. | ||
Args: | ||
chunk_size (int, optional): Number of time series to return in each chunk. Defaults to yielding one time series at a time. | ||
include_metadata (bool, optional): Whether or not to include metadata | ||
asset_ids (List[int], optional): List time series related to these assets. | ||
Yields: | ||
Union[TimeSeries, TimeSeriesList]: yields TimeSeries one by one if chunk_size is not specified, else TimeSeriesList objects. | ||
""" | ||
filter = {"includeMetadata": include_metadata, "assetIds": str(asset_ids) if asset_ids else None} | ||
return self._list_generator(method="GET", chunk_size=chunk_size, filter=filter) | ||
|
||
def __iter__(self) -> Generator[TimeSeries, None, None]: | ||
"""Iterate over time series | ||
Fetches time series as they are iterated over, so you keep a limited number of metadata objects in memory. | ||
Yields: | ||
TimeSeries: yields TimeSeries one by one. | ||
""" | ||
return self.__call__() | ||
|
||
def retrieve(self, id: Optional[int] = None, external_id: Optional[str] = None) -> Optional[TimeSeries]: | ||
"""Retrieve a single time series by id. | ||
Args: | ||
id (int, optional): ID | ||
external_id (str, optional): External ID | ||
Returns: | ||
Optional[TimeSeries]: Requested time series or None if it does not exist. | ||
Examples: | ||
Get time series by id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.retrieve(id=1) | ||
Get time series by external id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.retrieve(external_id="1") | ||
""" | ||
utils.assert_exactly_one_of_id_or_external_id(id, external_id) | ||
return self._retrieve_multiple(ids=id, external_ids=external_id, wrap_ids=True) | ||
|
||
def retrieve_multiple( | ||
self, ids: Optional[List[int]] = None, external_ids: Optional[List[str]] = None | ||
) -> TimeSeriesList: | ||
"""Retrieve multiple time series by id. | ||
Args: | ||
ids (List[int], optional): IDs | ||
external_ids (List[str], optional): External IDs | ||
Returns: | ||
TimeSeriesList: The requested time series. | ||
Examples: | ||
Get time series by id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.retrieve_multiple(ids=[1, 2, 3]) | ||
Get time series by external id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.retrieve_multiple(external_ids=["abc", "def"]) | ||
""" | ||
utils.assert_type(ids, "id", [List], allow_none=True) | ||
utils.assert_type(external_ids, "external_id", [List], allow_none=True) | ||
return self._retrieve_multiple(ids=ids, external_ids=external_ids, wrap_ids=True) | ||
|
||
def list( | ||
self, include_metadata: bool = False, asset_ids: Optional[List[int]] = None, limit: int = 25 | ||
) -> TimeSeriesList: | ||
"""Iterate over time series | ||
Fetches time series as they are iterated over, so you keep a limited number of objects in memory. | ||
Args: | ||
include_metadata (bool, optional): Whether or not to include metadata | ||
asset_ids (List[int], optional): List time series related to these assets. | ||
limit (int, optional): Max number of time series to return. Defaults to 25. Set to -1, float("inf") or None | ||
to return all items. | ||
Returns: | ||
TimeSeriesList: The requested time series. | ||
Examples: | ||
List time series:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.list(limit=5) | ||
Iterate over time series:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> for ts in c.time_series: | ||
... ts # do something with the time_series | ||
Iterate over chunks of time series to reduce memory load:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> for ts_list in c.time_series(chunk_size=2500): | ||
... ts_list # do something with the time_series | ||
""" | ||
filter = {"includeMetadata": include_metadata, "assetIds": str(asset_ids) if asset_ids else None} | ||
return self._list(method="GET", filter=filter, limit=limit) | ||
|
||
def create(self, time_series: Union[TimeSeries, List[TimeSeries]]) -> Union[TimeSeries, TimeSeriesList]: | ||
"""Create one or more time series. | ||
Args: | ||
time_series (Union[TimeSeries, List[TimeSeries]]): TimeSeries or list of TimeSeries to create. | ||
Returns: | ||
Union[TimeSeries, TimeSeriesList]: The created time series. | ||
Examples: | ||
Create a new time series:: | ||
>>> from cognite.client import CogniteClient | ||
>>> from cognite.client.data_classes import TimeSeries | ||
>>> c = CogniteClient() | ||
>>> ts = c.time_series.create(TimeSeries(name="my ts")) | ||
""" | ||
return self._create_multiple(items=time_series) | ||
|
||
def delete(self, id: Union[int, List[int]] = None, external_id: Union[str, List[str]] = None) -> None: | ||
"""Delete one or more time series. | ||
Args: | ||
id (Union[int, List[int]]): ID or list of IDs | ||
external_id (Union[str, List[str]]): External ID or list of external IDs | ||
Returns: | ||
None | ||
Examples: | ||
Delete time series by id or external id:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.delete(id=[1,2,3], external_id="3") | ||
""" | ||
self._delete_multiple(wrap_ids=True, ids=id, external_ids=external_id) | ||
|
||
def update( | ||
self, item: Union[TimeSeries, TimeSeriesUpdate, List[Union[TimeSeries, TimeSeriesUpdate]]] | ||
) -> Union[TimeSeries, TimeSeriesList]: | ||
"""Update one or more time series. | ||
Args: | ||
item (Union[TimeSeries, TimeSeriesUpdate, List[Union[TimeSeries, TimeSeriesUpdate]]]): Time series to update | ||
Returns: | ||
Union[TimeSeries, TimeSeriesList]: Updated time series. | ||
Examples: | ||
Update a time series that you have fetched. This will perform a full update of the time series:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.retrieve(id=1) | ||
>>> res.description = "New description" | ||
>>> res = c.time_series.update(res) | ||
Perform a partial update on a time series, updating the description and adding a new field to metadata:: | ||
>>> from cognite.client import CogniteClient | ||
>>> from cognite.client.data_classes import TimeSeriesUpdate | ||
>>> c = CogniteClient() | ||
>>> my_update = TimeSeriesUpdate(id=1).description.set("New description").metadata.add({"key": "value"}) | ||
>>> res = c.time_series.update(my_update) | ||
""" | ||
return self._update_multiple(items=item) | ||
|
||
def search( | ||
self, | ||
name: str = None, | ||
description: str = None, | ||
query: str = None, | ||
filter: Union[TimeSeriesFilter, Dict] = None, | ||
limit: int = None, | ||
) -> TimeSeriesList: | ||
"""Search for time series. | ||
Args: | ||
name (str, optional): Prefix and fuzzy search on name. | ||
description (str, optional): Prefix and fuzzy search on description. | ||
query (str, optional): Search on name and description using wildcard search on each of the words (separated | ||
by spaces). Retrieves results where at least one word must match. Example: 'some other' | ||
filter (Union[TimeSeriesFilter, Dict], optional): Filter to apply. Performs exact match on these fields. | ||
limit (int, optional): Max number of results to return. | ||
Returns: | ||
TimeSeriesList: List of requested time series. | ||
Examples: | ||
Search for a time series:: | ||
>>> from cognite.client import CogniteClient | ||
>>> c = CogniteClient() | ||
>>> res = c.time_series.search(name="some name") | ||
""" | ||
return self._search( | ||
search={"name": name, "description": description, "query": query}, filter=filter, limit=limit | ||
) |
cognite/client/_auxiliary/_protobuf_descriptors/_api_timeseries_data_v1_pb2.py
This file was deleted (505 changes: 0 additions & 505 deletions).
cognite/client/_auxiliary/_protobuf_descriptors/_api_timeseries_data_v2_pb2.py
This file was deleted (505 changes: 0 additions & 505 deletions).
cognite/client/_cognite_client.py
@@ -0,0 +1,251 @@ | ||
import logging | ||
import os | ||
import sys | ||
import warnings | ||
from typing import Any, Dict | ||
|
||
from cognite.client._api.assets import AssetsAPI | ||
from cognite.client._api.datapoints import DatapointsAPI | ||
from cognite.client._api.events import EventsAPI | ||
from cognite.client._api.files import FilesAPI | ||
from cognite.client._api.iam import IAMAPI | ||
from cognite.client._api.login import LoginAPI | ||
from cognite.client._api.raw import RawAPI | ||
from cognite.client._api.three_d import ThreeDAPI | ||
from cognite.client._api.time_series import TimeSeriesAPI | ||
from cognite.client._api_client import APIClient | ||
from cognite.client.exceptions import CogniteAPIKeyError | ||
from cognite.client.utils._utils import DebugLogFormatter, get_current_sdk_version | ||
from cognite.client.utils._version_checker import get_newest_version_in_major_release | ||
|
||
DEFAULT_BASE_URL = "https://api.cognitedata.com" | ||
DEFAULT_MAX_WORKERS = 10 | ||
DEFAULT_TIMEOUT = 30 | ||
|
||
|
||
class CogniteClient: | ||
"""Main entrypoint into Cognite Python SDK. | ||
All services are made available through this object. See examples below. | ||
Args: | ||
api_key (str): API key | ||
project (str): Project. Defaults to project of given API key. | ||
client_name (str): A user-defined name for the client. Used to identify the number of unique applications/scripts | ||
running on top of CDF. | ||
base_url (str): Base url to send requests to. Defaults to "https://api.cognitedata.com" | ||
max_workers (int): Max number of workers to spawn when parallelizing data fetching. Defaults to 10. | ||
headers (Dict): Additional headers to add to all requests. | ||
timeout (int): Timeout on requests sent to the api. Defaults to 30 seconds. | ||
debug (bool): Configures logger to log extra request details to stderr. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
api_key: str = None, | ||
project: str = None, | ||
client_name: str = None, | ||
base_url: str = None, | ||
max_workers: int = None, | ||
headers: Dict[str, str] = None, | ||
timeout: int = None, | ||
debug: bool = None, | ||
): | ||
thread_local_api_key, thread_local_project = self._get_thread_local_credentials() | ||
environment_api_key = os.getenv("COGNITE_API_KEY") | ||
environment_base_url = os.getenv("COGNITE_BASE_URL") | ||
environment_max_workers = os.getenv("COGNITE_MAX_WORKERS") | ||
environment_timeout = os.getenv("COGNITE_TIMEOUT") | ||
environment_client_name = os.getenv("COGNITE_CLIENT_NAME") | ||
|
||
self.__api_key = api_key or thread_local_api_key or environment_api_key | ||
if self.__api_key is None: | ||
raise ValueError("No API Key has been specified") | ||
|
||
self._base_url = base_url or environment_base_url or DEFAULT_BASE_URL | ||
|
||
self._max_workers = int(max_workers or environment_max_workers or DEFAULT_MAX_WORKERS) | ||
|
||
self._headers = headers or {} | ||
|
||
self._client_name = client_name if client_name is not None else environment_client_name | ||
if self._client_name is None: | ||
raise ValueError( | ||
"No client name has been specified. Pass it to the CogniteClient or set the environment variable 'COGNITE_CLIENT_NAME'." | ||
) | ||
self._headers["x-cdp-app"] = client_name | ||
|
||
self._timeout = int(timeout or environment_timeout or DEFAULT_TIMEOUT) | ||
|
||
if debug: | ||
self._configure_logger_for_debug_mode() | ||
|
||
__api_version = "v1" | ||
|
||
self.project = project or thread_local_project | ||
self.login = LoginAPI( | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
|
||
if self.project is None: | ||
login_status = self.login.status() | ||
if login_status.logged_in: | ||
self.project = login_status.project | ||
warnings.warn( | ||
"Authenticated towards inferred project '{}'. Pass project to the CogniteClient constructor" | ||
" to suppress this warning.".format(self.project), | ||
stacklevel=2, | ||
) | ||
else: | ||
raise CogniteAPIKeyError | ||
self._check_client_has_newest_major_version() | ||
|
||
self.assets = AssetsAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self.datapoints = DatapointsAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self.events = EventsAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self.files = FilesAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self.iam = IAMAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self.time_series = TimeSeriesAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self.raw = RawAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self.three_d = ThreeDAPI( | ||
version=__api_version, | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
self._api_client = APIClient( | ||
project=self.project, | ||
api_key=self.__api_key, | ||
base_url=self._base_url, | ||
max_workers=self._max_workers, | ||
headers=self._headers, | ||
timeout=self._timeout, | ||
cognite_client=self, | ||
) | ||
|
||
def get(self, url: str, params: Dict[str, Any] = None, headers: Dict[str, Any] = None): | ||
"""Perform a GET request to an arbitrary path in the API.""" | ||
return self._api_client._get(url, params=params, headers=headers) | ||
|
||
def post(self, url: str, json: Dict[str, Any], params: Dict[str, Any] = None, headers: Dict[str, Any] = None): | ||
"""Perform a POST request to an arbitrary path in the API.""" | ||
return self._api_client._post(url, json=json, params=params, headers=headers) | ||
|
||
def put(self, url: str, json: Dict[str, Any] = None, headers: Dict[str, Any] = None): | ||
"""Perform a PUT request to an arbitrary path in the API.""" | ||
return self._api_client._put(url, json=json, headers=headers) | ||
|
||
def delete(self, url: str, params: Dict[str, Any] = None, headers: Dict[str, Any] = None): | ||
"""Perform a DELETE request to an arbitrary path in the API.""" | ||
return self._api_client._delete(url, params=params, headers=headers) | ||
|
||
@property | ||
def version(self) -> str: | ||
"""Returns the current SDK version. | ||
Returns: | ||
str: The current SDK version | ||
""" | ||
return get_current_sdk_version() | ||
|
||
@staticmethod | ||
def _get_thread_local_credentials(): | ||
if "cognite._thread_local" in sys.modules: | ||
from cognite._thread_local import credentials | ||
|
||
thread_local_api_key = getattr(credentials, "api_key", None) | ||
thread_local_project = getattr(credentials, "project", None) | ||
return thread_local_api_key, thread_local_project | ||
return None, None | ||
|
||
def _configure_logger_for_debug_mode(self): | ||
logger = logging.getLogger("cognite-sdk") | ||
logger.setLevel("DEBUG") | ||
log_handler = logging.StreamHandler() | ||
formatter = DebugLogFormatter() | ||
log_handler.setFormatter(formatter) | ||
logger.handlers = [] | ||
logger.propagate = False | ||
logger.addHandler(log_handler) | ||
|
||
def _check_client_has_newest_major_version(self): | ||
newest_version = get_newest_version_in_major_release("cognite-sdk", self.version) | ||
if newest_version != self.version: | ||
warnings.warn( | ||
"You are using version {} of the SDK, however version {} is available. " | ||
"Upgrade to suppress this warning.".format(self.version, newest_version), | ||
stacklevel=3, | ||
) |
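As the constructor above shows, the API key falls back from the explicit argument to thread-local credentials to COGNITE_API_KEY, while the base URL, worker count, timeout and client name fall back to their corresponding COGNITE_* environment variables. A minimal sketch with placeholder values:

>>> from cognite.client import CogniteClient
>>> c = CogniteClient(
...     api_key="my-api-key",      # or set COGNITE_API_KEY
...     project="my-project",      # defaults to the project of the API key
...     client_name="my-script",   # or set COGNITE_CLIENT_NAME
...     debug=True,                # log request details to stderr
... )
>>> c.login.status().project
>>> # the generic helpers reach arbitrary API paths; "/some/path" is a placeholder
>>> res = c.get("/some/path", params={"limit": 1})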
cognite/client/data_classes/__init__.py
@@ -0,0 +1,35 @@ | ||
from typing import * | ||
|
||
from cognite.client.data_classes.assets import Asset, AssetFilter, AssetList, AssetUpdate | ||
from cognite.client.data_classes.datapoints import Datapoint, Datapoints, DatapointsList, DatapointsQuery | ||
from cognite.client.data_classes.events import Event, EventFilter, EventList, EventUpdate | ||
from cognite.client.data_classes.files import FileMetadata, FileMetadataFilter, FileMetadataList, FileMetadataUpdate | ||
from cognite.client.data_classes.iam import ( | ||
APIKey, | ||
APIKeyList, | ||
Group, | ||
GroupList, | ||
SecurityCategory, | ||
SecurityCategoryList, | ||
ServiceAccount, | ||
ServiceAccountList, | ||
) | ||
from cognite.client.data_classes.raw import Database, DatabaseList, Row, RowList, Table, TableList | ||
from cognite.client.data_classes.three_d import ( | ||
ThreeDAssetMapping, | ||
ThreeDAssetMappingList, | ||
ThreeDModel, | ||
ThreeDModelList, | ||
ThreeDModelRevision, | ||
ThreeDModelRevisionList, | ||
ThreeDModelRevisionUpdate, | ||
ThreeDModelUpdate, | ||
ThreeDNode, | ||
ThreeDNodeList, | ||
ThreeDRevealNode, | ||
ThreeDRevealNodeList, | ||
ThreeDRevealRevision, | ||
ThreeDRevealSector, | ||
ThreeDRevealSectorList, | ||
) | ||
from cognite.client.data_classes.time_series import TimeSeries, TimeSeriesFilter, TimeSeriesList, TimeSeriesUpdate |
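Since everything is re-exported here, user code (and the docstring examples elsewhere in this commit) can import every resource class from one module:

>>> from cognite.client.data_classes import Asset, AssetUpdate, Event, EventFilter, TimeSeries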
cognite/client/data_classes/_base.py
@@ -0,0 +1,361 @@ | ||
import functools | ||
import json | ||
from collections import UserList | ||
from typing import * | ||
|
||
from cognite.client.exceptions import CogniteMissingClientError | ||
from cognite.client.utils import _utils as utils | ||
from cognite.client.utils._utils import to_camel_case, to_snake_case | ||
|
||
EXCLUDE_VALUE = [None] | ||
|
||
|
||
class CogniteResponse: | ||
def __str__(self): | ||
item = utils.convert_time_attributes_to_datetime(self.dump()) | ||
return json.dumps(item, indent=4) | ||
|
||
def __repr__(self): | ||
return self.__str__() | ||
|
||
def __eq__(self, other): | ||
return type(other) == type(self) and other.dump() == self.dump() | ||
|
||
def __getattribute__(self, item): | ||
attr = super().__getattribute__(item) | ||
if item == "_cognite_client": | ||
if attr is None: | ||
raise CogniteMissingClientError | ||
return attr | ||
|
||
def dump(self, camel_case: bool = False) -> Dict[str, Any]: | ||
"""Dump the instance into a json serializable python data type. | ||
Args: | ||
camel_case (bool): Use camelCase for attribute names. Defaults to False. | ||
Returns: | ||
Dict[str, Any]: A dictionary representation of the instance. | ||
""" | ||
dumped = { | ||
key: value for key, value in self.__dict__.items() if value not in EXCLUDE_VALUE and not key.startswith("_") | ||
} | ||
if camel_case: | ||
dumped = {to_camel_case(key): value for key, value in dumped.items()} | ||
return dumped | ||
|
||
@classmethod | ||
def _load(cls, api_response): | ||
raise NotImplementedError | ||
|
||
def to_pandas(self): | ||
raise NotImplementedError | ||
|
||
|
||
class CogniteResource: | ||
def __new__(cls, *args, **kwargs): | ||
obj = super().__new__(cls) | ||
obj._cognite_client = None | ||
if "cognite_client" in kwargs: | ||
obj._cognite_client = kwargs["cognite_client"] | ||
return obj | ||
|
||
def __eq__(self, other): | ||
return type(self) == type(other) and self.dump() == other.dump() | ||
|
||
def __str__(self): | ||
item = utils.convert_time_attributes_to_datetime(self.dump()) | ||
return json.dumps(item, default=lambda x: x.__dict__, indent=4) | ||
|
||
def __repr__(self): | ||
return self.__str__() | ||
|
||
def __getattribute__(self, item): | ||
attr = super().__getattribute__(item) | ||
if item == "_cognite_client": | ||
if attr is None: | ||
raise CogniteMissingClientError | ||
return attr | ||
|
||
def dump(self, camel_case: bool = False) -> Dict[str, Any]: | ||
"""Dump the instance into a json serializable Python data type. | ||
Args: | ||
camel_case (bool): Use camelCase for attribute names. Defaults to False. | ||
Returns: | ||
Dict[str, Any]: A dictionary representation of the instance. | ||
""" | ||
if camel_case: | ||
return { | ||
to_camel_case(key): value | ||
for key, value in self.__dict__.items() | ||
if value not in EXCLUDE_VALUE and not key.startswith("_") | ||
} | ||
return { | ||
key: value for key, value in self.__dict__.items() if value not in EXCLUDE_VALUE and not key.startswith("_") | ||
} | ||
|
||
@classmethod | ||
def _load(cls, resource: Union[Dict, str], cognite_client=None): | ||
if isinstance(resource, str): | ||
return cls._load(json.loads(resource), cognite_client=cognite_client) | ||
elif isinstance(resource, Dict): | ||
instance = cls(cognite_client=cognite_client) | ||
for key, value in resource.items(): | ||
snake_case_key = to_snake_case(key) | ||
if not hasattr(instance, snake_case_key): | ||
raise AttributeError("Attribute '{}' does not exist on '{}'".format(snake_case_key, cls.__name__)) | ||
setattr(instance, snake_case_key, value) | ||
return instance | ||
raise TypeError("Resource must be json str or Dict, not {}".format(type(resource))) | ||
|
||
def to_pandas(self, expand: List[str] = None, ignore: List[str] = None): | ||
"""Convert the instance into a pandas DataFrame. | ||
Returns: | ||
pandas.DataFrame: The dataframe. | ||
""" | ||
expand = ["metadata"] if expand is None else expand | ||
ignore = [] if ignore is None else ignore | ||
pd = utils.local_import("pandas") | ||
dumped = self.dump(camel_case=True) | ||
|
||
for element in ignore: | ||
del dumped[element] | ||
for key in expand: | ||
if key in dumped and isinstance(dumped[key], dict): | ||
dumped.update(dumped.pop(key)) | ||
else: | ||
raise AssertionError("Could not expand attribute '{}'".format(key)) | ||
|
||
df = pd.DataFrame(columns=["value"]) | ||
for name, value in dumped.items(): | ||
df.loc[name] = [value] | ||
return df | ||
|
||
|
||
class CogniteResourceList(UserList): | ||
_RESOURCE = None | ||
_UPDATE = None | ||
_ASSERT_CLASSES = True | ||
|
||
def __init__(self, resources: List[Any], cognite_client=None): | ||
if self._ASSERT_CLASSES: | ||
assert self._RESOURCE is not None, "{} does not have _RESOURCE set".format(self.__class__.__name__) | ||
assert self._UPDATE is not None, "{} does not have _UPDATE set".format(self.__class__.__name__) | ||
for resource in resources: | ||
if not isinstance(resource, self._RESOURCE): | ||
raise TypeError( | ||
"All resources for class '{}' must be of type '{}', not '{}'.".format( | ||
self.__class__.__name__, self._RESOURCE.__name__, type(resource) | ||
) | ||
) | ||
self._cognite_client = cognite_client | ||
super().__init__(resources) | ||
if self.data: | ||
if hasattr(self.data[0], "external_id"): | ||
self._external_id_to_item = { | ||
item.external_id: item for item in self.data if item.external_id is not None | ||
} | ||
if hasattr(self.data[0], "id"): | ||
self._id_to_item = {item.id: item for item in self.data if item.id is not None} | ||
|
||
def __getattribute__(self, item): | ||
attr = super().__getattribute__(item) | ||
if item == "_cognite_client" and attr is None: | ||
raise CogniteMissingClientError | ||
return attr | ||
|
||
def __getitem__(self, item): | ||
value = super().__getitem__(item) | ||
if isinstance(item, slice): | ||
c = None | ||
if super().__getattribute__("_cognite_client") is not None: | ||
c = self._cognite_client | ||
return self.__class__(value, cognite_client=c) | ||
return value | ||
|
||
def __str__(self): | ||
item = utils.convert_time_attributes_to_datetime(self.dump()) | ||
return json.dumps(item, default=lambda x: x.__dict__, indent=4) | ||
|
||
def __repr__(self): | ||
return self.__str__() | ||
|
||
def dump(self, camel_case: bool = False) -> List[Dict[str, Any]]: | ||
"""Dump the instance into a json serializable Python data type. | ||
Args: | ||
camel_case (bool): Use camelCase for attribute names. Defaults to False. | ||
Returns: | ||
List[Dict[str, Any]]: A list of dicts representing the instance. | ||
""" | ||
return [resource.dump(camel_case) for resource in self.data] | ||
|
||
def get(self, id: int = None, external_id: str = None) -> Optional[CogniteResource]: | ||
"""Get an item from this list by id or exernal_id. | ||
Args: | ||
id (int): The id of the item to get. | ||
external_id (str): The external_id of the item to get. | ||
Returns: | ||
Optional[CogniteResource]: The requested item | ||
""" | ||
utils.assert_exactly_one_of_id_or_external_id(id, external_id) | ||
if id: | ||
return self._id_to_item.get(id) | ||
return self._external_id_to_item.get(external_id) | ||
|
||
def to_pandas(self) -> "pandas.DataFrame": | ||
"""Convert the instance into a pandas DataFrame. | ||
Returns: | ||
pandas.DataFrame: The dataframe. | ||
""" | ||
pd = utils.local_import("pandas") | ||
df = pd.DataFrame(self.dump(camel_case=True)) | ||
nullable_int_fields = ["endTime", "assetId"] | ||
for field in nullable_int_fields: | ||
if field in df: | ||
df[field] = df[field].astype(pd.Int64Dtype()) | ||
return df | ||
|
||
@classmethod | ||
def _load(cls, resource_list: Union[List, str], cognite_client=None): | ||
if isinstance(resource_list, str): | ||
return cls._load(json.loads(resource_list), cognite_client=cognite_client) | ||
elif isinstance(resource_list, List): | ||
resources = [cls._RESOURCE._load(resource, cognite_client=cognite_client) for resource in resource_list] | ||
return cls(resources, cognite_client=cognite_client) | ||
|
||
|
||
class CogniteUpdate: | ||
def __init__(self, id: int = None, external_id: str = None): | ||
self._id = id | ||
self._external_id = external_id | ||
self._update_object = {} | ||
|
||
def __eq__(self, other): | ||
return type(self) == type(other) and self.dump() == other.dump() | ||
|
||
def __str__(self): | ||
return json.dumps(self.dump(), indent=4) | ||
|
||
def __repr__(self): | ||
return self.__str__() | ||
|
||
def _set(self, name, value): | ||
self._update_object[name] = {"set": value} | ||
|
||
def _set_null(self, name): | ||
self._update_object[name] = {"setNull": True} | ||
|
||
def _add(self, name, value): | ||
self._update_object[name] = {"add": value} | ||
|
||
def _remove(self, name, value): | ||
self._update_object[name] = {"remove": value} | ||
|
||
def dump(self): | ||
"""Dump the instance into a json serializable Python data type. | ||
Returns: | ||
Dict[str, Any]: A dictionary representation of the instance. | ||
""" | ||
dumped = {"update": self._update_object} | ||
if self._id is not None: | ||
dumped["id"] = self._id | ||
elif self._external_id is not None: | ||
dumped["externalId"] = self._external_id | ||
return dumped | ||
|
||
@classmethod | ||
def _get_update_properties(cls): | ||
return [key for key in cls.__dict__.keys() if not key.startswith("_")] | ||
|
||
|
||
class CognitePrimitiveUpdate: | ||
def __init__(self, update_object, name: str): | ||
self._update_object = update_object | ||
self._name = name | ||
|
||
def _set(self, value: Union[None, str, int, bool]): | ||
if value is None: | ||
self._update_object._set_null(self._name) | ||
else: | ||
self._update_object._set(self._name, value) | ||
return self._update_object | ||
|
||
|
||
class CogniteObjectUpdate: | ||
def __init__(self, update_object, name: str): | ||
self._update_object = update_object | ||
self._name = name | ||
|
||
def _set(self, value: Dict): | ||
self._update_object._set(self._name, value) | ||
return self._update_object | ||
|
||
def _add(self, value: Dict): | ||
self._update_object._add(self._name, value) | ||
return self._update_object | ||
|
||
def _remove(self, value: List): | ||
self._update_object._remove(self._name, value) | ||
return self._update_object | ||
|
||
|
||
class CogniteListUpdate: | ||
def __init__(self, update_object, name: str): | ||
self._update_object = update_object | ||
self._name = name | ||
|
||
def _set(self, value: List): | ||
self._update_object._set(self._name, value) | ||
return self._update_object | ||
|
||
def _add(self, value: List): | ||
self._update_object._add(self._name, value) | ||
return self._update_object | ||
|
||
def _remove(self, value: List): | ||
self._update_object._remove(self._name, value) | ||
return self._update_object | ||
|
||
|
||
class CogniteFilter: | ||
def __eq__(self, other): | ||
return type(self) == type(other) and self.dump() == other.dump() | ||
|
||
def __str__(self): | ||
item = utils.convert_time_attributes_to_datetime(self.dump()) | ||
return json.dumps(item, default=lambda x: x.__dict__, indent=4) | ||
|
||
def __repr__(self): | ||
return self.__str__() | ||
|
||
def __getattribute__(self, item): | ||
attr = super().__getattribute__(item) | ||
if item == "_cognite_client": | ||
if attr is None: | ||
raise CogniteMissingClientError | ||
return attr | ||
|
||
def dump(self, camel_case: bool = False): | ||
"""Dump the instance into a json serializable Python data type. | ||
Returns: | ||
Dict[str, Any]: A dictionary representation of the instance. | ||
""" | ||
if camel_case: | ||
return { | ||
to_camel_case(key): value | ||
for key, value in self.__dict__.items() | ||
if value not in EXCLUDE_VALUE and not key.startswith("_") | ||
} | ||
return { | ||
key: value for key, value in self.__dict__.items() if value not in EXCLUDE_VALUE and not key.startswith("_") | ||
} |
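To make the CogniteResource contract above concrete: _load maps camelCase keys from the API onto snake_case attributes, and dump(camel_case=True) reverses it. A small sketch using a hypothetical subclass (the class name and fields are made up):

>>> from cognite.client.data_classes._base import CogniteResource
>>> class MyResource(CogniteResource):  # hypothetical resource with two fields
...     def __init__(self, external_id=None, name=None, cognite_client=None):
...         self.external_id = external_id
...         self.name = name
...         self._cognite_client = cognite_client
...
>>> r = MyResource._load({"externalId": "abc", "name": "pump"})
>>> r.dump(camel_case=True)
{'externalId': 'abc', 'name': 'pump'}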
cognite/client/data_classes/assets.py
@@ -0,0 +1,301 @@ | ||
from cognite.client.data_classes._base import * | ||
|
||
|
||
# GenClass: Asset, DataExternalAssetItem | ||
class Asset(CogniteResource): | ||
"""Representation of a physical asset, e.g plant or piece of equipment | ||
Args: | ||
external_id (str): External Id provided by client. Should be unique within the project. | ||
name (str): Name of asset. Often referred to as tag. | ||
parent_id (int): Javascript friendly internal ID given to the object. | ||
description (str): Description of asset. | ||
metadata (Dict[str, Any]): Custom, application specific metadata. String key -> String value | ||
source (str): The source of this asset | ||
id (int): Javascript friendly internal ID given to the object. | ||
created_time (int): It is the number of seconds that have elapsed since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. | ||
last_updated_time (int): It is the number of seconds that have elapsed since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. | ||
path (List[int]): IDs of assets on the path to the asset. | ||
depth (int): Asset path depth (number of levels below root node). | ||
parent_external_id (str): External Id provided by client. Should be unique within the project. | ||
cognite_client (CogniteClient): The client to associate with this object. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
external_id: str = None, | ||
name: str = None, | ||
parent_id: int = None, | ||
description: str = None, | ||
metadata: Dict[str, Any] = None, | ||
source: str = None, | ||
id: int = None, | ||
created_time: int = None, | ||
last_updated_time: int = None, | ||
path: List[int] = None, | ||
depth: int = None, | ||
parent_external_id: str = None, | ||
cognite_client=None, | ||
): | ||
self.external_id = external_id | ||
self.name = name | ||
self.parent_id = parent_id | ||
self.description = description | ||
self.metadata = metadata | ||
self.source = source | ||
self.id = id | ||
self.created_time = created_time | ||
self.last_updated_time = last_updated_time | ||
self.path = path | ||
self.depth = depth | ||
self.parent_external_id = parent_external_id | ||
self._cognite_client = cognite_client | ||
|
||
# GenStop | ||
|
||
def __hash__(self): | ||
return hash(self.external_id) | ||
|
||
def parent(self) -> "Asset": | ||
"""Returns this assets parent. | ||
Returns: | ||
Asset: The parent asset. | ||
""" | ||
if self.parent_id is None: | ||
raise ValueError("parent_id is None") | ||
return self._cognite_client.assets.retrieve(id=self.parent_id) | ||
|
||
def children(self) -> "AssetList": | ||
"""Returns the children of this asset. | ||
Returns: | ||
AssetList: The requested assets | ||
""" | ||
return self._cognite_client.assets.list(parent_ids=[self.id], limit=None) | ||
|
||
def subtree(self, depth: int = None) -> "AssetList": | ||
"""Returns the subtree of this asset up to a specified depth. | ||
Args: | ||
depth (int, optional): Retrieve assets up to this depth below the asset. | ||
Returns: | ||
AssetList: The requested assets sorted topologically. | ||
""" | ||
return self._cognite_client.assets.retrieve_subtree(id=self.id, depth=depth) | ||
|
||
def time_series(self, **kwargs) -> "TimeSeriesList": | ||
"""Retrieve all time series related to this asset. | ||
Returns: | ||
TimeSeriesList: All time series related to this asset. | ||
""" | ||
return self._cognite_client.time_series.list(asset_ids=[self.id], **kwargs) | ||
|
||
def events(self, **kwargs) -> "EventList": | ||
"""Retrieve all events related to this asset. | ||
Returns: | ||
EventList: All events related to this asset. | ||
""" | ||
|
||
return self._cognite_client.events.list(asset_ids=[self.id], **kwargs) | ||
|
||
def files(self, **kwargs) -> "FileMetadataList": | ||
"""Retrieve all files metadata related to this asset. | ||
Returns: | ||
FileMetadataList: Metadata about all files related to this asset. | ||
""" | ||
return self._cognite_client.files.list(asset_ids=[self.id], **kwargs) | ||
|
||
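A short usage sketch of the traversal helpers above. The client construction and the asset id are assumptions for illustration, not part of this commit:

from cognite.client import CogniteClient

client = CogniteClient()                # assumed to be configured via environment variables
asset = client.assets.retrieve(id=42)   # 42 is a made-up internal id
children = asset.children()             # direct children, as an AssetList
subtree = asset.subtree(depth=2)        # descendants up to two levels below this asset
series = asset.time_series()            # time series linked to this asset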
|
||
# GenUpdateClass: AssetChange | ||
class AssetUpdate(CogniteUpdate): | ||
"""Changes applied to asset | ||
Args: | ||
id (int): Javascript friendly internal ID given to the object. | ||
external_id (str): External Id provided by client. Should be unique within the project. | ||
""" | ||
|
||
@property | ||
def external_id(self): | ||
return _PrimitiveAssetUpdate(self, "externalId") | ||
|
||
@property | ||
def name(self): | ||
return _PrimitiveAssetUpdate(self, "name") | ||
|
||
@property | ||
def description(self): | ||
return _PrimitiveAssetUpdate(self, "description") | ||
|
||
@property | ||
def metadata(self): | ||
return _ObjectAssetUpdate(self, "metadata") | ||
|
||
@property | ||
def source(self): | ||
return _PrimitiveAssetUpdate(self, "source") | ||
|
||
|
||
class _PrimitiveAssetUpdate(CognitePrimitiveUpdate): | ||
def set(self, value: Any) -> AssetUpdate: | ||
return self._set(value) | ||
|
||
|
||
class _ObjectAssetUpdate(CogniteObjectUpdate): | ||
def set(self, value: Dict) -> AssetUpdate: | ||
return self._set(value) | ||
|
||
def add(self, value: Dict) -> AssetUpdate: | ||
return self._add(value) | ||
|
||
def remove(self, value: List) -> AssetUpdate: | ||
return self._remove(value) | ||
|
||
|
||
class _ListAssetUpdate(CogniteListUpdate): | ||
def set(self, value: List) -> AssetUpdate: | ||
return self._set(value) | ||
|
||
def add(self, value: List) -> AssetUpdate: | ||
return self._add(value) | ||
|
||
def remove(self, value: List) -> AssetUpdate: | ||
return self._remove(value) | ||
|
||
# GenStop | ||
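The update classes above are designed for chaining: each set/add/remove returns the parent AssetUpdate. A hedged sketch, where the client call and the id are assumptions:

up = AssetUpdate(id=42).description.set("Main feed pump").metadata.add({"area": "A1"})
client.assets.update(up)  # assumed call on the assets API that applies the change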
|
||
|
||
class AssetList(CogniteResourceList): | ||
_RESOURCE = Asset | ||
_UPDATE = AssetUpdate | ||
|
||
def _indented_asset_str(self, asset: Asset): | ||
single_indent = " " * 8 | ||
marked_indent = "|______ " | ||
indent = len(asset.path) - 1 | ||
|
||
s = single_indent * (indent - 1) | ||
if indent > 0: | ||
s += marked_indent | ||
s += str(asset.id) + "\n" | ||
dumped = utils.convert_time_attributes_to_datetime(asset.dump()) | ||
for key, value in sorted(dumped.items()): | ||
if isinstance(value, dict): | ||
s += single_indent * indent + "{}:\n".format(key) | ||
for mkey, mvalue in sorted(value.items()): | ||
s += single_indent * indent + " - {}: {}\n".format(mkey, mvalue) | ||
elif key != "id": | ||
s += single_indent * indent + key + ": " + str(value) + "\n" | ||
|
||
return s | ||
|
||
def __str__(self): | ||
try: | ||
sorted_assets = sorted(self.data, key=lambda x: x.path) | ||
except Exception: | ||
return super().__str__() | ||
|
||
if len(sorted_assets) == 0: | ||
return super().__str__() | ||
|
||
ids = set([asset.id for asset in sorted_assets]) | ||
|
||
s = "\n" | ||
root = sorted_assets[0].path[0] | ||
for asset in sorted_assets: | ||
this_root = asset.path[0] | ||
if this_root != root: | ||
s += "\n" + "*" * 80 + "\n\n" | ||
root = this_root | ||
elif len(asset.path) > 1 and asset.path[-2] not in ids: | ||
s += "\n" + "-" * 80 + "\n\n" | ||
s += self._indented_asset_str(asset) | ||
return s | ||
|
||
def time_series(self) -> "TimeSeriesList": | ||
"""Retrieve all time series related to these assets. | ||
Returns: | ||
TimeSeriesList: All time series related to the assets in this AssetList. | ||
""" | ||
from cognite.client.data_classes import TimeSeriesList | ||
|
||
return self._retrieve_related_resources(TimeSeriesList, self._cognite_client.time_series) | ||
|
||
def events(self) -> "EventList": | ||
"""Retrieve all events related to these assets. | ||
Returns: | ||
EventList: All events related to the assets in this AssetList. | ||
""" | ||
from cognite.client.data_classes import EventList | ||
|
||
return self._retrieve_related_resources(EventList, self._cognite_client.events) | ||
|
||
def files(self) -> "FileMetadataList": | ||
"""Retrieve all files metadata related to these assets. | ||
Returns: | ||
FileMetadataList: Metadata about all files related to the assets in this AssetList. | ||
""" | ||
from cognite.client.data_classes import FileMetadataList | ||
|
||
return self._retrieve_related_resources(FileMetadataList, self._cognite_client.files) | ||
|
||
def _retrieve_related_resources(self, resource_list_class, resource_api): | ||
ids = [a.id for a in self.data] | ||
tasks = [] | ||
chunk_size = 100 | ||
for i in range(0, len(ids), chunk_size): | ||
tasks.append({"asset_ids": ids[i : i + chunk_size], "limit": -1}) | ||
res_list = utils.execute_tasks_concurrently(resource_api.list, tasks, resource_api._max_workers).results | ||
resources = resource_list_class([]) | ||
for res in res_list: | ||
resources.extend(res) | ||
return resources | ||
|
||
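The three helpers above fan the asset ids out in chunks of 100 and merge the per-chunk results into a single list. A sketch, assuming `assets` was fetched through the assets API of a configured client:

assets = client.assets.list(limit=None)  # assumed call returning an AssetList
all_series = assets.time_series()        # every time series tied to any of the assets
all_events = assets.events()
all_files = assets.files()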
|
||
# GenClass: AssetFilter.filter | ||
class AssetFilter(CogniteFilter): | ||
"""No description. | ||
Args: | ||
name (str): Name of asset. Often referred to as tag. | ||
parent_ids (List[int]): Return only assets whose parent has one of these IDs. | ||
metadata (Dict[str, Any]): Custom, application specific metadata. String key -> String value | ||
source (str): The source of this asset | ||
created_time (Dict[str, Any]): Range between two timestamps | ||
last_updated_time (Dict[str, Any]): Range between two timestamps | ||
root (bool): Whether the filtered assets are root assets (assets without a parent). | ||
external_id_prefix (str): Filter by this prefix on the external ID. | ||
cognite_client (CogniteClient): The client to associate with this object. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
name: str = None, | ||
parent_ids: List[int] = None, | ||
metadata: Dict[str, Any] = None, | ||
source: str = None, | ||
created_time: Dict[str, Any] = None, | ||
last_updated_time: Dict[str, Any] = None, | ||
root: bool = None, | ||
external_id_prefix: str = None, | ||
cognite_client=None, | ||
): | ||
self.name = name | ||
self.parent_ids = parent_ids | ||
self.metadata = metadata | ||
self.source = source | ||
self.created_time = created_time | ||
self.last_updated_time = last_updated_time | ||
self.root = root | ||
self.external_id_prefix = external_id_prefix | ||
self._cognite_client = cognite_client | ||
|
||
# GenStop |
@@ -0,0 +1,342 @@ | ||
from datetime import datetime | ||
|
||
from cognite.client.data_classes._base import * | ||
|
||
|
||
class Datapoint(CogniteResource): | ||
"""An object representing a datapoint. | ||
Args: | ||
timestamp (Union[int, float]): The data timestamp in milliseconds since the epoch (Jan 1, 1970). | ||
value (Union[str, int, float]): The data value. Can be String or numeric depending on the metric | ||
average (float): The integral average value in the aggregate period | ||
max (float): The maximum value in the aggregate period | ||
min (float): The minimum value in the aggregate period | ||
count (int): The number of datapoints in the aggregate period | ||
sum (float): The sum of the datapoints in the aggregate period | ||
interpolation (float): The interpolated value of the series in the beginning of the aggregate | ||
step_interpolation (float): The last value before or at the beginning of the aggregate. | ||
continuous_variance (float): The variance of the interpolated underlying function. | ||
discrete_variance (float): The variance of the datapoint values. | ||
total_variation (float): The total variation of the interpolated underlying function. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
timestamp: Union[int, float] = None, | ||
value: Union[str, int, float] = None, | ||
average: float = None, | ||
max: float = None, | ||
min: float = None, | ||
count: int = None, | ||
sum: float = None, | ||
interpolation: float = None, | ||
step_interpolation: float = None, | ||
continuous_variance: float = None, | ||
discrete_variance: float = None, | ||
total_variation: float = None, | ||
): | ||
self.timestamp = timestamp | ||
self.value = value | ||
self.average = average | ||
self.max = max | ||
self.min = min | ||
self.count = count | ||
self.sum = sum | ||
self.interpolation = interpolation | ||
self.step_interpolation = step_interpolation | ||
self.continuous_variance = continuous_variance | ||
self.discrete_variance = discrete_variance | ||
self.total_variation = total_variation | ||
|
||
def to_pandas(self) -> "pandas.DataFrame": | ||
"""Convert the datapoint into a pandas DataFrame. | ||
Returns: | ||
pandas.DataFrame: The dataframe. | ||
""" | ||
pd = utils.local_import("pandas") | ||
|
||
dumped = self.dump(camel_case=True) | ||
timestamp = dumped.pop("timestamp") | ||
|
||
for k, v in dumped.items(): | ||
dumped[k] = [v] | ||
df = pd.DataFrame(dumped, index=[utils.ms_to_datetime(timestamp)]) | ||
|
||
return df | ||
|
||
|
||
class Datapoints: | ||
"""An object representing a list of datapoints. | ||
Args: | ||
id (int): Id of the timeseries the datapoints belong to | ||
external_id (str): External id of the timeseries the datapoints belong to (Only if id is not set) | ||
timestamp (List[Union[int, float]]): The data timestamps in milliseconds since the epoch (Jan 1, 1970). | ||
value (List[Union[int, str, float]]): The data values. Can be String or numeric depending on the metric | ||
average (List[float]): The integral average values in the aggregate period | ||
max (List[float]): The maximum values in the aggregate period | ||
min (List[float]): The minimum values in the aggregate period | ||
count (List[int]): The number of datapoints in the aggregate periods | ||
sum (List[float]): The sum of the datapoints in the aggregate periods | ||
interpolation (List[float]): The interpolated values of the series in the beginning of the aggregates | ||
step_interpolation (List[float]): The last values before or at the beginning of the aggregates. | ||
continuous_variance (List[float]): The variance of the interpolated underlying function. | ||
discrete_variance (List[float]): The variance of the datapoint values. | ||
total_variation (List[float]): The total variation of the interpolated underlying function. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
id: int = None, | ||
external_id: str = None, | ||
timestamp: List[Union[int, float]] = None, | ||
value: List[Union[int, str, float]] = None, | ||
average: List[float] = None, | ||
max: List[float] = None, | ||
min: List[float] = None, | ||
count: List[int] = None, | ||
sum: List[float] = None, | ||
interpolation: List[float] = None, | ||
step_interpolation: List[float] = None, | ||
continuous_variance: List[float] = None, | ||
discrete_variance: List[float] = None, | ||
total_variation: List[float] = None, | ||
): | ||
self.id = id | ||
self.external_id = external_id | ||
self.timestamp = timestamp or [] | ||
self.value = value | ||
self.average = average | ||
self.max = max | ||
self.min = min | ||
self.count = count | ||
self.sum = sum | ||
self.interpolation = interpolation | ||
self.step_interpolation = step_interpolation | ||
self.continuous_variance = continuous_variance | ||
self.discrete_variance = discrete_variance | ||
self.total_variation = total_variation | ||
|
||
self.__datapoint_objects = None | ||
|
||
def __str__(self): | ||
item = self.dump() | ||
item["datapoints"] = utils.convert_time_attributes_to_datetime(item["datapoints"]) | ||
return json.dumps(item, indent=4) | ||
|
||
def __repr__(self): | ||
return self.__str__() | ||
|
||
def __len__(self) -> int: | ||
return len(self.timestamp) | ||
|
||
def __eq__(self, other): | ||
return ( | ||
type(self) == type(other) | ||
and self.id == other.id | ||
and self.external_id == other.external_id | ||
and list(self._get_non_empty_data_fields()) == list(other._get_non_empty_data_fields()) | ||
) | ||
|
||
def __getitem__(self, item) -> Union[Datapoint, "Datapoints"]: | ||
if isinstance(item, slice): | ||
return self._slice(item) | ||
dp_args = {} | ||
for attr, values in self._get_non_empty_data_fields(): | ||
dp_args[attr] = values[item] | ||
return Datapoint(**dp_args) | ||
|
||
def __iter__(self) -> Generator[Datapoint, None, None]: | ||
yield from self.__get_datapoint_objects() | ||
|
||
def dump(self, camel_case: bool = False) -> Dict[str, Any]: | ||
"""Dump the datapoints into a json serializable Python data type. | ||
Args: | ||
camel_case (bool): Use camelCase for attribute names. Defaults to False. | ||
Returns: | ||
Dict[str, Any]: A dictionary representing the instance. | ||
""" | ||
dumped = { | ||
"id": self.id, | ||
"external_id": self.external_id, | ||
"datapoints": [dp.dump(camel_case=camel_case) for dp in self.__get_datapoint_objects()], | ||
} | ||
if camel_case: | ||
dumped = {utils.to_camel_case(key): value for key, value in dumped.items()} | ||
return {key: value for key, value in dumped.items() if value is not None} | ||
|
||
def to_pandas(self, column_names="externalId") -> "pandas.DataFrame": | ||
"""Convert the datapoints into a pandas DataFrame. | ||
Args: | ||
column_names (str): Which field to use as column header. Defaults to "externalId", can also be "id". | ||
Returns: | ||
pandas.DataFrame: The dataframe. | ||
""" | ||
np, pd = utils.local_import("numpy", "pandas") | ||
data_fields = {} | ||
timestamps = [] | ||
if column_names == "externalId": | ||
identifier = self.external_id if self.external_id is not None else self.id | ||
elif column_names == "id": | ||
identifier = self.id | ||
else: | ||
raise ValueError("column_names must be 'externalId' or 'id'") | ||
for attr, value in self._get_non_empty_data_fields(get_empty_lists=True): | ||
if attr == "timestamp": | ||
timestamps = value | ||
else: | ||
id_with_agg = str(identifier) | ||
if attr != "value": | ||
id_with_agg += "|{}".format(utils.to_camel_case(attr)) | ||
data_fields[id_with_agg] = value | ||
return pd.DataFrame(data_fields, index=pd.DatetimeIndex(data=np.array(timestamps, dtype="datetime64[ms]"))) | ||
|
||
def plot(self, *args, **kwargs) -> None: | ||
"""Plot the datapoints.""" | ||
plt = utils.local_import("matplotlib.pyplot") | ||
self.to_pandas().plot(*args, **kwargs) | ||
plt.show() | ||
|
||
@classmethod | ||
def _load(cls, dps_object, expected_fields: List[str] = None, cognite_client=None): | ||
instance = cls() | ||
instance.id = dps_object["id"] | ||
instance.external_id = dps_object.get("externalId") | ||
expected_fields = expected_fields or ["value"] | ||
expected_fields.append("timestamp") | ||
if len(dps_object["datapoints"]) == 0: | ||
for key in expected_fields: | ||
snake_key = utils.to_snake_case(key) | ||
setattr(instance, snake_key, []) | ||
else: | ||
for dp in dps_object["datapoints"]: | ||
for key in expected_fields: | ||
snake_key = utils.to_snake_case(key) | ||
current_attr = getattr(instance, snake_key) or [] | ||
value = dp.get(key) | ||
current_attr.append(value) | ||
setattr(instance, snake_key, current_attr) | ||
return instance | ||
|
||
def _insert(self, other_dps): | ||
if self.id is None and self.external_id is None: | ||
self.id = other_dps.id | ||
self.external_id = other_dps.external_id | ||
|
||
if other_dps.timestamp: | ||
other_first_ts = other_dps.timestamp[0] | ||
index_to_split_on = None | ||
for i, ts in enumerate(self.timestamp): | ||
if ts > other_first_ts: | ||
index_to_split_on = i | ||
break | ||
else: | ||
index_to_split_on = 0 | ||
|
||
for attr, other_value in other_dps._get_non_empty_data_fields(get_empty_lists=True): | ||
value = getattr(self, attr) | ||
if not value: | ||
setattr(self, attr, other_value) | ||
else: | ||
if index_to_split_on is not None: | ||
new_value = value[:index_to_split_on] + other_value + value[index_to_split_on:] | ||
else: | ||
new_value = value + other_value | ||
setattr(self, attr, new_value) | ||
|
||
def _get_non_empty_data_fields(self, get_empty_lists=False) -> List[Tuple[str, Any]]: | ||
non_empty_data_fields = [] | ||
for attr, value in self.__dict__.copy().items(): | ||
if attr not in ["id", "external_id", "_Datapoints__datapoint_objects", "_cognite_client"]: | ||
if value is not None or attr == "timestamp": | ||
if len(value) > 0 or get_empty_lists or attr == "timestamp": | ||
non_empty_data_fields.append((attr, value)) | ||
return non_empty_data_fields | ||
|
||
def __get_datapoint_objects(self) -> List[Datapoint]: | ||
if self.__datapoint_objects is None: | ||
self.__datapoint_objects = [] | ||
for i in range(len(self)): | ||
dp_args = {} | ||
for attr, value in self._get_non_empty_data_fields(): | ||
dp_args[attr] = value[i] | ||
self.__datapoint_objects.append(Datapoint(**dp_args)) | ||
return self.__datapoint_objects | ||
|
||
def _slice(self, slice: slice): | ||
truncated_datapoints = Datapoints(id=self.id, external_id=self.external_id) | ||
for attr, value in self._get_non_empty_data_fields(): | ||
setattr(truncated_datapoints, attr, value[slice]) | ||
return truncated_datapoints | ||
|
||
|
||
class DatapointsList(CogniteResourceList): | ||
_RESOURCE = Datapoints | ||
_ASSERT_CLASSES = False | ||
|
||
def __str__(self): | ||
item = self.dump() | ||
for i in item: | ||
i["datapoints"] = utils.convert_time_attributes_to_datetime(i["datapoints"]) | ||
return json.dumps(item, default=lambda x: x.__dict__, indent=4) | ||
|
||
def to_pandas(self, column_names="externalId") -> "pandas.DataFrame": | ||
"""Convert the datapoints list into a pandas DataFrame. | ||
Args: | ||
column_names (str): Which field to use as column header. Defaults to "externalId", can also be "id". | ||
Returns: | ||
pandas.DataFrame: The datapoints list as a pandas DataFrame. | ||
""" | ||
pd = utils.local_import("pandas") | ||
dfs = [df.to_pandas(column_names=column_names) for df in self.data] | ||
if dfs: | ||
return pd.concat(dfs, axis="columns") | ||
return pd.DataFrame() | ||
|
||
def plot(self, *args, **kwargs) -> None: | ||
"""Plot the list of datapoints.""" | ||
plt = utils.local_import("matplotlib.pyplot") | ||
self.to_pandas().plot(*args, **kwargs) | ||
plt.show() | ||
|
||
|
||
class DatapointsQuery(CogniteResource): | ||
"""Parameters describing a query for datapoints. | ||
Args: | ||
start (Union[str, int, datetime]): Get datapoints after this time. Format is N[timeunit]-ago where timeunit is w,d,h,m,s. Example: '2d-ago' will get everything that is up to 2 days old. Can also send time in ms since epoch. | ||
end (Union[str, int, datetime]): Get datapoints up to this time. The format is the same as for start. | ||
id (int): Id of the timeseries to query | ||
external_id (str): External id of the timeseries to query (Only if id is not set) | ||
limit (int): Return up to this number of datapoints. | ||
aggregates (List[str]): The aggregates to be returned. Use default if null. An empty string must be sent to get raw data if the default is a set of aggregates. | ||
granularity (str): The granularity size and granularity of the aggregates. | ||
include_outside_points (bool): Whether to include the last datapoint before the requested time period,and the first one after the requested period. This can be useful for interpolating data. Not available for aggregates. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
start: Union[str, int, datetime], | ||
end: Union[str, int, datetime], | ||
id: int = None, | ||
external_id: str = None, | ||
limit: int = None, | ||
aggregates: List[str] = None, | ||
granularity: str = None, | ||
include_outside_points: bool = None, | ||
): | ||
self.id = id | ||
self.external_id = external_id | ||
self.start = start | ||
self.end = end | ||
self.limit = limit | ||
self.aggregates = aggregates | ||
self.granularity = granularity | ||
self.include_outside_points = include_outside_points |
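A hedged sketch of how these datapoints classes are typically consumed. The retrieve call, the time series id and the time range are assumptions for illustration:

dps = client.datapoints.retrieve(
    id=42, start="7d-ago", end="now", aggregates=["average"], granularity="1h"
)
df = dps.to_pandas(column_names="externalId")  # one column per id/aggregate pair
dps.plot()  # needs matplotlib, which is imported lazily via utils.local_import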
@@ -0,0 +1,181 @@ | ||
from cognite.client.data_classes._base import * | ||
|
||
|
||
# GenClass: Event | ||
class Event(CogniteResource): | ||
"""An event represents something that happened at a given interval in time, e.g a failure, a work order etc. | ||
Args: | ||
external_id (str): External Id provided by client. Should be unique within the project | ||
start_time (int): The number of milliseconds that have elapsed since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. | ||
end_time (int): The number of milliseconds that have elapsed since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. | ||
type (str): Type of the event, e.g 'failure'. | ||
subtype (str): Subtype of the event, e.g 'electrical'. | ||
description (str): Textual description of the event. | ||
metadata (Dict[str, Any]): Custom, application specific metadata. String key -> String value | ||
asset_ids (List[int]): Asset IDs of related equipment that this event relates to. | ||
source (str): The source of this event. | ||
id (int): Javascript friendly internal ID given to the object. | ||
last_updated_time (int): The number of milliseconds that have elapsed since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. | ||
created_time (int): The number of milliseconds that have elapsed since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. | ||
cognite_client (CogniteClient): The client to associate with this object. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
external_id: str = None, | ||
start_time: int = None, | ||
end_time: int = None, | ||
type: str = None, | ||
subtype: str = None, | ||
description: str = None, | ||
metadata: Dict[str, Any] = None, | ||
asset_ids: List[int] = None, | ||
source: str = None, | ||
id: int = None, | ||
last_updated_time: int = None, | ||
created_time: int = None, | ||
cognite_client=None, | ||
): | ||
self.external_id = external_id | ||
self.start_time = start_time | ||
self.end_time = end_time | ||
self.type = type | ||
self.subtype = subtype | ||
self.description = description | ||
self.metadata = metadata | ||
self.asset_ids = asset_ids | ||
self.source = source | ||
self.id = id | ||
self.last_updated_time = last_updated_time | ||
self.created_time = created_time | ||
self._cognite_client = cognite_client | ||
|
||
# GenStop | ||
|
||
|
||
# GenClass: EventFilter | ||
class EventFilter(CogniteFilter): | ||
"""Filter on events filter with exact match | ||
Args: | ||
start_time (Dict[str, Any]): Range between two timestamps | ||
end_time (Dict[str, Any]): Range between two timestamps | ||
metadata (Dict[str, Any]): Custom, application specific metadata. String key -> String value | ||
asset_ids (List[int]): Asset IDs of related equipment that this event relates to. | ||
source (str): The source of this event. | ||
type (str): The event type | ||
subtype (str): The event subtype | ||
created_time (Dict[str, Any]): Range between two timestamps | ||
last_updated_time (Dict[str, Any]): Range between two timestamps | ||
external_id_prefix (str): Filter by this prefix on the external ID. | ||
cognite_client (CogniteClient): The client to associate with this object. | ||
""" | ||
|
||
def __init__( | ||
self, | ||
start_time: Dict[str, Any] = None, | ||
end_time: Dict[str, Any] = None, | ||
metadata: Dict[str, Any] = None, | ||
asset_ids: List[int] = None, | ||
source: str = None, | ||
type: str = None, | ||
subtype: str = None, | ||
created_time: Dict[str, Any] = None, | ||
last_updated_time: Dict[str, Any] = None, | ||
external_id_prefix: str = None, | ||
cognite_client=None, | ||
): | ||
self.start_time = start_time | ||
self.end_time = end_time | ||
self.metadata = metadata | ||
self.asset_ids = asset_ids | ||
self.source = source | ||
self.type = type | ||
self.subtype = subtype | ||
self.created_time = created_time | ||
self.last_updated_time = last_updated_time | ||
self.external_id_prefix = external_id_prefix | ||
self._cognite_client = cognite_client | ||
|
||
# GenStop | ||
|
||
|
||
# GenUpdateClass: EventChange | ||
class EventUpdate(CogniteUpdate): | ||
"""Changes will be applied to event. | ||
Args: | ||
id (int): Javascript friendly internal ID given to the object. | ||
external_id (str): External Id provided by client. Should be unique within the project | ||
""" | ||
|
||
@property | ||
def external_id(self): | ||
return _PrimitiveEventUpdate(self, "externalId") | ||
|
||
@property | ||
def start_time(self): | ||
return _PrimitiveEventUpdate(self, "startTime") | ||
|
||
@property | ||
def end_time(self): | ||
return _PrimitiveEventUpdate(self, "endTime") | ||
|
||
@property | ||
def description(self): | ||
return _PrimitiveEventUpdate(self, "description") | ||
|
||
@property | ||
def metadata(self): | ||
return _ObjectEventUpdate(self, "metadata") | ||
|
||
@property | ||
def asset_ids(self): | ||
return _ListEventUpdate(self, "assetIds") | ||
|
||
@property | ||
def source(self): | ||
return _PrimitiveEventUpdate(self, "source") | ||
|
||
@property | ||
def type(self): | ||
return _PrimitiveEventUpdate(self, "type") | ||
|
||
@property | ||
def subtype(self): | ||
return _PrimitiveEventUpdate(self, "subtype") | ||
|
||
|
||
class _PrimitiveEventUpdate(CognitePrimitiveUpdate): | ||
def set(self, value: Any) -> EventUpdate: | ||
return self._set(value) | ||
|
||
|
||
class _ObjectEventUpdate(CogniteObjectUpdate): | ||
def set(self, value: Dict) -> EventUpdate: | ||
return self._set(value) | ||
|
||
def add(self, value: Dict) -> EventUpdate: | ||
return self._add(value) | ||
|
||
def remove(self, value: List) -> EventUpdate: | ||
return self._remove(value) | ||
|
||
|
||
class _ListEventUpdate(CogniteListUpdate): | ||
def set(self, value: List) -> EventUpdate: | ||
return self._set(value) | ||
|
||
def add(self, value: List) -> EventUpdate: | ||
return self._add(value) | ||
|
||
def remove(self, value: List) -> EventUpdate: | ||
return self._remove(value) | ||
|
||
# GenStop | ||
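As with AssetUpdate, the event update helpers chain into a single partial update. A short sketch; the client call and the event id are assumptions:

up = EventUpdate(id=7).description.set("Pump trip").asset_ids.add([42])
client.events.update(up)  # assumed call on the events API that applies the change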
|
||
|
||
class EventList(CogniteResourceList): | ||
_RESOURCE = Event | ||
_UPDATE = EventUpdate |