Release 3.54.0 (#1258)
vbrodsky authored Oct 11, 2023
2 parents ddb8063 + d67624e commit 6a34aeb
Showing 16 changed files with 153 additions and 50 deletions.
4 changes: 3 additions & 1 deletion .yapfignore
@@ -1 +1,3 @@
pytest.ini
Makefile
*.txt
*.ini
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,4 +1,10 @@
# Changelog
# Version 3.54.0 (2023-10-10)
## Added
* Add exports v1 deprecation warning
* Create method in SDK to modify LPO priorities in bulk
## Removed
* Remove backoff library
# Version 3.53.0 (2023-10-03)
## Added
* Remove LPO deprecation warning and allow greater range of priority values
2 changes: 1 addition & 1 deletion docs/source/conf.py
@@ -21,7 +21,7 @@
copyright = '2021, Labelbox'
author = 'Labelbox'

release = '3.53.0'
release = '3.54.0'

# -- General configuration ---------------------------------------------------

2 changes: 1 addition & 1 deletion labelbox/__init__.py
@@ -1,5 +1,5 @@
name = "labelbox"
__version__ = "3.53.0"
__version__ = "3.54.0"

from labelbox.client import Client
from labelbox.schema.project import Project
11 changes: 4 additions & 7 deletions labelbox/schema/annotation_import.py
@@ -5,7 +5,7 @@
import time
from typing import Any, BinaryIO, Dict, List, Union, TYPE_CHECKING, cast

import backoff
from google.api_core import retry
from labelbox import parser
import requests
from tqdm import tqdm # type: ignore
@@ -109,12 +109,9 @@ def wait_until_done(self,
pbar.update(100 - pbar.n)
pbar.close()

@backoff.on_exception(
backoff.expo,
(labelbox.exceptions.ApiLimitError, labelbox.exceptions.TimeoutError,
labelbox.exceptions.NetworkError),
max_tries=10,
jitter=None)
@retry.Retry(predicate=retry.if_exception_type(
labelbox.exceptions.ApiLimitError, labelbox.exceptions.TimeoutError,
labelbox.exceptions.NetworkError))
def __backoff_refresh(self) -> None:
self.refresh()

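For readers following the dependency change: `google.api_core`'s `retry.Retry` retries the decorated call with exponential backoff whenever its predicate matches the raised exception, which is what the removed `backoff.on_exception(backoff.expo, ...)` decorator provided. A minimal sketch of the pattern follows; `TransientError` and `fetch_status` are illustrative stand-ins rather than SDK code, and note that `Retry` bounds retries by an overall deadline rather than `backoff`'s `max_tries=10`.

```python
from google.api_core import retry


class TransientError(Exception):
    """Illustrative stand-in for ApiLimitError, TimeoutError and NetworkError."""


@retry.Retry(
    predicate=retry.if_exception_type(TransientError),  # retry only these errors
    initial=1.0,     # first delay in seconds
    maximum=60.0,    # cap on the delay between attempts
    multiplier=2.0,  # exponential growth factor
    deadline=120.0,  # give up once this much total time has elapsed
)
def fetch_status() -> str:
    # In the SDK this would be the refresh() call; here it simply succeeds.
    return "COMPLETE"


print(fetch_status())
```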
5 changes: 5 additions & 0 deletions labelbox/schema/batch.py
@@ -9,6 +9,7 @@
import requests
import logging
import time
import warnings

if TYPE_CHECKING:
from labelbox import Project
@@ -103,6 +104,10 @@ def export_data_rows(self,
Raises:
LabelboxError: if the export fails or is unable to download within the specified time.
"""
warnings.warn(
"You are currently utilizing exports v1 for this action, which will be deprecated after December 31st, 2023. We recommend transitioning to exports v2. To view export v2 details, visit our docs: https://docs.labelbox.com/reference/label-export",
DeprecationWarning)

id_param = "batchId"
metadata_param = "includeMetadataInput"
query_str = """mutation GetBatchDataRowsExportUrlPyApi($%s: ID!, $%s: Boolean!)
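The warning added above steers callers toward exports v2. A minimal migration sketch follows, assuming a configured `Client`; the API key and project ID are placeholders, and the `params` keys plus the task's `wait_till_done()`/`result` usage mirror the `export_v2` calls in this PR's integration tests rather than serving as a full reference.

```python
from labelbox import Client

client = Client(api_key="YOUR_API_KEY")          # placeholder credentials
project = client.get_project("YOUR_PROJECT_ID")  # placeholder project ID

# Before (exports v1): calls such as batch.export_data_rows() or
# project.export_labels() now emit the DeprecationWarning shown above.

# After (exports v2): request an export task and wait for it to finish.
export_task = project.export_v2(
    params={
        "data_row_details": True,  # illustrative params; enable the fields you need
        "label_details": True,
    })
export_task.wait_till_done()
print(export_task.result)
```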
10 changes: 4 additions & 6 deletions labelbox/schema/bulk_import_request.py
@@ -6,7 +6,7 @@
import logging
from pathlib import Path
import pydantic
import backoff
from google.api_core import retry
from labelbox import parser
import requests
from pydantic import BaseModel, root_validator, validator
@@ -197,11 +197,9 @@ def wait_until_done(self, sleep_time_seconds: int = 5) -> None:
time.sleep(sleep_time_seconds)
self.__exponential_backoff_refresh()

@backoff.on_exception(
backoff.expo, (lb_exceptions.ApiLimitError, lb_exceptions.TimeoutError,
lb_exceptions.NetworkError),
max_tries=10,
jitter=None)
@retry.Retry(predicate=retry.if_exception_type(lb_exceptions.ApiLimitError,
lb_exceptions.TimeoutError,
lb_exceptions.NetworkError))
def __exponential_backoff_refresh(self) -> None:
self.refresh()

4 changes: 4 additions & 0 deletions labelbox/schema/dataset.py
@@ -5,6 +5,7 @@
from collections.abc import Iterable
from string import Template
import time
import warnings

from labelbox import parser
from itertools import islice
@@ -565,6 +566,9 @@ def export_data_rows(self,
Raises:
LabelboxError: if the export fails or is unable to download within the specified time.
"""
warnings.warn(
"You are currently utilizing exports v1 for this action, which will be deprecated after December 31st, 2023. We recommend transitioning to exports v2. To view export v2 details, visit our docs: https://docs.labelbox.com/reference/label-export",
DeprecationWarning)
id_param = "datasetId"
metadata_param = "includeMetadataInput"
query_str = """mutation GetDatasetDataRowsExportUrlPyApi($%s: ID!, $%s: Boolean!)
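The same notice is emitted from `Dataset.export_data_rows`, `ModelRun.export_labels`, and the `Project` export methods further down. Below is a small sketch of how a test suite might promote the notice to an error (to flag remaining exports v1 call sites) or silence it until migration; it uses only the standard `warnings` module, and the filter pattern simply matches the message text added in this release.

```python
import warnings

# Promote the exports v1 notice to an error so lingering call sites fail fast;
# the pattern matches the start of the warning message added in this release.
warnings.filterwarnings("error",
                        message=".*exports v1.*",
                        category=DeprecationWarning)

# ...or temporarily silence it around a single legacy call:
with warnings.catch_warnings():
    warnings.simplefilter("ignore", DeprecationWarning)
    pass  # e.g. url = dataset.export_data_rows()  (hypothetical call site)
```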
4 changes: 4 additions & 0 deletions labelbox/schema/model_run.py
@@ -5,6 +5,7 @@
import time
import logging
import requests
import warnings
from labelbox import parser
from enum import Enum

@@ -470,6 +471,9 @@ def export_labels(
If the server didn't generate during the `timeout_seconds` period,
None is returned.
"""
warnings.warn(
"You are currently utilizing exports v1 for this action, which will be deprecated after December 31st, 2023. We recommend transitioning to exports v2. To view export v2 details, visit our docs: https://docs.labelbox.com/reference/label-export",
DeprecationWarning)
sleep_time = 2
query_str = """mutation exportModelRunAnnotationsPyApi($modelRunId: ID!) {
exportModelRunAnnotations(data: {modelRunId: $modelRunId}) {
61 changes: 61 additions & 0 deletions labelbox/schema/project.py
@@ -1,6 +1,7 @@
import json
import logging
import time
import warnings
from collections import namedtuple
from datetime import datetime, timezone
from pathlib import Path
@@ -231,6 +232,9 @@ def export_queued_data_rows(
Raises:
LabelboxError: if the export fails or is unable to download within the specified time.
"""
warnings.warn(
"You are currently utilizing exports v1 for this action, which will be deprecated after December 31st, 2023. We recommend transitioning to exports v2. To view export v2 details, visit our docs: https://docs.labelbox.com/reference/label-export",
DeprecationWarning)
id_param = "projectId"
metadata_param = "includeMetadataInput"
query_str = """mutation GetQueuedDataRowsExportUrlPyApi($%s: ID!, $%s: Boolean!)
@@ -334,6 +338,9 @@ def export_labels(self,
URL of the data file with this Project's labels. If the server didn't
generate during the `timeout_seconds` period, None is returned.
"""
warnings.warn(
"You are currently utilizing exports v1 for this action, which will be deprecated after December 31st, 2023. We recommend transitioning to exports v2. To view export v2 details, visit our docs: https://docs.labelbox.com/reference/label-export",
DeprecationWarning)

def _string_from_dict(dictionary: dict, value_with_quotes=False) -> str:
"""Returns a concatenated string of the dictionary's keys and values
@@ -1166,6 +1173,60 @@ def set_labeling_parameter_overrides(self, data) -> bool:
res = self.client.execute(query_str, {id_param: self.uid})
return res["project"]["setLabelingParameterOverrides"]["success"]

def update_data_row_labeling_priority(
self,
data_rows: List[str],
priority: int,
) -> bool:
"""
Updates labeling parameter overrides to this project in bulk. This method allows up to 1 million data rows to be
updated at once.
See information on priority here:
https://docs.labelbox.com/en/configure-editor/queue-system#reservation-system
Args:
data_rows (iterable): An iterable of data row ids.
priority (int): Priority for the new override. See above for more information.
Returns:
bool, indicates if the operation was a success.
"""

method = "createQueuePriorityUpdateTask"
priority_param = "priority"
project_param = "projectId"
data_rows_param = "dataRowIds"
query_str = """mutation %sPyApi(
$%s: Int!
$%s: ID!
$%s: [ID!]
) {
project(where: { id: $%s }) {
%s(
data: { priority: $%s, dataRowIds: $%s }
) {
taskId
}
}
}
""" % (method, priority_param, project_param, data_rows_param,
project_param, method, priority_param, data_rows_param)
res = self.client.execute(
query_str, {
priority_param: priority,
project_param: self.uid,
data_rows_param: data_rows
})["project"][method]

task_id = res['taskId']

task = self._wait_for_task(task_id)
if task.status != "COMPLETE":
raise LabelboxError(f"Priority was not updated successfully: " +
json.dumps(task.errors))
return True

def upsert_review_queue(self, quota_factor) -> None:
""" Sets the the proportion of total assets in a project to review.
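A short usage sketch of the new `update_data_row_labeling_priority` method, mirroring the `test_set_labeling_priority` integration test at the bottom of this diff; the credentials and data row ids are placeholders, and the priority value follows the 1 (highest) to 5 (lowest) convention used in the test fixtures.

```python
from labelbox import Client

client = Client(api_key="YOUR_API_KEY")          # placeholder credentials
project = client.get_project("YOUR_PROJECT_ID")  # placeholder project ID

# Up to 1 million data row ids can be re-prioritized in a single call.
data_row_ids = ["<DATA_ROW_ID_1>", "<DATA_ROW_ID_2>", "<DATA_ROW_ID_3>"]

success = project.update_data_row_labeling_priority(data_row_ids, priority=1)
assert success  # the method raises LabelboxError itself if the task fails

# The new priority is visible through the existing overrides query:
for override in project.labeling_parameter_overrides():
    print(override.priority)
```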
3 changes: 0 additions & 3 deletions mypy.ini
@@ -1,8 +1,5 @@
[mypy]

[mypy-backoff.*]
ignore_missing_imports = True

[mypy-google.*]
ignore_missing_imports = True

1 change: 0 additions & 1 deletion requirements.txt
@@ -1,4 +1,3 @@
backoff==1.10.0
geojson
google-api-core>=1.22.1
imagesize
4 changes: 2 additions & 2 deletions setup.py
@@ -20,8 +20,8 @@
url="https://labelbox.com",
packages=setuptools.find_packages(),
install_requires=[
"backoff==1.10.0", "requests>=2.22.0", "google-api-core>=1.22.1",
"pydantic>=1.8,<2.0", "tqdm", "python-dateutil>=2.8.2,<2.9.0"
"requests>=2.22.0", "google-api-core>=1.22.1", "pydantic>=1.8,<2.0",
"tqdm", "python-dateutil>=2.8.2,<2.9.0"
],
extras_require={
'data': [
25 changes: 24 additions & 1 deletion tests/integration/conftest.py
@@ -11,7 +11,7 @@
import pytest
import requests

from labelbox import Dataset
from labelbox import Dataset, DataRow
from labelbox import LabelingFrontend
from labelbox import OntologyBuilder, Tool, Option, Classification, MediaType
from labelbox.orm import query
@@ -167,6 +167,29 @@ def consensus_project(client, rand_gen):
project.delete()


@pytest.fixture
def consensus_project_with_batch(consensus_project, initial_dataset, rand_gen,
image_url):
project = consensus_project
dataset = initial_dataset

task = dataset.create_data_rows([{DataRow.row_data: image_url}] * 3)
task.wait_till_done()
assert task.status == "COMPLETE"

data_rows = list(dataset.data_rows())
assert len(data_rows) == 3

batch = project.create_batch(
rand_gen(str),
data_rows, # sample of data row objects
5 # priority between 1(Highest) - 5(lowest)
)

yield [project, batch, data_rows]
batch.delete()


@pytest.fixture
def dataset(client, rand_gen):
dataset = client.create_dataset(name=rand_gen(str))
21 changes: 13 additions & 8 deletions tests/integration/export_v2/test_export_video.py
@@ -17,13 +17,14 @@ def org_id(client):
return client.get_organization().uid


def test_export_v2_video(client, configured_project_without_data_rows,
video_data, video_data_row, is_adv_enabled,
bbox_video_annotation_objects, rand_gen, user_id,
org_id):

orgid = client.get_organization().uid
userid = client.get_user().uid
def test_export_v2_video(
client,
configured_project_without_data_rows,
video_data,
video_data_row,
bbox_video_annotation_objects,
rand_gen,
):

project = configured_project_without_data_rows
project_id = project.uid
@@ -51,6 +52,7 @@ def test_export_v2_video(client, configured_project_without_data_rows,

num_retries = 5
task = None

while (num_retries > 0):
task = project.export_v2(
params={
@@ -72,7 +74,10 @@ def test_export_v2_video(client, configured_project_without_data_rows,
assert data_row_export['global_key'] == video_data_row['global_key']
assert data_row_export['row_data'] == video_data_row['row_data']
assert export_data[0]['media_attributes']['mime_type'] == 'video/mp4'

assert export_data[0]['media_attributes'][
'frame_rate'] == 10 # as per the video_data fixture
assert export_data[0]['media_attributes'][
'frame_count'] == 100 # as per the video_data fixture
expected_export_label = {
'label_kind': 'Video',
'version': '1.0.0',
40 changes: 21 additions & 19 deletions tests/integration/test_labeling_parameter_overrides.py
@@ -2,23 +2,8 @@
from labelbox import DataRow


def test_labeling_parameter_overrides(consensus_project, initial_dataset,
rand_gen, image_url):
project = consensus_project
dataset = initial_dataset

task = dataset.create_data_rows([{DataRow.row_data: image_url}] * 3)
task.wait_till_done()
assert task.status == "COMPLETE"

data_rows = list(dataset.data_rows())
assert len(data_rows) == 3

project.create_batch(
rand_gen(str),
data_rows, # sample of data row objects
5 # priority between 1(Highest) - 5(lowest)
)
def test_labeling_parameter_overrides(consensus_project_with_batch):
[project, _, data_rows] = consensus_project_with_batch

init_labeling_parameter_overrides = list(
project.labeling_parameter_overrides())
@@ -45,10 +30,27 @@ def test_labeling_parameter_overrides(consensus_project, initial_dataset,
data = [(data_rows[2], "a_string", 3)]
project.set_labeling_parameter_overrides(data)
assert str(exc_info.value) == \
f"Priority must be an int. Found <class 'str'> for data_row {data_rows[2]}. Index: 0"
f"Priority must be an int. Found <class 'str'> for data_row {data_rows[2]}. Index: 0"

with pytest.raises(TypeError) as exc_info:
data = [(data_rows[2].uid, 1)]
project.set_labeling_parameter_overrides(data)
assert str(exc_info.value) == \
"data_row should be be of type DataRow. Found <class 'str'>. Index: 0"
"data_row should be be of type DataRow. Found <class 'str'>. Index: 0"


def test_set_labeling_priority(consensus_project_with_batch):
[project, _, data_rows] = consensus_project_with_batch

init_labeling_parameter_overrides = list(
project.labeling_parameter_overrides())
assert len(init_labeling_parameter_overrides) == 3
assert {o.priority for o in init_labeling_parameter_overrides} == {5, 5, 5}

data = [data_row.uid for data_row in data_rows]
success = project.update_data_row_labeling_priority(data, 1)
assert success

updated_overrides = list(project.labeling_parameter_overrides())
assert len(updated_overrides) == 3
assert {o.priority for o in updated_overrides} == {1, 1, 1}
