
Commit c9c66b7

Merge pull request #267 from Labelbox/ms/test
bug fix
2 parents 53247b9 + e592a4d

4 files changed, +11 -7 lines changed


CHANGELOG.md

Lines changed: 4 additions & 0 deletions
@@ -1,5 +1,9 @@
 # Changelog
 
+# Version 3.2.1 (2021-08-31)
+## Fix
+* Resolved issue where `create_data_rows()` was not working on Amazon Linux
+
 # Version 3.2.0 (2021-08-26)
 ## Added
 * List `BulkImportRequest`s for a project with `Project.bulk_import_requests()`
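For context, a minimal usage sketch of the call the 3.2.1 entry refers to. The API key, dataset ID, URL, and attachment payload below are hypothetical; the `attachments` list is exactly the input that `validate_attachments` in this commit checks:

from labelbox import Client

client = Client(api_key="YOUR_API_KEY")       # hypothetical key
dataset = client.get_dataset("<dataset-id>")  # hypothetical ID

# Each item needs 'row_data'; 'attachments' is optional and is what
# AssetAttachment.validate_attachment_json() inspects in this commit.
task = dataset.create_data_rows([
    {
        "row_data": "https://example.com/image-1.jpg",
        "attachments": [
            {"type": "TEXT", "value": "example attachment"},
        ],
    },
])
task.wait_till_done()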

labelbox/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 name = "labelbox"
-__version__ = "3.2.0"
+__version__ = "3.2.1"
 
 from labelbox.schema.project import Project
 from labelbox.client import Client

labelbox/schema/dataset.py

Lines changed: 2 additions & 2 deletions
@@ -118,6 +118,7 @@ def create_data_rows(self, items):
         """
         file_upload_thread_count = 20
         DataRow = Entity.DataRow
+        AssetAttachment = Entity.AssetAttachment
 
         def upload_if_necessary(item):
             row_data = item['row_data']
@@ -135,8 +136,7 @@ def validate_attachments(item):
             if attachments:
                 if isinstance(attachments, list):
                     for attachment in attachments:
-                        Entity.AssetAttachment.validate_attachment_json(
-                            attachment)
+                        AssetAttachment.validate_attachment_json(attachment)
                 else:
                     raise ValueError(
                         f"Attachments must be a list. Found {type(attachments)}"

tests/integration/test_data_row_metadata.py

Lines changed: 4 additions & 4 deletions
@@ -119,7 +119,7 @@ def test_bulk_partial_delete_datarow_metadata(datarow, mdo):
     assert len(datarow.metadata["fields"]) == (n_fields + 4)
 
 
-@pytest.mark.slow
+@pytest.mark.skip
 def test_large_bulk_delete_datarow_metadata(big_dataset, mdo):
     metadata = []
     n_fields_start = 0
@@ -140,7 +140,7 @@ def test_large_bulk_delete_datarow_metadata(big_dataset, mdo):
     assert len(errors) == 0
 
     deletes = []
-    for dr in big_dataset.export_data_rows():
+    for dr in big_dataset.data_rows():
         deletes.append(
             DeleteDataRowMetadata(
                 data_row_id=dr.uid,
@@ -151,8 +151,8 @@ def test_large_bulk_delete_datarow_metadata(big_dataset, mdo):
 
     errors = mdo.bulk_delete(deletes)
     assert len(errors) == 0
-    for dr in big_dataset.export_data_rows():
-        assert len(dr.metadata["fields"]) == 1 + n_fields_start
+    for dr in big_dataset.data_rows():
+        assert len(dr.metadata["fields"]) == n_fields_start
         break
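Note on the test change: the assertions now iterate `big_dataset.data_rows()` (the paginated relationship on `Dataset`) rather than `big_dataset.export_data_rows()`, and stop after the first row. A short sketch of that iteration style; the key and ID are hypothetical:

from labelbox import Client

client = Client(api_key="YOUR_API_KEY")       # hypothetical key
dataset = client.get_dataset("<dataset-id>")  # hypothetical ID

# data_rows() pages through results lazily, so it is cheap to stop
# early, as the updated test does with its `break`.
for dr in dataset.data_rows():
    print(dr.uid, len(dr.metadata["fields"]))
    break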
