
Commit 8ec4bca

3.58.0 (#1325)
2 parents eaf6efc + 57790a2 commit 8ec4bca

63 files changed: 3843 additions, 1186 deletions

CHANGELOG.md
Lines changed: 14 additions & 0 deletions

@@ -1,5 +1,19 @@
 # Changelog
 
+# Version 3.58.0 (2023-12-11)
+## Added
+* `ontology_id` to the model app instantiation
+* LLM data generation label types
+* `run_foundry_app` to support running model foundry apps
+* Two methods for sending data rows to any workflow task in a project, which can also include predictions from a model run or annotations from a different project
+## Fixed
+* Documentation index for identifiables
+## Removed
+* `Project.datasets` and `Datasets.projects` methods, as they have been deprecated
+## Notebooks
+* Added notebooks for human labeling (GT/MAL/MEA) + data generation (GT/MAL)
+* Removed relationship annotations from text and conversational imports
+
 # Version 3.57.0 (2023-11-30)
 ## Added
 * Global key support for Project move_data_rows_to_task_queue
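To ground the workflow-task items above (global key support for move_data_rows_to_task_queue arrived in 3.57.0, and this release fixes the identifiables documentation index), here is a minimal sketch of moving data rows into a task queue by global key. The placeholder API key, project id, queue-name filter, and global keys are illustrative, and treating `GlobalKeys` from `labelbox.schema.identifiables` as the accepted identifier wrapper is an assumption, not something shown in this diff.

    import labelbox as lb
    from labelbox.schema.identifiables import GlobalKeys  # identifiables module covered by the docs index fix

    client = lb.Client(api_key="YOUR_API_KEY")         # placeholder
    project = client.get_project("YOUR_PROJECT_ID")    # placeholder

    # Pick a destination queue, e.g. the first one whose name mentions "review" (filter is illustrative).
    task_queue = next(tq for tq in project.task_queues() if "review" in tq.name.lower())

    # Global key support (3.57.0): address rows by global key instead of data row id.
    project.move_data_rows_to_task_queue(
        data_row_ids=GlobalKeys(["conversation-1", "conversation-2"]),  # assumed wrapper
        task_queue_id=task_queue.uid,
    )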

Makefile
Lines changed: 9 additions & 0 deletions

@@ -45,6 +45,15 @@ test-onprem: build-image
 		-e LABELBOX_TEST_ONPREM_HOSTNAME=${LABELBOX_TEST_ONPREM_HOSTNAME} \
 		local/labelbox-python:test pytest $(PATH_TO_TEST)
 
+test-dev0: build-image
+	docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
+		-e LABELBOX_TEST_ENVIRON="custom" \
+		-e DA_GCP_LABELBOX_API_KEY=${DA_GCP_LABELBOX_API_KEY} \
+		-e LABELBOX_TEST_API_KEY_CUSTOM=${LABELBOX_TEST_API_KEY_CUSTOM} \
+		-e LABELBOX_TEST_GRAPHQL_API_ENDPOINT="https://api.dev0.na-us.lb-dev.xyz/graphql" \
+		-e LABELBOX_TEST_REST_API_ENDPOINT="https://api.dev0.na-us.lb-dev.xyz/api/v1" \
+		local/labelbox-python:test pytest $(PATH_TO_TEST)
+
 test-custom: build-image
 	docker run -it --rm -v ${PWD}:/usr/src -w /usr/src \
 		-e LABELBOX_TEST_ENVIRON="custom" \
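The new test-dev0 target mirrors test-custom but pins the GraphQL and REST endpoints to the dev0 environment, so only the two API keys need to come from the caller. A typical invocation would set DA_GCP_LABELBOX_API_KEY and LABELBOX_TEST_API_KEY_CUSTOM in the environment and run `make test-dev0 PATH_TO_TEST=tests/integration` (the test path is illustrative, and overriding PATH_TO_TEST on the command line is assumed to work the same way as for the other targets).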

docs/source/conf.py
Lines changed: 1 addition & 1 deletion

@@ -21,7 +21,7 @@
 copyright = '2021, Labelbox'
 author = 'Labelbox'
 
-release = '3.57.0'
+release = '3.58.0'
 
 # -- General configuration ---------------------------------------------------

docs/source/index.rst
Lines changed: 1 addition & 0 deletions

@@ -227,6 +227,7 @@ ExportTask
 ---------------------------
 .. automodule:: labelbox.schema.export_task
    :members:
+   :exclude-members: FileRetrieverByLine, FileRetrieverByOffset, FileRetrieverStrategy, Range, Converter
    :show-inheritance:
 
 Identifiables

examples/annotation_import/conversational.ipynb
Lines changed: 44 additions & 121 deletions

@@ -33,16 +33,17 @@
 "# Conversational Text Annotation Import\n",
 "* This notebook will provide examples of each supported annotation type for conversational text assets, and also cover MAL and Label Import methods:\n",
 "\n",
-"Suported annotations that can be uploaded through the SDK\n",
+"Supported annotations that can be uploaded through the SDK\n",
 "\n",
 "* Classification Radio \n",
 "* Classification Checklist \n",
 "* Classification Free Text \n",
 "* NER\n",
-"* Relationships (only supported for MAL imports)\n",
+"\n",
 "\n",
 "**Not** supported annotations\n",
 "\n",
+"* Relationships\n",
 "* Bouding box \n",
 "* Polygon \n",
 "* Point\n",
@@ -139,11 +140,11 @@
 " )\n",
 ")\n",
 "\n",
-"ner_annotation_ndjson = { \n",
+"ner_annotation_ndjson = {\n",
 " \"name\": \"ner\",\n",
-" \"location\": { \n",
-" \"start\": 0, \n",
-" \"end\": 8 \n",
+" \"location\": {\n",
+" \"start\": 0,\n",
+" \"end\": 8\n",
 " },\n",
 " \"messageId\": \"4\"\n",
 " }"
@@ -177,15 +178,15 @@
 {
 "metadata": {},
 "source": [
-"##### Checklist Classification ####### \n",
+"##### Checklist Classification #######\n",
 "\n",
 "checklist_annotation= lb_types.ClassificationAnnotation(\n",
 " name=\"checklist_convo\", # must match your ontology feature\"s name\n",
 " value=lb_types.Checklist(\n",
 " answer = [\n",
 " lb_types.ClassificationAnswer(\n",
 " name = \"first_checklist_answer\"\n",
-" ), \n",
+" ),\n",
 " lb_types.ClassificationAnswer(\n",
 " name = \"second_checklist_answer\"\n",
 " )\n",
@@ -214,7 +215,7 @@
 "######## Radio Classification ######\n",
 "\n",
 "radio_annotation = lb_types.ClassificationAnnotation(\n",
-" name=\"radio_convo\", \n",
+" name=\"radio_convo\",\n",
 " value=lb_types.Radio(answer = lb_types.ClassificationAnswer(name = \"first_radio_answer\")),\n",
 " message_id=\"0\"\n",
 ")\n",
@@ -231,71 +232,6 @@
 "outputs": [],
 "execution_count": null
 },
-{
-"metadata": {},
-"source": [
-"####### Relationships ########## \n",
-"ner_source = lb_types.ObjectAnnotation(\n",
-" name=\"ner\",\n",
-" value=lb_types.ConversationEntity(\n",
-" start=16,\n",
-" end=26,\n",
-" message_id=\"4\"\n",
-" )\n",
-")\n",
-"ner_target = lb_types.ObjectAnnotation(\n",
-" name=\"ner\",\n",
-" value=lb_types.ConversationEntity(\n",
-" start=29, \n",
-" end=34, \n",
-" message_id=\"4\"\n",
-" )\n",
-")\n",
-"\n",
-"ner_relationship = lb_types.RelationshipAnnotation(\n",
-" name=\"relationship\",\n",
-" value=lb_types.Relationship(\n",
-" source=ner_source,\n",
-" target=ner_target,\n",
-" type=lb_types.Relationship.Type.UNIDIRECTIONAL,\n",
-" ))\n",
-"\n",
-"uuid_source = str(uuid.uuid4())\n",
-"uuid_target = str(uuid.uuid4())\n",
-"\n",
-"ner_source_ndjson = { \n",
-" \"uuid\": uuid_source, \n",
-" \"name\": \"ner\",\n",
-" \"location\": { \n",
-" \"start\": 16, \n",
-" \"end\": 26 \n",
-" },\n",
-" \"messageId\": \"4\"\n",
-" }\n",
-"\n",
-"ner_target_ndjson = { \n",
-" \"uuid\": uuid_target,\n",
-" \"name\": \"ner\",\n",
-" \"location\": { \n",
-" \"start\": 29, \n",
-" \"end\": 34\n",
-" },\n",
-" \"messageId\": \"4\"\n",
-" }\n",
-"\n",
-"ner_relationship_annotation_ndjson = {\n",
-" \"name\": \"relationship\", \n",
-" \"relationship\": {\n",
-" \"source\": uuid_source,\n",
-" \"target\": uuid_target,\n",
-" \"type\": \"bidirectional\"\n",
-" }\n",
-"}"
-],
-"cell_type": "code",
-"outputs": [],
-"execution_count": null
-},
 {
 "metadata": {},
 "source": [
@@ -328,12 +264,12 @@
 " \"name\": \"first_checklist_answer\",\n",
 " \"classifications\" : [\n",
 " {\n",
-" \"name\": \"sub_checklist_question\", \n",
+" \"name\": \"sub_checklist_question\",\n",
 " \"answer\": {\n",
 " \"name\": \"first_sub_checklist_answer\"\n",
 " }\n",
-" } \n",
-" ] \n",
+" }\n",
+" ]\n",
 " }]\n",
 "}\n",
 "# Global\n",
@@ -424,28 +360,27 @@
 "metadata": {},
 "source": [
 "ontology_builder = lb.OntologyBuilder(\n",
-" tools=[ \n",
+" tools=[\n",
 " lb.Tool(tool=lb.Tool.Type.NER,name=\"ner\"),\n",
-" lb.Tool(tool=lb.Tool.Type.RELATIONSHIP,name=\"relationship\")\n",
-" ], \n",
-" classifications=[ \n",
-" lb.Classification( \n",
+" ],\n",
+" classifications=[\n",
+" lb.Classification(\n",
 " class_type=lb.Classification.Type.TEXT,\n",
-" scope=lb.Classification.Scope.INDEX, \n",
-" name=\"text_convo\"), \n",
-" lb.Classification( \n",
-" class_type=lb.Classification.Type.CHECKLIST, \n",
-" scope=lb.Classification.Scope.INDEX, \n",
-" name=\"checklist_convo\", \n",
+" scope=lb.Classification.Scope.INDEX,\n",
+" name=\"text_convo\"),\n",
+" lb.Classification(\n",
+" class_type=lb.Classification.Type.CHECKLIST,\n",
+" scope=lb.Classification.Scope.INDEX,\n",
+" name=\"checklist_convo\",\n",
 " options=[\n",
 " lb.Option(value=\"first_checklist_answer\"),\n",
-" lb.Option(value=\"second_checklist_answer\") \n",
+" lb.Option(value=\"second_checklist_answer\")\n",
 " ]\n",
-" ), \n",
-" lb.Classification( \n",
-" class_type=lb.Classification.Type.RADIO, \n",
-" name=\"radio_convo\", \n",
-" scope=lb.Classification.Scope.INDEX, \n",
+" ),\n",
+" lb.Classification(\n",
+" class_type=lb.Classification.Type.RADIO,\n",
+" name=\"radio_convo\",\n",
+" scope=lb.Classification.Scope.INDEX,\n",
 " options=[\n",
 " lb.Option(value=\"first_radio_answer\"),\n",
 " lb.Option(value=\"second_radio_answer\")\n",
@@ -460,7 +395,7 @@
 " options=[\n",
 " lb.Classification(\n",
 " class_type=lb.Classification.Type.CHECKLIST,\n",
-" name=\"sub_checklist_question\", \n",
+" name=\"sub_checklist_question\",\n",
 " options=[lb.Option(\"first_sub_checklist_answer\")]\n",
 " )\n",
 " ])\n",
@@ -503,10 +438,10 @@
 "metadata": {},
 "source": [
 "# Create Labelbox project\n",
-"project = client.create_project(name=\"Conversational Text Annotation Import Demo\", \n",
+"project = client.create_project(name=\"Conversational Text Annotation Import Demo\",\n",
 " media_type=lb.MediaType.Conversational)\n",
 "\n",
-"# Setup your ontology \n",
+"# Setup your ontology\n",
 "project.setup_editor(ontology) # Connect your ontology and editor to your project"
 ],
 "cell_type": "code",
@@ -523,8 +458,6 @@
 {
 "metadata": {},
 "source": [
-"# Setup Batches and Ontology\n",
-"\n",
 "# Create a batch to send to your MAL project\n",
 "batch = project.create_batch(\n",
 " \"first-batch-convo-demo\", # Each batch in a project must have a unique name\n",
@@ -570,9 +503,6 @@
 " text_annotation,\n",
 " checklist_annotation,\n",
 " radio_annotation,\n",
-" ner_source,\n",
-" ner_target,\n",
-" ner_relationship,\n",
 " nested_radio_annotation,\n",
 " nested_checklist_annotation\n",
 " ]\n",
@@ -600,9 +530,6 @@
 " text_annotation_ndjson,\n",
 " checklist_annotation_ndjson,\n",
 " radio_annotation_ndjson,\n",
-" ner_source_ndjson,\n",
-" ner_target_ndjson,\n",
-" ner_relationship_annotation_ndjson,\n",
 " nested_checklist_annotation_ndjson,\n",
 " nested_radio_annotation_ndjson\n",
 " ]:\n",
@@ -637,9 +564,9 @@
 "source": [
 "# Upload our label using Model-Assisted Labeling\n",
 "upload_job = lb.MALPredictionImport.create_from_objects(\n",
-" client = client, \n",
-" project_id = project.uid, \n",
-" name=f\"mal_job-{str(uuid.uuid4())}\", \n",
+" client = client,\n",
+" project_id = project.uid,\n",
+" name=f\"mal_job-{str(uuid.uuid4())}\",\n",
 " predictions=label)\n",
 "\n",
 "upload_job.wait_until_done()\n",
@@ -660,20 +587,16 @@
 {
 "metadata": {},
 "source": [
-"# Upload label for this data row in project \n",
-"# Uncomment this code when excluding relationships from label import\n",
-"# Relationships are not currently supported for label import\n",
+"# Upload label for this data row in project\n",
+"upload_job = lb.LabelImport.create_from_objects(\n",
+" client = client,\n",
+" project_id = project.uid,\n",
+" name=\"label_import_job\"+str(uuid.uuid4()),\n",
+" labels=label)\n",
 "\n",
-"\n",
-"# upload_job = lb.LabelImport.create_from_objects(\n",
-"# client = client, \n",
-"# project_id = project.uid, \n",
-"# name=\"label_import_job\"+str(uuid.uuid4()), \n",
-"# labels=label)\n",
-"\n",
-"# upload_job.wait_until_done();\n",
-"# print(\"Errors:\", upload_job.errors)\n",
-"# print(\"Status of uploads: \", upload_job.statuses)"
+"upload_job.wait_until_done()\n",
+"print(\"Errors:\", upload_job.errors)\n",
+"print(\"Status of uploads: \", upload_job.statuses)"
 ],
 "cell_type": "code",
 "outputs": [],
