Commit e060f88

Merge branch 'develop' into PNO/PLT-1016-Manage-external-workforces
2 parents b0b7f66 + cbf5dd1 commit e060f88

45 files changed: +266 -120 lines changed

.github/workflows/publish.yml

Lines changed: 1 addition & 1 deletion
@@ -91,7 +91,7 @@ jobs:
       path: ./dist
   - name: Prepare package and environment
     run: |
-      rye sync
+      rye sync -f --update-all
       rye run toml unset --toml-path pyproject.toml tool.rye.workspace
       rye sync -f --update-all
   - name: Integration Testing

docs/conf.py

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@
 project = 'Python SDK reference'
 copyright = '2024, Labelbox'
 author = 'Labelbox'
-release = '3.70.0'
+release = '3.71.0'

 # -- General configuration ---------------------------------------------------

docs/labelbox/project-overview.rst

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-ProjectOverview
+Project Overview
 ===============================================================================================

 .. automodule:: labelbox.schema.project-overview

examples/annotation_import/audio.ipynb

Lines changed: 1 addition & 1 deletion
@@ -152,7 +152,7 @@
 },
 {
 "metadata": {},
-"source": "# Create one Labelbox dataset\n\nglobal_key = \"sample-audio-1.mp3\"\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/audio-sample-data/sample-audio-1.mp3\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"audio_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)",
+"source": "# Create one Labelbox dataset\n\nglobal_key = \"sample-audio-1.mp3\" + str(uuid.uuid4())\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/audio-sample-data/sample-audio-1.mp3\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"audio_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/conversational.ipynb

Lines changed: 1 addition & 1 deletion
@@ -168,7 +168,7 @@
 },
 {
 "metadata": {},
-"source": "# Create one Labelbox dataset\n\nglobal_key = \"conversation-1.json\"\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"conversational_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)",
+"source": "# Create one Labelbox dataset\n\nglobal_key = \"conversation-1.json\" + str(uuid.uuid4())\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-developer-testing-assets/conversational_text/1000-conversations/conversation-1.json\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"conversational_annotation_import_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/conversational_LLM.ipynb

Lines changed: 1 addition & 1 deletion
@@ -187,7 +187,7 @@
 },
 {
 "metadata": {},
-"source": "global_key = \"pairwise_shooping_asset\"\n\n# Upload data rows\nconvo_data = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/conversational-sample-data/pairwise_shopping_2.json\",\n \"global_key\":\n global_key,\n}\n\n# Create a dataset\ndataset = client.create_dataset(name=\"pairwise_annotation_demo\")\n# Create a datarows\ntask = dataset.create_data_rows([convo_data])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
+"source": "global_key = \"pairwise_shooping_asset\" + str(uuid.uuid4())\n\n# Upload data rows\nconvo_data = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/conversational-sample-data/pairwise_shopping_2.json\",\n \"global_key\":\n global_key,\n}\n\n# Create a dataset\ndataset = client.create_dataset(name=\"pairwise_annotation_demo\")\n# Create a datarows\ntask = dataset.create_data_rows([convo_data])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/dicom.ipynb

Lines changed: 1 addition & 1 deletion
@@ -119,7 +119,7 @@
 },
 {
 "metadata": {},
-"source": "global_key = \"sample-dicom-1.dcm\"\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/dicom-sample-data/sample-dicom-1.dcm\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"dicom_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors :\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
+"source": "global_key = \"sample-dicom-1.dcm\" + str(uuid.uuid4())\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/dicom-sample-data/sample-dicom-1.dcm\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"dicom_demo_dataset\")\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors :\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/html.ipynb

Lines changed: 1 addition & 1 deletion
@@ -159,7 +159,7 @@
 },
 {
 "metadata": {},
-"source": "# Create one Labelbox dataset\n\nglobal_key = \"sample_html_1.html\"\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/html_sample_data/sample_html_1.html\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"html_annotation_import_demo_dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)",
+"source": "# Create one Labelbox dataset\n\nglobal_key = \"sample_html_1.html\" + str(uuid.uuid4())\n\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/html_sample_data/sample_html_1.html\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(\n name=\"html_annotation_import_demo_dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows: \", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/image.ipynb

Lines changed: 1 addition & 1 deletion
@@ -288,7 +288,7 @@
 },
 {
 "metadata": {},
-"source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110.jpeg\"\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"image-demo-dataset\")\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\n\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()",
+"source": "# send a sample image as batch to the project\nglobal_key = \"2560px-Kitano_Street_Kobe01s5s4110.jpeg\" + str(uuid.uuid4())\n\ntest_img_url = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg\",\n \"global_key\":\n global_key,\n}\n\ndataset = client.create_dataset(name=\"image-demo-dataset\")\ntask = dataset.create_data_rows([test_img_url])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/pdf.ipynb

Lines changed: 1 addition & 1 deletion
@@ -205,7 +205,7 @@
 },
 {
 "metadata": {},
-"source": "global_key = \"0801.3483_doc.pdf\"\nimg_url = {\n \"row_data\": {\n \"pdf_url\":\n \"https://storage.googleapis.com/labelbox-datasets/arxiv-pdf/data/99-word-token-pdfs/0801.3483.pdf\"\n },\n \"global_key\": global_key,\n}\n\ndataset = client.create_dataset(name=\"pdf_demo_dataset\")\ntask = dataset.create_data_rows([img_url])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()",
+"source": "global_key = \"0801.3483_doc.pdf\" + str(uuid.uuid4())\nimg_url = {\n \"row_data\": {\n \"pdf_url\":\n \"https://storage.googleapis.com/labelbox-datasets/arxiv-pdf/data/99-word-token-pdfs/0801.3483.pdf\"\n },\n \"global_key\": global_key,\n}\n\ndataset = client.create_dataset(name=\"pdf_demo_dataset\")\ntask = dataset.create_data_rows([img_url])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/text.ipynb

Lines changed: 1 addition & 1 deletion
@@ -168,7 +168,7 @@
 },
 {
 "metadata": {},
-"source": "# You can now include ohter fields like attachments, media type and metadata in the data row creation step: https://docs.labelbox.com/reference/text-file\nglobal_key = \"lorem-ipsum.txt\"\ntext_asset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/nlp/lorem-ipsum.txt\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"TEXT\",\n \"attachments\": [{\n \"type\":\n \"TEXT_URL\",\n \"value\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/text_attachment.txt\",\n }],\n}\n\ndataset = client.create_dataset(\n name=\"text_annotation_import_demo_dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([text_asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
+"source": "# You can now include ohter fields like attachments, media type and metadata in the data row creation step: https://docs.labelbox.com/reference/text-file\nglobal_key = \"lorem-ipsum.txt\" + str(uuid.uuid4())\ntext_asset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/nlp/lorem-ipsum.txt\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"TEXT\",\n \"attachments\": [{\n \"type\":\n \"TEXT_URL\",\n \"value\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/text_attachment.txt\",\n }],\n}\n\ndataset = client.create_dataset(\n name=\"text_annotation_import_demo_dataset\",\n iam_integration=\n None, # Removing this argument will default to the organziation's default iam integration\n)\ntask = dataset.create_data_rows([text_asset])\ntask.wait_till_done()\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/tiled.ipynb

Lines changed: 1 addition & 1 deletion
@@ -195,7 +195,7 @@
 },
 {
 "metadata": {},
-"source": "top_left_bound = lb_types.Point(x=-99.21052827588443, y=19.400498983095076)\nbottom_right_bound = lb_types.Point(x=-99.20534818927473, y=19.39533555271248)\n\nepsg = lb_types.EPSG.EPSG4326\nbounds = lb_types.TiledBounds(epsg=epsg,\n bounds=[top_left_bound, bottom_right_bound])\nglobal_key = \"mexico_city\"\n\ntile_layer = lb_types.TileLayer(\n url=\n \"https://s3-us-west-1.amazonaws.com/lb-tiler-layers/mexico_city/{z}/{x}/{y}.png\"\n)\n\ntiled_image_data = lb_types.TiledImageData(tile_layer=tile_layer,\n tile_bounds=bounds,\n zoom_levels=[17, 23])\n\nasset = {\n \"row_data\": tiled_image_data.asdict(),\n \"global_key\": global_key,\n \"media_type\": \"TMS_GEO\",\n}\n\ndataset = client.create_dataset(name=\"geo_demo_dataset\")\ntask = dataset.create_data_rows([asset])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
+"source": "top_left_bound = lb_types.Point(x=-99.21052827588443, y=19.400498983095076)\nbottom_right_bound = lb_types.Point(x=-99.20534818927473, y=19.39533555271248)\n\nepsg = lb_types.EPSG.EPSG4326\nbounds = lb_types.TiledBounds(epsg=epsg,\n bounds=[top_left_bound, bottom_right_bound])\nglobal_key = \"mexico_city\" + str(uuid.uuid4())\n\ntile_layer = lb_types.TileLayer(\n url=\n \"https://s3-us-west-1.amazonaws.com/lb-tiler-layers/mexico_city/{z}/{x}/{y}.png\"\n)\n\ntiled_image_data = lb_types.TiledImageData(tile_layer=tile_layer,\n tile_bounds=bounds,\n zoom_levels=[17, 23])\n\nasset = {\n \"row_data\": tiled_image_data.asdict(),\n \"global_key\": global_key,\n \"media_type\": \"TMS_GEO\",\n}\n\ndataset = client.create_dataset(name=\"geo_demo_dataset\")\ntask = dataset.create_data_rows([asset])\nprint(\"Errors:\", task.errors)\nprint(\"Failed data rows:\", task.failed_data_rows)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/annotation_import/video.ipynb

Lines changed: 1 addition & 1 deletion
@@ -253,7 +253,7 @@
 },
 {
 "metadata": {},
-"source": "global_key = \"sample-video-jellyfish.mp4\"\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"VIDEO\",\n}\n\ndataset = client.create_dataset(\n name=\"video_demo_dataset\",\n iam_integration=\n None, # If this argument is removed, labelbox will use the default integration for your organization.\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")\n\nif task.errors:\n for error in task.errors:\n if (\"Duplicate global key\" in error[\"message\"] and\n dataset.row_count == 0):\n # If the global key already exists in the workspace the dataset will be created empty, so we can delete it.\n print(f\"Deleting empty dataset: {dataset}\")\n dataset.delete()",
+"source": "global_key = \"sample-video-jellyfish.mp4\" + str(uuid.uuid4())\nasset = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-datasets/video-sample-data/sample-video-2.mp4\",\n \"global_key\":\n global_key,\n \"media_type\":\n \"VIDEO\",\n}\n\ndataset = client.create_dataset(\n name=\"video_demo_dataset\",\n iam_integration=\n None, # If this argument is removed, labelbox will use the default integration for your organization.\n)\ntask = dataset.create_data_rows([asset])\ntask.wait_till_done()\nprint(f\"Failed data rows: {task.failed_data_rows}\")\nprint(f\"Errors: {task.errors}\")",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/basics/data_row_metadata.ipynb

Lines changed: 1 addition & 1 deletion
@@ -218,7 +218,7 @@
 },
 {
 "metadata": {},
-"source": "# A simple example of uploading data rows with metadata\ndataset = client.create_dataset(\n name=\"Simple Data Rows import with metadata example\")\nglobal_key = \"s_basic.jpg\"\ndata_row = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/basic.jpg\",\n \"global_key\":\n global_key,\n}\n# This line works with dictionaries as well as schemas and fields created with DataRowMetadataField\ndata_row[\"metadata_fields\"] = custom_metadata_fields + [\n split_metadata_field,\n capture_datetime_field_dict,\n tag_metadata_field,\n]\n\ntask = dataset.create_data_rows([data_row])\ntask.wait_till_done()\nresult_task = task.result\nprint(result_task)",
+"source": "# A simple example of uploading data rows with metadata\ndataset = client.create_dataset(\n name=\"Simple Data Rows import with metadata example\")\nglobal_key = \"s_basic.jpg\" + str(uuid4())\ndata_row = {\n \"row_data\":\n \"https://storage.googleapis.com/labelbox-sample-datasets/Docs/basic.jpg\",\n \"global_key\":\n global_key,\n}\n# This line works with dictionaries as well as schemas and fields created with DataRowMetadataField\ndata_row[\"metadata_fields\"] = custom_metadata_fields + [\n split_metadata_field,\n capture_datetime_field_dict,\n tag_metadata_field,\n]\n\ntask = dataset.create_data_rows([data_row])\ntask.wait_till_done()\nresult_task = task.result\nprint(result_task)",
 "cell_type": "code",
 "outputs": [],
 "execution_count": null

examples/basics/projects.ipynb

Lines changed: 15 additions & 0 deletions
@@ -266,6 +266,21 @@
 "outputs": [],
 "execution_count": null
 },
+{
+"metadata": {},
+"source": [
+"### Duplicate a project\n",
+"Please see the section [Duplicate a project](https://docs.labelbox.com/docs/create-a-project#duplicate-a-project) to have the scope of the method."
+],
+"cell_type": "markdown"
+},
+{
+"metadata": {},
+"source": "destination_project = project.clone()",
+"cell_type": "code",
+"outputs": [],
+"execution_count": null
+},
 {
 "metadata": {},
 "source": [
