
Commit 89f6fa2

Author: Gareth
Merge pull request #411 from Labelbox/gj/add-headers
Add a header linking to Labelbox, GitHub and Colab
2 parents 7638536 + d9844ab commit 89f6fa2

31 files changed, with 7778 additions and 10412 deletions.
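The header being added is a pair of markdown cells at the top of each notebook: one with the Labelbox logo linking to labelbox.com, and one with Colab and GitHub badges that point at the notebook's own path under examples/. The snippet below is a minimal sketch of how such cells could be prepended across the examples with nbformat; it is an illustration only, not the script actually used for this pull request, and the glob pattern and cell contents are assumptions based on the data_rows.ipynb diff shown further down.

# Sketch only: prepend the Labelbox / Colab / GitHub header cells to every example
# notebook. The HTML mirrors the cells visible in the data_rows.ipynb diff below;
# the glob pattern and badge URL layout are assumptions, not taken from this commit.
import glob
import nbformat

LOGO_CELL = (
    '<td>\n'
    '  <a target="_blank" href="https://labelbox.com" >'
    '<img src="https://labelbox.com/blog/content/images/2021/02/logo-v4.svg" width=256/></a>\n'
    '</td>'
)

BADGES_TEMPLATE = (
    '<td>\n'
    '<a href="https://colab.research.google.com/github/Labelbox/labelbox-python/blob/develop/{path}" target="_blank">'
    '<img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a>\n'
    '</td>\n'
    '\n'
    '<td>\n'
    '<a href="https://github.com/Labelbox/labelbox-python/tree/develop/{path}" target="_blank">'
    '<img src="https://img.shields.io/badge/GitHub-100000?logo=github&logoColor=white" alt="GitHub"></a>\n'
    '</td>'
)

for path in glob.glob("examples/**/*.ipynb", recursive=True):
    nb = nbformat.read(path, as_version=4)
    header = [
        nbformat.v4.new_markdown_cell(LOGO_CELL),
        nbformat.v4.new_markdown_cell(BADGES_TEMPLATE.format(path=path)),
    ]
    # Prepend rather than append so the badges are the first thing a reader sees.
    nb.cells = header + nb.cells
    nbformat.write(nb, path)

Prepending the cells keeps the links visible as soon as a notebook is opened on GitHub or in Colab, which is the point of the change.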

The following notebooks also changed; their large diffs are not rendered by default:

examples/annotation_types/basics.ipynb (928 additions, 1325 deletions)
examples/annotation_types/converters.ipynb (526 additions, 587 deletions)
examples/annotation_types/label_containers.ipynb (387 additions, 606 deletions)
examples/annotation_types/mal_using_annotation_types.ipynb (223 additions, 348 deletions)
examples/annotation_types/tiled_imagery_basics.ipynb (379 additions, 428 deletions)
examples/basics/basics.ipynb (383 additions, 357 deletions)
examples/basics/data_row_metadata.ipynb (76 additions, 229 deletions)

examples/basics/data_rows.ipynb (65 additions, 146 deletions):
@@ -1,5 +1,31 @@
 {
 "cells": [
+{
+"cell_type": "markdown",
+"id": "db768cda",
+"metadata": {},
+"source": [
+"<td>\n",
+"  <a target=\"_blank\" href=\"https://labelbox.com\" ><img src=\"https://labelbox.com/blog/content/images/2021/02/logo-v4.svg\" width=256/></a>\n",
+"</td>"
+]
+},
+{
+"cell_type": "markdown",
+"id": "cb5611d0",
+"metadata": {},
+"source": [
+"<td>\n",
+"<a href=\"https://colab.research.google.com/github/Labelbox/labelbox-python/blob/develop/examples/basics/data_rows.ipynb\" target=\"_blank\"><img\n",
+"src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"></a>\n",
+"</td>\n",
+"\n",
+"<td>\n",
+"<a href=\"https://github.com/Labelbox/labelbox-python/tree/develop/examples/basics/data_rows.ipynb\" target=\"_blank\"><img\n",
+"src=\"https://img.shields.io/badge/GitHub-100000?logo=github&logoColor=white\" alt=\"GitHub\"></a>\n",
+"</td>"
+]
+},
 {
 "cell_type": "markdown",
 "metadata": {},
@@ -22,46 +48,24 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 1,
 "metadata": {},
 "outputs": [],
-"source": [
-"!pip install labelbox"
-]
+"source": "!pip install labelbox"
 },
 {
 "cell_type": "code",
-"execution_count": 1,
+"execution_count": 2,
 "metadata": {},
 "outputs": [],
-"source": [
-"from labelbox import DataRow, Client\n",
-"from getpass import getpass\n",
-"import uuid\n",
-"import os"
-]
+"source": "from labelbox import DataRow, Client\nfrom getpass import getpass\nimport uuid\nimport os"
 },
 {
 "cell_type": "code",
-"execution_count": 2,
+"execution_count": 3,
 "metadata": {},
 "outputs": [],
-"source": [
-"# If you don't want to give google access to drive you can skip this cell\n",
-"# and manually set `API_KEY` below.\n",
-"\n",
-"COLAB = \"google.colab\" in str(get_ipython())\n",
-"if COLAB:\n",
-"    !pip install colab-env -qU\n",
-"    from colab_env import envvar_handler\n",
-"    envvar_handler.envload()\n",
-"\n",
-"API_KEY = os.environ.get(\"LABELBOX_API_KEY\")\n",
-"if not os.environ.get(\"LABELBOX_API_KEY\"):\n",
-"    API_KEY = getpass(\"Please enter your labelbox api key\")\n",
-"    if COLAB:\n",
-"        envvar_handler.add_env(\"LABELBOX_API_KEY\", API_KEY)"
-]
+"source": "# If you don't want to give google access to drive you can skip this cell\n# and manually set `API_KEY` below.\n\nCOLAB = \"google.colab\" in str(get_ipython())\nif COLAB:\n    !pip install colab-env -qU\n    from colab_env import envvar_handler\n    envvar_handler.envload()\n\nAPI_KEY = os.environ.get(\"LABELBOX_API_KEY\")\nif not os.environ.get(\"LABELBOX_API_KEY\"):\n    API_KEY = getpass(\"Please enter your labelbox api key\")\n    if COLAB:\n        envvar_handler.add_env(\"LABELBOX_API_KEY\", API_KEY)"
 },
 {
 "cell_type": "markdown",
@@ -72,37 +76,24 @@
 },
 {
 "cell_type": "code",
-"execution_count": 3,
+"execution_count": 4,
 "metadata": {},
 "outputs": [],
-"source": [
-"# Pick a project that has a dataset attached, data has external ids, and there are some labels\n",
-"# This will modify the project so just pick a dummy one that you don't care about\n",
-"PROJECT_ID = \"ckpnfquwy0kyg0y8t9rwb99cz\"\n",
-"# Only update this if you have an on-prem deployment\n",
-"ENDPOINT = \"https://api.labelbox.com/graphql\""
-]
+"source": "# Pick a project that has a dataset attached, data has external ids, and there are some labels\n# This will modify the project so just pick a dummy one that you don't care about\nPROJECT_ID = \"ckpnfquwy0kyg0y8t9rwb99cz\"\n# Only update this if you have an on-prem deployment\nENDPOINT = \"https://api.labelbox.com/graphql\""
 },
 {
 "cell_type": "code",
-"execution_count": 4,
+"execution_count": 5,
 "metadata": {},
 "outputs": [],
-"source": [
-"client = Client(api_key=API_KEY, endpoint=ENDPOINT)"
-]
+"source": "client = Client(api_key=API_KEY, endpoint=ENDPOINT)"
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": 6,
 "metadata": {},
 "outputs": [],
-"source": [
-"project = client.get_project(PROJECT_ID)\n",
-"dataset = next(project.datasets())\n",
-"# This is the same as\n",
-"# -> dataset = client.get_dataset(dataset_id)"
-]
+"source": "project = client.get_project(PROJECT_ID)\ndataset = next(project.datasets())\n# This is the same as\n# -> dataset = client.get_dataset(dataset_id)"
 },
 {
 "cell_type": "markdown",
@@ -113,17 +104,14 @@
 },
 {
 "cell_type": "code",
-"execution_count": 6,
+"execution_count": 7,
 "metadata": {},
 "outputs": [],
-"source": [
-"data_rows = dataset.data_rows()\n",
-"data_row = next(data_rows)"
-]
+"source": "data_rows = dataset.data_rows()\ndata_row = next(data_rows)"
 },
 {
 "cell_type": "code",
-"execution_count": 7,
+"execution_count": 8,
 "metadata": {},
 "outputs": [
 {
@@ -136,16 +124,11 @@
 ]
 }
 ],
-"source": [
-"# Url\n",
-"print(\"Associated dataset\", data_row.dataset())\n",
-"print(\"Associated label(s)\", next(data_row.labels()))\n",
-"print(\"External id\", data_row.external_id)"
-]
+"source": "# Url\nprint(\"Associated dataset\", data_row.dataset())\nprint(\"Associated label(s)\", next(data_row.labels()))\nprint(\"External id\", data_row.external_id)"
 },
 {
 "cell_type": "code",
-"execution_count": 8,
+"execution_count": 9,
 "metadata": {},
 "outputs": [
 {
@@ -156,11 +139,7 @@
 ]
 }
 ],
-"source": [
-"# External ids can be a reference to your internal datasets\n",
-"data_row = dataset.data_row_for_external_id(data_row.external_id)\n",
-"print(data_row)"
-]
+"source": "# External ids can be a reference to your internal datasets\ndata_row = dataset.data_row_for_external_id(data_row.external_id)\nprint(data_row)"
 },
 {
 "cell_type": "markdown",
@@ -172,7 +151,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 9,
+"execution_count": 10,
 "metadata": {},
 "outputs": [
 {
@@ -186,15 +165,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"dataset = client.create_dataset(name=\"testing-dataset\")\n",
-"dataset.create_data_row(row_data=\"https://picsum.photos/200/300\")\n",
-"\n",
-"# It is reccomended that you use external ids but optional.\n",
-"# These are useful for users to maintain references to a data_row.\n",
-"dataset.create_data_row(row_data=\"https://picsum.photos/200/300\",\n",
-"                        external_id=str(uuid.uuid4()))"
-]
+"source": "dataset = client.create_dataset(name=\"testing-dataset\")\ndataset.create_data_row(row_data=\"https://picsum.photos/200/300\")\n\n# It is reccomended that you use external ids but optional.\n# These are useful for users to maintain references to a data_row.\ndataset.create_data_row(row_data=\"https://picsum.photos/200/300\",\n                        external_id=str(uuid.uuid4()))"
 },
 {
 "cell_type": "markdown",
@@ -205,61 +176,35 @@
 },
 {
 "cell_type": "code",
-"execution_count": 10,
+"execution_count": 11,
 "metadata": {},
 "outputs": [],
-"source": [
-"task1 = dataset.create_data_rows([{\n",
-"    DataRow.row_data: \"https://picsum.photos/200/300\"\n",
-"}, {\n",
-"    DataRow.row_data: \"https://picsum.photos/200/300\"\n",
-"}])"
-]
+"source": "task1 = dataset.create_data_rows([{\n    DataRow.row_data: \"https://picsum.photos/200/300\"\n}, {\n    DataRow.row_data: \"https://picsum.photos/200/300\"\n}])"
 },
 {
 "cell_type": "code",
-"execution_count": 11,
+"execution_count": 12,
 "metadata": {},
 "outputs": [],
-"source": [
-"# Local paths\n",
-"local_data_path = '/tmp/test_data_row.txt'\n",
-"with open(local_data_path, 'w') as file:\n",
-"    file.write(\"sample data\")\n",
-"\n",
-"task2 = dataset.create_data_rows([local_data_path])"
-]
+"source": "# Local paths\nlocal_data_path = '/tmp/test_data_row.txt'\nwith open(local_data_path, 'w') as file:\n    file.write(\"sample data\")\n\ntask2 = dataset.create_data_rows([local_data_path])"
 },
 {
 "cell_type": "code",
-"execution_count": 12,
+"execution_count": 13,
 "metadata": {},
 "outputs": [],
-"source": [
-"# You can mix local files with urls\n",
-"task3 = dataset.create_data_rows([{\n",
-"    DataRow.row_data: \"https://picsum.photos/200/300\"\n",
-"}, local_data_path])"
-]
+"source": "# You can mix local files with urls\ntask3 = dataset.create_data_rows([{\n    DataRow.row_data: \"https://picsum.photos/200/300\"\n}, local_data_path])"
 },
 {
 "cell_type": "code",
-"execution_count": 13,
+"execution_count": 14,
 "metadata": {},
 "outputs": [],
-"source": [
-"# Note that you cannot set external_ids at this time when uploading from local files.\n",
-"# To do this you have to first\n",
-"item_url = client.upload_file(local_data_path)\n",
-"task4 = dataset.create_data_rows([{\n",
-"    DataRow.row_data: item_url,\n",
-"    DataRow.external_id: str(uuid.uuid4())\n",
-"}])"
-]
+"source": "# Note that you cannot set external_ids at this time when uploading from local files.\n# To do this you have to first\nitem_url = client.upload_file(local_data_path)\ntask4 = dataset.create_data_rows([{\n    DataRow.row_data: item_url,\n    DataRow.external_id: str(uuid.uuid4())\n}])"
 },
 {
 "cell_type": "code",
-"execution_count": 14,
+"execution_count": 15,
 "metadata": {},
 "outputs": [
 {
@@ -270,15 +215,7 @@
 ]
 }
 ],
-"source": [
-"# Blocking wait until complete\n",
-"task1.wait_till_done()\n",
-"task2.wait_till_done()\n",
-"task3.wait_till_done()\n",
-"task4.wait_till_done()\n",
-"\n",
-"print(task1.status, task2.status, task3.status, task4.status)"
-]
+"source": "# Blocking wait until complete\ntask1.wait_till_done()\ntask2.wait_till_done()\ntask3.wait_till_done()\ntask4.wait_till_done()\n\nprint(task1.status, task2.status, task3.status, task4.status)"
 },
 {
 "cell_type": "markdown",
@@ -289,7 +226,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 15,
+"execution_count": 16,
 "metadata": {},
 "outputs": [
 {
@@ -300,16 +237,11 @@
 ]
 }
 ],
-"source": [
-"# Useful for resigning urls\n",
-"new_id = str(uuid.uuid4())\n",
-"data_row.update(external_id=new_id)\n",
-"print(data_row.external_id, new_id)"
-]
+"source": "# Useful for resigning urls\nnew_id = str(uuid.uuid4())\ndata_row.update(external_id=new_id)\nprint(data_row.external_id, new_id)"
 },
 {
 "cell_type": "code",
-"execution_count": 16,
+"execution_count": 17,
 "metadata": {},
 "outputs": [
 {
@@ -323,14 +255,7 @@
 "output_type": "execute_result"
 }
 ],
-"source": [
-"# We can also create attachments\n",
-"# Attachments are visible for all projects connected to the data_row \n",
-"data_row.create_attachment(attachment_type=\"TEXT\", attachment_value=\"LABELERS WILL SEE THIS \")\n",
-"# See more information here:\n",
-"# https://docs.labelbox.com/reference/type-image\n",
-"# Note that attachment_value must always be a string (url to a video/image or a text value to display)"
-]
+"source": "# We can also create attachments\n# Attachments are visible for all projects connected to the data_row\ndata_row.create_attachment(attachment_type=\"TEXT\",\n                           attachment_value=\"LABELERS WILL SEE THIS \")\n# See more information here:\n# https://docs.labelbox.com/reference/type-image\n# Note that attachment_value must always be a string (url to a video/image or a text value to display)"
 },
 {
 "cell_type": "markdown",
@@ -341,23 +266,17 @@
 },
 {
 "cell_type": "code",
-"execution_count": 17,
+"execution_count": 18,
 "metadata": {},
 "outputs": [],
-"source": [
-"data_row.delete()\n",
-"# Will remove from the dataset too"
-]
+"source": "data_row.delete()\n# Will remove from the dataset too"
 },
 {
 "cell_type": "code",
-"execution_count": 18,
+"execution_count": 19,
 "metadata": {},
 "outputs": [],
-"source": [
-"# Bulk delete a list of data_rows (in this case all of them we just uploaded)\n",
-"DataRow.bulk_delete(list(dataset.data_rows()))"
-]
+"source": "# Bulk delete a list of data_rows (in this case all of them we just uploaded)\nDataRow.bulk_delete(list(dataset.data_rows()))"
 },
 ],
 "metadata": {
@@ -381,4 +300,4 @@
 },
 "nbformat": 4,
 "nbformat_minor": 5
-}
+}
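For completeness, a quick way to spot-check that every example notebook now starts with the two header cells is sketched below. This is an assumption-laden illustration rather than part of the commit: it presumes the repository layout used above and that the logo and badge cells are always the first two cells.

# Sketch only: report which example notebooks begin with the two markdown header cells.
import glob
import nbformat

for path in sorted(glob.glob("examples/**/*.ipynb", recursive=True)):
    nb = nbformat.read(path, as_version=4)
    first_two = nb.cells[:2]
    ok = (
        len(first_two) == 2
        and all(cell.cell_type == "markdown" for cell in first_two)
        and "labelbox.com" in first_two[0].source
        and "colab.research.google.com" in first_two[1].source
    )
    print(("OK      " if ok else "MISSING ") + path)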
