@@ -48,7 +48,9 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "!pip install labelbox"
+   "source": [
+    "!pip install labelbox"
+   ]
   },
   {
    "cell_type": "code",
@@ -58,7 +60,10 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "from labelbox import Client, OntologyBuilder, Tool, Classification, Option\nimport json"
+   "source": [
+    "from labelbox import Client, OntologyBuilder, Tool, Classification, Option\n",
+    "import json"
+   ]
   },
   {
    "cell_type": "code",
@@ -68,7 +73,11 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "# Add your api key\nAPI_KEY = None\nENDPOINT = \"https://api.labelbox.com/graphql\"\nclient = Client(api_key=API_KEY, endpoint=ENDPOINT)"
+   "source": [
+    "# Add your api key\n",
+    "API_KEY = None\n",
+    "client = Client(api_key=API_KEY)"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -88,7 +97,23 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "# This will automatically create new feature schema\nontology_name = \"sdk-ontology\"\nfeature_schema_cat_normalized = {\n    'tool': 'polygon',\n    'name': 'cat',\n    'color': 'black'\n}\n\nontology_normalized_json = {\n    \"tools\": [feature_schema_cat_normalized],\n    \"classifications\": []\n}\nontology = client.create_ontology(name=ontology_name,\n    normalized=ontology_normalized_json)\nprint(ontology)"
+   "source": [
+    "# This will automatically create new feature schema\n",
+    "ontology_name = \"sdk-ontology\"\n",
+    "feature_schema_cat_normalized = {\n",
+    "    'tool': 'polygon',\n",
+    "    'name': 'cat',\n",
+    "    'color': 'black'\n",
+    "}\n",
+    "\n",
+    "ontology_normalized_json = {\n",
+    "    \"tools\": [feature_schema_cat_normalized],\n",
+    "    \"classifications\": []\n",
+    "}\n",
+    "ontology = client.create_ontology(name=ontology_name,\n",
+    "    normalized=ontology_normalized_json)\n",
+    "print(ontology)"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -108,7 +133,14 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "# First create the feature schema\nfeature_schema_cat = client.create_feature_schema(feature_schema_cat_normalized)\n# When we create the ontology it will not re-create the feature schema\nprint(feature_schema_cat.uid)\nontology = client.create_ontology_from_feature_schemas(ontology_name,\n    [feature_schema_cat.uid])"
+   "source": [
+    "# First create the feature schema\n",
+    "feature_schema_cat = client.create_feature_schema(feature_schema_cat_normalized)\n",
+    "# When we create the ontology it will not re-create the feature schema\n",
+    "print(feature_schema_cat.uid)\n",
+    "ontology = client.create_ontology_from_feature_schemas(ontology_name,\n",
+    "    [feature_schema_cat.uid])"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -129,7 +161,20 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "# Create new dog schema id\nfeature_schema_dog_normalized = {\n    'tool': 'polygon',\n    'name': 'dog',\n    'color': 'black',\n    'classifications': [],\n}\nfeature_schema_dog = client.create_feature_schema(feature_schema_dog_normalized)\n# The cat is shared between this new ontology and the one we created previously\n# (ie. the cat feature schema will not be re-created)\nontology = client.create_ontology_from_feature_schemas(\n    ontology_name, [feature_schema_cat.uid, feature_schema_dog.uid])"
+   "source": [
+    "# Create new dog schema id\n",
+    "feature_schema_dog_normalized = {\n",
+    "    'tool': 'polygon',\n",
+    "    'name': 'dog',\n",
+    "    'color': 'black',\n",
+    "    'classifications': [],\n",
+    "}\n",
+    "feature_schema_dog = client.create_feature_schema(feature_schema_dog_normalized)\n",
+    "# The cat is shared between this new ontology and the one we created previously\n",
+    "# (ie. the cat feature schema will not be re-created)\n",
+    "ontology = client.create_ontology_from_feature_schemas(\n",
+    "    ontology_name, [feature_schema_cat.uid, feature_schema_dog.uid])"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -149,7 +194,13 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "#### Fetch by ID\nfeature_schema = client.get_feature_schema(feature_schema_cat.uid)\nontology = client.get_ontology(ontology.uid)\nprint(feature_schema)\nprint(ontology)"
+   "source": [
+    "#### Fetch by ID\n",
+    "feature_schema = client.get_feature_schema(feature_schema_cat.uid)\n",
+    "ontology = client.get_ontology(ontology.uid)\n",
+    "print(feature_schema)\n",
+    "print(ontology)"
+   ]
   },
   {
    "cell_type": "code",
@@ -159,7 +210,13 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "#### Search by name\nfeature_schema = next(client.get_feature_schemas(\"cat\"))\nontology = next(client.get_ontologies(ontology_name))\nprint(feature_schema)\nprint(ontology)"
+   "source": [
+    "#### Search by name\n",
+    "feature_schema = next(client.get_feature_schemas(\"cat\"))\n",
+    "ontology = next(client.get_ontologies(ontology_name))\n",
+    "print(feature_schema)\n",
+    "print(ontology)"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -189,7 +246,17 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "# Create normalized json with a bounding box and segmentation tool\nontology_builder = OntologyBuilder(tools=[\n    Tool(tool=Tool.Type.BBOX, name=\"dog\"),\n    Tool(tool=Tool.Type.SEGMENTATION, name=\"cat\"),\n])\n# Creating an ontology from this is easy\nontology = client.create_ontology(\"ontology-builder-ontology\",\n    ontology_builder.asdict())\nprint(json.dumps(ontology.normalized, indent=2))"
+   "source": [
+    "# Create normalized json with a bounding box and segmentation tool\n",
+    "ontology_builder = OntologyBuilder(tools=[\n",
+    "    Tool(tool=Tool.Type.BBOX, name=\"dog\"),\n",
+    "    Tool(tool=Tool.Type.SEGMENTATION, name=\"cat\"),\n",
+    "])\n",
+    "# Creating an ontology from this is easy\n",
+    "ontology = client.create_ontology(\"ontology-builder-ontology\",\n",
+    "    ontology_builder.asdict())\n",
+    "print(json.dumps(ontology.normalized, indent=2))"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -207,7 +274,18 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "# Create\nontology_builder = OntologyBuilder()\n# Append tools\ntool_dog = Tool(tool=Tool.Type.BBOX, name=\"dog\")\ntool_cat = Tool(tool=Tool.Type.SEGMENTATION, name=\"cat\")\nontology_builder.add_tool(tool_dog)\nontology_builder.add_tool(tool_cat)\nontology = client.create_ontology(\"ontology-builder-ontology\",\n    ontology_builder.asdict())\nprint(json.dumps(ontology.normalized, indent=2))"
+   "source": [
+    "# Create\n",
+    "ontology_builder = OntologyBuilder()\n",
+    "# Append tools\n",
+    "tool_dog = Tool(tool=Tool.Type.BBOX, name=\"dog\")\n",
+    "tool_cat = Tool(tool=Tool.Type.SEGMENTATION, name=\"cat\")\n",
+    "ontology_builder.add_tool(tool_dog)\n",
+    "ontology_builder.add_tool(tool_cat)\n",
+    "ontology = client.create_ontology(\"ontology-builder-ontology\",\n",
+    "    ontology_builder.asdict())\n",
+    "print(json.dumps(ontology.normalized, indent=2))"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -225,7 +303,25 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "ontology_builder = OntologyBuilder(\n    tools=[\n        Tool(tool=Tool.Type.BBOX, name=\"dog\"),\n        Tool(tool=Tool.Type.SEGMENTATION,\n            name=\"cat\",\n            classifications=[\n                Classification(class_type=Classification.Type.TEXT,\n                    instructions=\"name\")\n            ])\n    ],\n    classifications=[\n        Classification(class_type=Classification.Type.RADIO,\n            instructions=\"image_quality\",\n            options=[Option(value=\"clear\"),\n                Option(value=\"blurry\")])\n    ])\nprint(json.dumps(ontology_builder.asdict(), indent=2))"
+   "source": [
+    "ontology_builder = OntologyBuilder(\n",
+    "    tools=[\n",
+    "        Tool(tool=Tool.Type.BBOX, name=\"dog\"),\n",
+    "        Tool(tool=Tool.Type.SEGMENTATION,\n",
+    "            name=\"cat\",\n",
+    "            classifications=[\n",
+    "                Classification(class_type=Classification.Type.TEXT,\n",
+    "                    instructions=\"name\")\n",
+    "            ])\n",
+    "    ],\n",
+    "    classifications=[\n",
+    "        Classification(class_type=Classification.Type.RADIO,\n",
+    "            instructions=\"image_quality\",\n",
+    "            options=[Option(value=\"clear\"),\n",
+    "                Option(value=\"blurry\")])\n",
+    "    ])\n",
+    "print(json.dumps(ontology_builder.asdict(), indent=2))"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -243,7 +339,14 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "bbox_tool = Tool(tool=Tool.Type.BBOX, name=\"dog_box\")\npoly_tool = Tool(tool=Tool.Type.POLYGON, name=\"dog_poly\")\nseg_tool = Tool(tool=Tool.Type.SEGMENTATION, name=\"dog_seg\")\npoint_tool = Tool(tool=Tool.Type.POINT, name=\"dog_center\")\nline_tool = Tool(tool=Tool.Type.LINE, name=\"dog_orientation\")\nner_tool = Tool(tool=Tool.Type.NER, name=\"dog_reference\")"
+   "source": [
+    "bbox_tool = Tool(tool=Tool.Type.BBOX, name=\"dog_box\")\n",
+    "poly_tool = Tool(tool=Tool.Type.POLYGON, name=\"dog_poly\")\n",
+    "seg_tool = Tool(tool=Tool.Type.SEGMENTATION, name=\"dog_seg\")\n",
+    "point_tool = Tool(tool=Tool.Type.POINT, name=\"dog_center\")\n",
+    "line_tool = Tool(tool=Tool.Type.LINE, name=\"dog_orientation\")\n",
+    "ner_tool = Tool(tool=Tool.Type.NER, name=\"dog_reference\")"
+   ]
   },
   {
    "cell_type": "markdown",
@@ -262,7 +365,21 @@
     "scrolled": true
    },
    "outputs": [],
-   "source": "text_classification = Classification(class_type=Classification.Type.TEXT,\n    instructions=\"dog_name\")\nradio_classification = Classification(class_type=Classification.Type.CHECKLIST,\n    instructions=\"dog_breed\",\n    options=[Option(\"poodle\")])\ndropdown_classification = Classification(\n    class_type=Classification.Type.DROPDOWN,\n    instructions=\"dog_features\",\n    options=[Option(\"short\"), Option(\"fluffy\")])\nchecklist_classification = Classification(\n    class_type=Classification.Type.CHECKLIST,\n    instructions=\"background\",\n    options=[Option(\"at_park\"), Option(\"has_leash\")])"
+   "source": [
+    "text_classification = Classification(class_type=Classification.Type.TEXT,\n",
+    "    instructions=\"dog_name\")\n",
+    "radio_classification = Classification(class_type=Classification.Type.CHECKLIST,\n",
+    "    instructions=\"dog_breed\",\n",
+    "    options=[Option(\"poodle\")])\n",
+    "dropdown_classification = Classification(\n",
+    "    class_type=Classification.Type.DROPDOWN,\n",
+    "    instructions=\"dog_features\",\n",
+    "    options=[Option(\"short\"), Option(\"fluffy\")])\n",
+    "checklist_classification = Classification(\n",
+    "    class_type=Classification.Type.CHECKLIST,\n",
+    "    instructions=\"background\",\n",
+    "    options=[Option(\"at_park\"), Option(\"has_leash\")])"
+   ]
   }
  ],
  "metadata": {
@@ -286,4 +403,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 5
-}
+}