
Commit ccf36bb

Author: Matt Sokoloff
Message: added doc strings and cleanup
1 parent 9408395 commit ccf36bb
File tree: 8 files changed, +215 -69 lines changed

examples/basics/ontologies.ipynb

Lines changed: 1 addition & 2 deletions
@@ -68,8 +68,7 @@
  "root_schema_node_cat_normalized_json = {\n",
  " 'tool': 'polygon', \n",
  " 'name': 'cat', \n",
- " 'color': 'black', \n",
- " 'classifications': [], \n",
+ " 'color': 'black'\n",
  "}\n",
  "\n",
  "ontology_normalized_json = {\"tools\" : [root_schema_node_cat_normalized_json], \"classifications\" : []}\n",

labelbox/client.py

Lines changed: 105 additions & 44 deletions
@@ -24,7 +24,6 @@
 from labelbox.schema import role
 from labelbox.schema.ontology import Tool, Classification
 
-
 logger = logging.getLogger(__name__)
 
 _LABELBOX_API_KEY = "LABELBOX_API_KEY"

@@ -563,7 +562,8 @@ def create_dataset(self, iam_integration=IAMIntegration._DEFAULT, **kwargs):
         )
 
         if not iam_integration.valid:
-            raise ValueError("Integration is not valid. Please select another.")
+            raise ValueError(
+                "Integration is not valid. Please select another.")
 
         self.execute(
             """mutation setSignerForDatasetPyApi($signerId: ID!, $datasetId: ID!) {
@@ -709,46 +709,86 @@ def get_data_row_ids_for_external_ids(
         return result
 
     def get_ontology(self, ontology_id):
+        """
+        Fetches an Ontology by id.
+
+        Args:
+            ontology_id (str): The id of the ontology to query for
+        Returns:
+            Ontology
+        """
         return self._get_single(Entity.Ontology, ontology_id)
 
-    def get_ontologies(self, name_contains: str):
+    def get_ontologies(self, name_contains):
+        """
+        Fetches all ontologies with names that match the name_contains string.
+
+        Args:
+            name_contains (str): the string to search ontology names by
+        Returns:
+            PaginatedCollection of Ontologies with names that match `name_contains`
+        """
         query_str = """query getOntologiesPyApi($search: String, $filter: OntologyFilter, $from : String, $first: PageSize){
             ontologies(where: {filter: $filter, search: $search}, after: $from, first: $first){
                 nodes {%s}
                 nextCursor
             }
         }
         """ % query.results_query_part(Entity.Ontology)
-        res = PaginatedCollection(
-            self, query_str, {'search' : name_contains, 'filter' :{'status' : 'ALL'}}, ['ontologies', 'nodes'],
-            Entity.Ontology, ['ontologies', 'nextCursor'])
+        params = {'search': name_contains, 'filter': {'status': 'ALL'}}
+        res = PaginatedCollection(self, query_str, params,
+                                  ['ontologies', 'nodes'], Entity.Ontology,
+                                  ['ontologies', 'nextCursor'])
         return res
 
     def get_root_schema_node(self, root_schema_id):
-        return self._get_single(Entity.RootSchemaNode, root_schema_id)
+        """
+        Fetches a root schema node by id.
+
+        Args:
+            root_schema_id (str): The id of the root schema node to query for
+        Returns:
+            RootSchemaNode
+        """
+        return self._get_single(Entity.RootSchemaNode, root_schema_id)
 
     def get_root_schema_nodes(self, name_contains):
+        """
+        Fetches root schema nodes with names that match the name_contains string.
+
+        Args:
+            name_contains (str): the string to search root schema node names by
+        Returns:
+            PaginatedCollection of RootSchemaNodes with names that match `name_contains`
+        """
         query_str = """query getRootSchemaNodePyApi($search: String, $filter: RootSchemaNodeFilter, $from : String, $first: PageSize){
             rootSchemaNodes(where: {filter: $filter, search: $search}, after: $from, first: $first){
                 nodes {%s}
                 nextCursor
             }
         }
         """ % query.results_query_part(Entity.RootSchemaNode)
-        return PaginatedCollection(
-            self, query_str, {'search' : name_contains, 'filter' :{'status' : 'ALL'}}, ['rootSchemaNodes', 'nodes'],
-            Entity.RootSchemaNode, ['rootSchemaNodes', 'nextCursor'])
+        params = {'search': name_contains, 'filter': {'status': 'ALL'}}
+        return PaginatedCollection(self, query_str, params,
+                                   ['rootSchemaNodes', 'nodes'],
+                                   Entity.RootSchemaNode,
+                                   ['rootSchemaNodes', 'nextCursor'])
 
-
-    def create_ontology_from_root_schema_nodes(self, name, schema_node_ids):
+    def create_ontology_from_root_schema_nodes(self, name,
+                                               root_schema_node_ids):
         """
-        Convenient way to create feature schema nodes.
-        If you want to mix new and old, you have to create w/ ontology builder and use create ontology..
+        Creates an ontology from a list of root schema node ids.
+        This will reuse the schema nodes instead of making a copy.
+
+        Args:
+            name (str): Name of the ontology
+            root_schema_node_ids (List[str]): List of root schema node ids to include in the ontology
+        Returns:
+            The created Ontology
         """
         tools, classifications = [], []
-        for schema_node_id in schema_node_ids:
+        for schema_node_id in root_schema_node_ids:
             schema_node = self.get_root_schema_node(schema_node_id)
             tool = schema_node.normalized['tool']
             try:
                 Tool.Type(tool)
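
Taken together, the new getters and the schema-node-reusing creator compose as below. This is a sketch, assuming a configured Client and at least one existing root schema node whose name contains "cat" (the ontology name is illustrative):

from labelbox import Client

client = Client()

# Substring search over root schema node names; returns a PaginatedCollection.
# Searching may lag slightly behind creation (see the tests below).
cat_nodes = client.get_root_schema_nodes("cat")
node_ids = [node.uid for node in cat_nodes]

# Reuses the existing schema nodes instead of copying them.
ontology = client.create_ontology_from_root_schema_nodes(
    name="cats-only", root_schema_node_ids=node_ids)

# Round-trip by id and by name with the other new getters.
assert client.get_ontology(ontology.uid).name == "cats-only"
assert any(o.uid == ontology.uid for o in client.get_ontologies("cats-only"))
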
@@ -758,46 +798,67 @@ def create_ontology_from_root_schema_nodes(self, name, schema_node_ids):
                 Classification.Type(tool)
                 classifications.append(schema_node.normalized)
             except ValueError:
-                raise ValueError(f"Tool `{tool}` not in list of supported tools or classifications.")
-        return self.create_ontology(name, {'tools' : tools, 'classifications' : classifications})
-
+                raise ValueError(
+                    f"Tool `{tool}` not in list of supported tools or classifications."
+                )
+        normalized = {'tools': tools, 'classifications': classifications}
+        return self.create_ontology(name, normalized)
 
-    def create_ontology(self, name , normalized_json):
+    def create_ontology(self, name, normalized):
         """
-        Creates an ontology from normalized json.
-
-        Use the OntologyBuilder to easily build normalized json
-
-        >>> Ontology
-
-
-        You can also add reuse existing feature schema nodes
+        Creates an ontology from normalized data.
+        >>> normalized = {"tools" : [{'tool': 'polygon', 'name': 'cat', 'color': 'black'}], "classifications" : []}
+        >>> ontology = client.create_ontology("ontology-name", normalized)
+
+        Or use the ontology builder. It is especially useful for complex ontologies.
+        >>> normalized = OntologyBuilder(tools=[Tool(tool=Tool.Type.BBOX, name="cat", color='black')]).asdict()
+        >>> ontology = client.create_ontology("ontology-name", normalized)
+
+        To reuse existing root schema nodes, use `create_ontology_from_root_schema_nodes()`.
+        More details can be found here:
+        https://github.com/Labelbox/labelbox-python/blob/develop/examples/basics/ontologies.ipynb
+
+        Args:
+            name (str): Name of the ontology
+            normalized (dict): A normalized ontology payload. See above for details.
+        Returns:
+            The created Ontology
         """
         query_str = """mutation upsertRootSchemaNodePyApi($data: UpsertOntologyInput!){
-            upsertOntology(data: $data){
-                %s
-            }
+            upsertOntology(data: $data){ %s }
         } """ % query.results_query_part(Entity.Ontology)
-        if normalized_json is None:
-            if root_schema_ids is None:
-                raise ValueError("Must provide either a normalized ontology or a list of root_schema_ids")
-            return root_schema_ids
-
-        res = self.execute(query_str, {'data' : {'name' : name ,'normalized' : json.dumps(normalized_json)}})
+        params = {'data': {'name': name, 'normalized': json.dumps(normalized)}}
+        res = self.execute(query_str, params)
         return Entity.Ontology(self, res['upsertOntology'])
 
-    def create_root_schema_node(self, normalized_json):
+    def create_root_schema_node(self, normalized):
+        """
+        Creates a root schema node from normalized data.
+        >>> normalized = {'tool': 'polygon', 'name': 'cat', 'color': 'black'}
+        >>> root_schema_node = client.create_root_schema_node(normalized)
+
+        Or use the Tool or Classification objects. It is especially useful for complex tools.
+        >>> normalized = Tool(tool=Tool.Type.BBOX, name="cat", color='black').asdict()
+        >>> root_schema_node = client.create_root_schema_node(normalized)
+
+        More details can be found here:
+        https://github.com/Labelbox/labelbox-python/blob/develop/examples/basics/ontologies.ipynb
+
+        Args:
+            normalized (dict): A normalized tool or classification payload. See above for details.
+        Returns:
+            The created RootSchemaNode.
+        """
         query_str = """mutation upsertRootSchemaNodePyApi($data: UpsertRootSchemaNodeInput!){
-            upsertRootSchemaNode(data: $data){
-                %s
-            }
+            upsertRootSchemaNode(data: $data){ %s }
         } """ % query.results_query_part(Entity.RootSchemaNode)
-        # TODO: Is this necessary?
-        normalized_json = {k:v for k,v in normalized_json.items() if v}
-        # Check color. Quick gotcha..
-        if 'color' not in normalized_json:
+        normalized = {k: v for k, v in normalized.items() if v}
+
+        # The OntologyBuilder automatically assigns colors when calling asdict(),
+        # but Tools and Classifications do not. So we check here to prevent 500 errors.
+        if 'color' not in normalized:
             raise KeyError("Must provide color.")
-        return Entity.RootSchemaNode(self, self.execute(query_str, {'data' : {'normalized' : json.dumps(normalized_json)}})['upsertRootSchemaNode'])
+        params = {'data': {'normalized': json.dumps(normalized)}}
+        res = self.execute(query_str, params)
+        return Entity.RootSchemaNode(self, res['upsertRootSchemaNode'])
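
The two creation paths added above, side by side. This sketch mirrors the docstring doctests; the names, colors, and client setup are illustrative:

from labelbox import Client, Tool

client = Client()

# Path 1: a raw normalized dict. 'color' is required, per the KeyError check above.
cat = client.create_root_schema_node(
    {'tool': 'polygon', 'name': 'cat', 'color': 'black'})

# Path 2: build the payload with the Tool helper instead.
dog = client.create_root_schema_node(
    Tool(tool=Tool.Type.BBOX, name="dog", color='#ff0000').asdict())

# Either way, the resulting nodes can be folded into one ontology.
ontology = client.create_ontology_from_root_schema_nodes(
    name="pets", root_schema_node_ids=[cat.uid, dog.uid])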

labelbox/schema/ontology.py

Lines changed: 0 additions & 5 deletions
@@ -16,7 +16,6 @@
 SchemaId: Type[str] = constr(min_length=25, max_length=25)
 
 
-
 class RootSchemaNode(DbObject):
     name = Field.String("name")
     color = Field.String("name")
@@ -360,7 +359,6 @@ def from_project(cls, project: "project.Project"):
     def from_ontology(cls, ontology: Ontology):
         return cls.from_dict(ontology.normalized)
 
-
     def add_tool(self, tool: Tool):
         if tool.name in (t.name for t in self.tools):
             raise InconsistentOntologyException(
@@ -374,6 +372,3 @@ def add_classification(self, classification: Classification):
                 f"Duplicate classification instructions '{classification.instructions}'. "
             )
         self.classifications.append(classification)
-
-
-
labelbox/schema/project.py

Lines changed: 16 additions & 7 deletions
@@ -1,6 +1,5 @@
 import enum
 import json
-from labelbox.schema.labeling_frontend import LabelingFrontend
 import logging
 import time
 import warnings
@@ -423,16 +422,26 @@ def review_metrics(self, net_score):
         res = self.client.execute(query_str, {id_param: self.uid})
         return res["project"]["reviewMetrics"]["labelAggregate"]["count"]
 
-
     def setup_editor(self, ontology):
-        fe = next(self.client.get_labeling_frontends(where = LabelingFrontend.name == "Editor"))
-        self.labeling_frontend.connect(fe)
-        query_str = """mutation ConnectOntology($projectId: ID!, $ontologyId: ID!) {project(where: {id: $projectId}) {connectOntology(ontologyId: $ontologyId) {id}}}"""
-        self.client.execute(query_str, {'ontologyId' : ontology.uid, 'projectId' : self.uid})
+        """
+        Sets up the project using the Pictor editor.
+
+        Args:
+            ontology (Ontology): The ontology to attach to the project
+        """
+        labeling_front_end = next(
+            self.client.get_labeling_frontends(
+                where=Entity.LabelingFrontend.name == "Editor"))
+        self.labeling_frontend.connect(labeling_front_end)
+        query_str = """mutation ConnectOntologyPyApi($projectId: ID!, $ontologyId: ID!){
+            project(where: {id: $projectId}) {connectOntology(ontologyId: $ontologyId) {id}}}"""
+        self.client.execute(query_str, {
+            'ontologyId': ontology.uid,
+            'projectId': self.uid
+        })
         timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
        self.update(setup_complete=timestamp)
 
-
     def setup(self, labeling_frontend, labeling_frontend_options):
         """ Finalizes the Project setup.
tests/integration/annotation_import/test_model.py

Lines changed: 0 additions & 3 deletions
@@ -29,6 +29,3 @@ def test_model_delete(client, model):
     after = list(client.get_models())
 
     assert len(before) == len(after) + 1
-
-
-

tests/integration/conftest.py

Lines changed: 0 additions & 1 deletion
@@ -333,4 +333,3 @@ def configured_project_with_label(client, rand_gen, image_url):
     yield [project, label]
     dataset.delete()
     project.delete()
-

tests/integration/test_ontology.py

Lines changed: 74 additions & 7 deletions
@@ -2,6 +2,9 @@
 
 from labelbox.exceptions import InconsistentOntologyException
 from labelbox import Tool, Classification, Option, OntologyBuilder
+from labelbox.orm.model import Entity
+import json
+import time
 
 _SAMPLE_ONTOLOGY = {
     "tools": [{
@@ -242,10 +245,74 @@ def test_from_project_ontology(client, project) -> None:
     assert o.asdict() == project.ontology().normalized
 
 
-
-def test_create_ontology(client, rand_gen):
-    name = f"test-ontology-{rand_gen(str)}"
-    normalized = {}
-    ontology = client.create_ontology(name = name)
-    assert
-
+def _get_attr_stringify_json(obj, attr):
+    value = getattr(obj, attr.name)
+    if attr.field_type.name.lower() == "json":
+        return json.dumps(value, sort_keys=True)
+    return value
+
+
+def test_root_schema_node_create_read(client, rand_gen):
+    name = f"test-root-schema-{rand_gen(str)}"
+    root_schema_node_cat_normalized_json = {
+        'tool': 'polygon',
+        'name': name,
+        'color': 'black',
+        'classifications': [],
+    }
+    created_root_schema_node = client.create_root_schema_node(
+        root_schema_node_cat_normalized_json)
+    queried_root_schema_node = client.get_root_schema_node(
+        created_root_schema_node.uid)
+    for attr in Entity.RootSchemaNode.fields():
+        assert _get_attr_stringify_json(created_root_schema_node,
+                                        attr) == _get_attr_stringify_json(
+                                            queried_root_schema_node, attr)
+
+    time.sleep(3)  # Slight delay for searching
+    queried_root_schema_nodes = client.get_root_schema_nodes(name)
+    assert [
+        root_schema_node.name for root_schema_node in queried_root_schema_nodes
+    ] == [name]
+    queried_root_schema_node = queried_root_schema_nodes[0]
+
+    for attr in Entity.RootSchemaNode.fields():
+        assert _get_attr_stringify_json(created_root_schema_node,
+                                        attr) == _get_attr_stringify_json(
+                                            queried_root_schema_node, attr)
+
+
+def test_ontology_create_read(client, rand_gen):
+    ontology_name = f"test-ontology-{rand_gen(str)}"
+    tool_name = f"test-ontology-tool-{rand_gen(str)}"
+    root_schema_node_cat_normalized_json = {
+        'tool': 'polygon',
+        'name': tool_name,
+        'color': 'black',
+        'classifications': [],
+    }
+    root_schema_node = client.create_root_schema_node(
+        root_schema_node_cat_normalized_json)
+    created_ontology = client.create_ontology_from_root_schema_nodes(
+        name=ontology_name, root_schema_node_ids=[root_schema_node.uid])
+    tool_normalized = created_ontology.normalized['tools'][0]
+    for k, v in root_schema_node_cat_normalized_json.items():
+        assert tool_normalized[k] == v
+    assert tool_normalized['schemaNodeId'] == root_schema_node.uid
+    assert tool_normalized['featureSchemaId'] is not None
+
+    queried_ontology = client.get_ontology(created_ontology.uid)
+
+    for attr in Entity.Ontology.fields():
+        assert _get_attr_stringify_json(created_ontology,
+                                        attr) == _get_attr_stringify_json(
+                                            queried_ontology, attr)
+
+    time.sleep(3)  # Slight delay for searching
+    queried_ontologies = client.get_ontologies(ontology_name)
+    assert [ontology.name for ontology in queried_ontologies] == [ontology_name]
+    queried_ontology = queried_ontologies[0]
+    for attr in Entity.Ontology.fields():
+        assert _get_attr_stringify_json(created_ontology,
+                                        attr) == _get_attr_stringify_json(
+                                            queried_ontology, attr)
