from labelbox.schema import role
from labelbox.schema.ontology import Tool, Classification

-
logger = logging.getLogger(__name__)

_LABELBOX_API_KEY = "LABELBOX_API_KEY"
@@ -563,7 +562,8 @@ def create_dataset(self, iam_integration=IAMIntegration._DEFAULT, **kwargs):
        )

        if not iam_integration.valid:
-            raise ValueError("Integration is not valid. Please select another.")
+            raise ValueError(
+                "Integration is not valid. Please select another.")

        self.execute(
            """mutation setSignerForDatasetPyApi($signerId: ID!, $datasetId: ID!) {
@@ -709,46 +709,86 @@ def get_data_row_ids_for_external_ids(
        return result

    def get_ontology(self, ontology_id):
+        """
+        Fetches an Ontology by id.
+
+        Args:
+            ontology_id (str): The id of the ontology to query for
+        Returns:
+            Ontology
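+
+        Example (illustrative; assumes an authenticated `client` and a valid id):
+        >>> ontology = client.get_ontology("<ontology_id>")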
+        """
        return self._get_single(Entity.Ontology, ontology_id)

-    def get_ontologies(self, name_contains: str):
+    def get_ontologies(self, name_contains):
+        """
+        Fetches all ontologies with names that match the name_contains string.
+
+        Args:
+            name_contains (str): the string to search ontology names by
+        Returns:
+            PaginatedCollection of Ontologies with names that match `name_contains`
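+
+        Example (illustrative; assumes an authenticated `client`; iterating the
+        returned PaginatedCollection pages through all matches):
+        >>> for ontology in client.get_ontologies("animals"):
+        ...     print(ontology.name)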
+        """
        query_str = """query getOntologiesPyApi($search: String, $filter: OntologyFilter, $from : String, $first: PageSize){
            ontologies(where: {filter: $filter, search: $search}, after: $from, first: $first){
                nodes {%s}
                nextCursor
            }
        }
        """ % query.results_query_part(Entity.Ontology)
-        res = PaginatedCollection(
-            self, query_str, {'search': name_contains, 'filter': {'status': 'ALL'}}, ['ontologies', 'nodes'],
-            Entity.Ontology, ['ontologies', 'nextCursor'])
+        params = {'search': name_contains, 'filter': {'status': 'ALL'}}
+        res = PaginatedCollection(self, query_str, params,
+                                  ['ontologies', 'nodes'], Entity.Ontology,
+                                  ['ontologies', 'nextCursor'])
        return res

    def get_root_schema_node(self, root_schema_id):
-        return self._get_single(Entity.RootSchemaNode, root_schema_id)
+        """
+        Fetches a root schema node by id.

+        Args:
+            root_schema_id (str): The id of the root schema node to query for
+        Returns:
+            RootSchemaNode
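+
+        Example (illustrative; assumes an authenticated `client` and a valid id):
+        >>> root_schema_node = client.get_root_schema_node("<root_schema_id>")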
+        """
+        return self._get_single(Entity.RootSchemaNode, root_schema_id)

    def get_root_schema_nodes(self, name_contains):
+        """
+        Fetches root schema nodes with names that match the name_contains string.
+
+        Args:
+            name_contains (str): the string to search root schema node names by
+        Returns:
+            PaginatedCollection of RootSchemaNodes with names that match `name_contains`
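+
+        Example (illustrative; assumes an authenticated `client`):
+        >>> for node in client.get_root_schema_nodes("cat"):
+        ...     print(node.normalized)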
+        """
        query_str = """query getRootSchemaNodePyApi($search: String, $filter: RootSchemaNodeFilter, $from : String, $first: PageSize){
            rootSchemaNodes(where: {filter: $filter, search: $search}, after: $from, first: $first){
                nodes {%s}
                nextCursor
            }
        }
        """ % query.results_query_part(Entity.RootSchemaNode)
-        return PaginatedCollection(
-            self, query_str, {'search': name_contains, 'filter': {'status': 'ALL'}}, ['rootSchemaNodes', 'nodes'],
-            Entity.RootSchemaNode, ['rootSchemaNodes', 'nextCursor'])
+        params = {'search': name_contains, 'filter': {'status': 'ALL'}}
+        return PaginatedCollection(self, query_str, params,
+                                   ['rootSchemaNodes', 'nodes'],
+                                   Entity.RootSchemaNode,
+                                   ['rootSchemaNodes', 'nextCursor'])

-
-    def create_ontology_from_root_schema_nodes(self, name, schema_node_ids):
+    def create_ontology_from_root_schema_nodes(self, name,
+                                               root_schema_node_ids):
        """
-        Convenient way to create feature schema nodes.
-        If you want to mix new and old, you have to create w/ ontology builder and use create ontology..
+        Creates an ontology from a list of root schema node ids.
+        This will reuse the schema nodes instead of making a copy.
+
+        Args:
+            name (str): Name of the ontology
+            root_schema_node_ids (List[str]): List of root schema node ids to include in the ontology
+        Returns:
+            The created Ontology
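+
+        Example (illustrative; assumes an authenticated `client` and existing root schema node ids):
+        >>> ids = ["<root_schema_node_id_1>", "<root_schema_node_id_2>"]
+        >>> ontology = client.create_ontology_from_root_schema_nodes("ontology-name", ids)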
        """
        tools, classifications = [], []
-        for schema_node_id in schema_node_ids:
-            schema_node = self.get_root_schema_node(schema_node_id)
+        for schema_node_id in root_schema_node_ids:
+            schema_node = self.get_root_schema_node(schema_node_id)
            tool = schema_node.normalized['tool']
            try:
                Tool.Type(tool)
@@ -758,46 +798,67 @@ def create_ontology_from_root_schema_nodes(self, name, schema_node_ids):
                    Classification.Type(tool)
                    classifications.append(schema_node.normalized)
                except ValueError:
-                    raise ValueError(f"Tool `{tool}` not in list of supported tools or classifications.")
-        return self.create_ontology(name, {'tools': tools, 'classifications': classifications})
-
+                    raise ValueError(
+                        f"Tool `{tool}` not in list of supported tools or classifications."
+                    )
+        normalized = {'tools': tools, 'classifications': classifications}
+        return self.create_ontology(name, normalized)

-    def create_ontology(self, name, normalized_json):
+    def create_ontology(self, name, normalized):
        """
-        Creates an ontology from normalized json.
-
-        Use the OntologyBuilder to easily build normalized json
+        Creates an ontology from normalized data
+        >>> normalized = {"tools": [{'tool': 'polygon', 'name': 'cat', 'color': 'black'}], "classifications": []}
+        >>> ontology = client.create_ontology("ontology-name", normalized)

-        >>> Ontology
-
-
-        You can also add reuse existing feature schema nodes
+        Or use the ontology builder. It is especially useful for complex ontologies
+        >>> normalized = OntologyBuilder(tools=[Tool(tool=Tool.Type.BBOX, name="cat", color='black')]).asdict()
+        >>> ontology = client.create_ontology("ontology-name", normalized)

+        To reuse existing root schema nodes, use `create_ontology_from_root_schema_nodes()`
+        More details can be found here:
+        https://github.com/Labelbox/labelbox-python/blob/develop/examples/basics/ontologies.ipynb

+        Args:
+            name (str): Name of the ontology
+            normalized (dict): A normalized ontology payload. See above for details.
+        Returns:
+            The created Ontology
        """
        query_str = """mutation upsertRootSchemaNodePyApi($data: UpsertOntologyInput!){
-            upsertOntology(data: $data){
-                %s
-            }
+            upsertOntology(data: $data){ %s }
        } """ % query.results_query_part(Entity.Ontology)
-        if normalized_json is None:
-            if root_schema_ids is None:
-                raise ValueError("Must provide either a normalized ontology or a list of root_schema_ids")
-            return root_schema_ids
-
-        res = self.execute(query_str, {'data': {'name': name, 'normalized': json.dumps(normalized_json)}})
+        params = {'data': {'name': name, 'normalized': json.dumps(normalized)}}
+        res = self.execute(query_str, params)
        return Entity.Ontology(self, res['upsertOntology'])

+    def create_root_schema_node(self, normalized):
+        """
+        Creates a root schema node from normalized data.
+        >>> normalized = {'tool': 'polygon', 'name': 'cat', 'color': 'black'}
+        >>> root_schema_node = client.create_root_schema_node(normalized)
+
+        Or use the Tool or Classification objects. It is especially useful for complex tools.
+        >>> normalized = Tool(tool=Tool.Type.BBOX, name="cat", color='black').asdict()
+        >>> root_schema_node = client.create_root_schema_node(normalized)

-    def create_root_schema_node(self, normalized_json):
+        More details can be found here:
+        https://github.com/Labelbox/labelbox-python/blob/develop/examples/basics/ontologies.ipynb
+
+        Args:
+            normalized (dict): A normalized tool or classification payload. See above for details
+        Returns:
+            The created RootSchemaNode.
+        """
        query_str = """mutation upsertRootSchemaNodePyApi($data: UpsertRootSchemaNodeInput!){
-            upsertRootSchemaNode(data: $data){
-                %s
-            }
+            upsertRootSchemaNode(data: $data){ %s }
        } """ % query.results_query_part(Entity.RootSchemaNode)
-        # TODO: Is this necessary?
-        normalized_json = {k: v for k, v in normalized_json.items() if v}
-        # Check color. Quick gotcha..
+        normalized = {k: v for k, v in normalized.items() if v}
+
+        # The OntologyBuilder automatically assigns colors when calling asdict(),
+        # but Tool and Classification objects do not, so check here to prevent 500 errors.
-        if 'color' not in normalized_json:
+        if 'color' not in normalized:
            raise KeyError("Must provide color.")
-        return Entity.RootSchemaNode(self, self.execute(query_str, {'data': {'normalized': json.dumps(normalized_json)}})['upsertRootSchemaNode'])
+        params = {'data': {'normalized': json.dumps(normalized)}}
+        res = self.execute(query_str, params)
+        return Entity.RootSchemaNode(self, res['upsertRootSchemaNode'])