
Commit 9131d21

[PTDT-2863]: Feature schema attributes
1 parent b6427ed commit 9131d21

8 files changed: +143 −8 lines

feature-schema-attributes-testing.py

Lines changed: 106 additions & 0 deletions
@@ -0,0 +1,106 @@
import os
from libs.labelbox.src.labelbox import Client
from libs.labelbox.src.labelbox.schema.ontology import OntologyBuilder, Tool
from libs.labelbox.src.labelbox.schema.tool_building.classification import Classification, Option
from libs.labelbox.src.labelbox.schema.tool_building.types import FeatureSchemaAttribute
from libs.labelbox.src.labelbox.schema.media_type import MediaType
from libs.labelbox.src.labelbox.schema.ontology_kind import OntologyKind
import json


# client = Client(
#     api_key=os.environ.get('STAGE_API_KEY'),
#     endpoint="https://app.lb-stage.xyz/api/_gql/graphql",
#     rest_endpoint="https://app.lb-stage.xyz/api/api/v1")

client = Client(
    api_key=os.environ.get('LOCALHOST_API_KEY'),
    endpoint="http://localhost:8080/graphql",
    rest_endpoint="http://localhost:3000/api/api/v1")

builder = OntologyBuilder(
    tools=[
        Tool(
            name="Auto OCR",
            tool=Tool.Type.BBOX,
            attributes=[
                FeatureSchemaAttribute(
                    attributeName="auto-ocr",
                    attributeValue="true"
                )
            ],
            classifications=[
                Classification(
                    name="Auto ocr text class value",
                    instructions="This is an auto OCR text value classification",
                    class_type=Classification.Type.TEXT,
                    scope=Classification.Scope.GLOBAL,
                    attributes=[
                        FeatureSchemaAttribute(
                            attributeName="auto-ocr-text-value",
                            attributeValue="true"
                        )
                    ]
                )
            ]
        )
    ]
)

# client.create_ontology("Auto OCR ontology", builder.asdict(), media_type=MediaType.Document)

builder = OntologyBuilder(
    classifications=[
        Classification(
            name="prompt message scope text classification",
            instructions="This is a prompt message scoped text classification",
            class_type=Classification.Type.TEXT,
            scope=Classification.Scope.INDEX,
            attributes=[
                FeatureSchemaAttribute(
                    attributeName="prompt-message-scope",
                    attributeValue="true"
                )
            ]
        )
    ]
)

# client.create_ontology('MMC Ontology with prompt message scope class', builder.asdict(), media_type=MediaType.Conversational, ontology_kind=OntologyKind.ModelEvaluation)

builder = OntologyBuilder(
    classifications=[
        Classification(
            name="Requires connection checklist classification",
            instructions="This is a requires connection checklist classification",
            class_type=Classification.Type.CHECKLIST,
            scope=Classification.Scope.GLOBAL,
            attributes=[
                FeatureSchemaAttribute(
                    attributeName="required-connection",
                    attributeValue="true"
                )
            ],
            options=[
                Option(value='First option'),
                Option(value='Second option')
            ]
        )
    ]
)

# client.create_ontology('Image ontology with requires connection classes', builder.asdict(), media_type=MediaType.Image)


feature_schema = client.upsert_feature_schema(
    Tool(
        name='Auto OCR from upsert feature schema',
        tool=Tool.Type.BBOX,
        attributes=[FeatureSchemaAttribute(attributeName='auto-ocr', attributeValue='true')]
    ).asdict()
)
fetched_feature_schema = client.get_feature_schema(feature_schema.uid)
feature_schemas_with_name = client.get_feature_schemas('Auto OCR')

# Iterate over the feature schemas
for schema in feature_schemas_with_name:
    print(schema)
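
A follow-up check one might add here, not part of the commit: if the backend stores the new attributes, they should come back on the fetched schema's normalized payload. The exact shape of `normalized` after the round trip is an assumption, not something this diff confirms.

# Editor's sketch (not in the commit): assumes "attributes" is echoed back
# inside the schema's normalized JSON.
print(fetched_feature_schema.normalized.get("attributes"))
# e.g. [{'attributeName': 'auto-ocr', 'attributeValue': 'true'}] if preserved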

libs/labelbox/src/labelbox/client.py

Lines changed: 5 additions & 0 deletions
@@ -1073,6 +1073,7 @@ def rootSchemaPayloadToFeatureSchema(client, payload):
             ["rootSchemaNodes", "nextCursor"],
         )

+    # TODO tkerr:
     def create_ontology_from_feature_schemas(
         self,
         name,
@@ -1242,6 +1243,9 @@ def upsert_feature_schema(self, feature_schema: Dict) -> FeatureSchema:
             endpoint, json={"normalized": json.dumps(feature_schema)}
         )

+        print('json.dumps(feature_schema)')
+        print(json.dumps(feature_schema))
+
         if response.status_code == requests.codes.ok:
             return self.get_feature_schema(response.json()["schemaId"])
         else:
@@ -1328,6 +1332,7 @@ def get_unused_feature_schemas(self, after: str = None) -> List[str]:
             + str(response.json()["message"])
         )

+    # TODO tkerr:
     def create_ontology(
         self,
         name,

libs/labelbox/src/labelbox/schema/ontology.py

Lines changed: 6 additions & 2 deletions
@@ -25,6 +25,8 @@
 from labelbox.schema.tool_building.tool_type_mapping import (
     map_tool_type_to_tool_cls,
 )
+from labelbox.schema.tool_building.types import FeatureSchemaAttributes
+


 class DeleteFeatureFromOntologyResult:
@@ -43,7 +45,6 @@ class FeatureSchema(DbObject):
     color = Field.String("name")
     normalized = Field.Json("normalized")

-
 @dataclass
 class Tool:
     """
@@ -73,6 +74,7 @@ class Tool:
         classifications: (list)
         schema_id: (str)
         feature_schema_id: (str)
+        attributes: (list)
     """

     class Type(Enum):
@@ -95,7 +97,7 @@ class Type(Enum):
     classifications: List[Classification] = field(default_factory=list)
     schema_id: Optional[str] = None
     feature_schema_id: Optional[str] = None
-
+    attributes: Optional[FeatureSchemaAttributes] = None
     @classmethod
     def from_dict(cls, dictionary: Dict[str, Any]) -> Dict[str, Any]:
         return cls(
@@ -109,6 +111,7 @@ def from_dict(cls, dictionary: Dict[str, Any]) -> Dict[str, Any]:
                 for c in dictionary["classifications"]
             ],
             color=dictionary["color"],
+            attributes=dictionary.get("attributes", None),
         )

     def asdict(self) -> Dict[str, Any]:
@@ -122,6 +125,7 @@ def asdict(self) -> Dict[str, Any]:
             ],
             "schemaNodeId": self.schema_id,
             "featureSchemaId": self.feature_schema_id,
+            "attributes": self.attributes,
         }

     def add_classification(self, classification: Classification) -> None:
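
For reference, a minimal sketch (not part of the commit) of what the asdict() change above implies for a tool that carries an attribute. The import paths mirror the modules touched in this diff, and only the new "attributes" key is checked; the other serialized keys are unchanged.

from labelbox.schema.ontology import Tool
from labelbox.schema.tool_building.types import FeatureSchemaAttribute

# Editor's sketch: FeatureSchemaAttribute entries are plain dicts, so they pass
# straight through Tool.asdict() under the new "attributes" key.
tool = Tool(
    name="Auto OCR",
    tool=Tool.Type.BBOX,
    attributes=[FeatureSchemaAttribute(attributeName="auto-ocr", attributeValue="true")],
)
assert tool.asdict()["attributes"] == [
    {"attributeName": "auto-ocr", "attributeValue": "true"}
]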

libs/labelbox/src/labelbox/schema/tool_building/classification.py

Lines changed: 10 additions & 1 deletion
@@ -5,9 +5,10 @@

 from lbox.exceptions import InconsistentOntologyException

-from labelbox.schema.tool_building.types import FeatureSchemaId
+from labelbox.schema.tool_building.types import FeatureSchemaId, FeatureSchemaAttributes


+# TODO tkerr: Update all these tools & classifications to use attributes
 @dataclass
 class Classification:
     """
@@ -42,6 +43,7 @@ class Classification:
         schema_id: (str)
         feature_schema_id: (str)
         scope: (str)
+        attributes: (list)
     """

     class Type(Enum):
@@ -70,6 +72,7 @@ class UIMode(Enum):
     ui_mode: Optional[UIMode] = (
         None  # How this classification should be answered (e.g. hotkeys / autocomplete, etc)
     )
+    attributes: Optional[FeatureSchemaAttributes] = None

     def __post_init__(self):
         if self.name is None:
@@ -88,9 +91,13 @@ def __post_init__(self):
         else:
             if self.instructions is None:
                 self.instructions = self.name
+        if self.attributes is not None:
+            warnings.warn('Attributes are an experimental feature and may change in the future.')

     @classmethod
     def from_dict(cls, dictionary: Dict[str, Any]) -> "Classification":
+        print('attributes:')
+        print(dictionary.get("attributes", None))
         return cls(
             class_type=Classification.Type(dictionary["type"]),
             name=dictionary["name"],
@@ -103,6 +110,7 @@ def from_dict(cls, dictionary: Dict[str, Any]) -> "Classification":
             schema_id=dictionary.get("schemaNodeId", None),
             feature_schema_id=dictionary.get("featureSchemaId", None),
             scope=cls.Scope(dictionary.get("scope", cls.Scope.GLOBAL)),
+            attributes=FeatureSchemaAttributes(dictionary.get("attributes", None)),
         )

     def asdict(self, is_subclass: bool = False) -> Dict[str, Any]:
@@ -118,6 +126,7 @@ def asdict(self, is_subclass: bool = False) -> Dict[str, Any]:
             "options": [o.asdict() for o in self.options],
             "schemaNodeId": self.schema_id,
             "featureSchemaId": self.feature_schema_id,
+            "attributes": self.attributes if self.attributes is not None else None,
         }
         if (
             self.class_type == self.Type.RADIO
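
Similarly, a small sketch (not in the commit) of the Classification side: supplying attributes should emit the experimental-feature warning added in __post_init__ above (assuming `warnings` is imported in this module) and surface the values in asdict().

from labelbox.schema.tool_building.classification import Classification
from labelbox.schema.tool_building.types import FeatureSchemaAttribute

# Editor's sketch: construct a classification with an attribute and check that
# asdict() carries it through under the new "attributes" key.
clf = Classification(
    class_type=Classification.Type.TEXT,
    name="Auto ocr text class value",
    attributes=[FeatureSchemaAttribute(attributeName="auto-ocr-text-value", attributeValue="true")],
)
assert clf.asdict()["attributes"] == [
    {"attributeName": "auto-ocr-text-value", "attributeValue": "true"}
]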
libs/labelbox/src/labelbox/schema/tool_building/types.py

Lines changed: 10 additions & 2 deletions
@@ -1,6 +1,14 @@
-from typing import Annotated
+from typing import Annotated, List
+from pydantic import Field, BaseModel
+from typing import TypedDict

-from pydantic import Field
+
+class FeatureSchemaAttribute(TypedDict):
+    attributeName: str
+    attributeValue: str
+
+FeatureSchemaAttriubte = Annotated[FeatureSchemaAttribute, Field()]

 FeatureSchemaId = Annotated[str, Field(min_length=25, max_length=25)]
 SchemaId = Annotated[str, Field(min_length=25, max_length=25)]
+FeatureSchemaAttributes = Annotated[List[FeatureSchemaAttribute], Field(default_factory=list)]
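
Since FeatureSchemaAttribute is declared as a TypedDict, instances are ordinary dicts at runtime; a short sketch (not part of the commit):

from labelbox.schema.tool_building.types import FeatureSchemaAttribute

# Editor's sketch: TypedDict construction yields a plain dict, so attributes
# JSON-serialize without extra handling; FeatureSchemaAttributes is just a
# typing alias for a list of these.
attr = FeatureSchemaAttribute(attributeName="auto-ocr", attributeValue="true")
assert isinstance(attr, dict)
assert attr == {"attributeName": "auto-ocr", "attributeValue": "true"}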

libs/labelbox/tests/integration/test_ontology.py

Lines changed: 1 addition & 0 deletions
@@ -225,6 +225,7 @@ def feature_schema_cat_normalized(name_for_read):
         "name": name_for_read,
         "color": "black",
         "classifications": [],
+        "attributes": []  # TODO tkerr: Finish
     }


requirements-dev.lock

Lines changed: 3 additions & 2 deletions
@@ -6,6 +6,7 @@
 # features: []
 # all-features: true
 # with-sources: false
+# generate-hashes: false

 -e file:libs/labelbox
 -e file:libs/lbox-clients
@@ -133,7 +134,7 @@ nbconvert==7.16.4
 nbformat==5.10.4
     # via nbclient
     # via nbconvert
-numpy==1.24.4
+numpy==2.0.2
     # via labelbox
     # via opencv-python-headless
     # via pandas
@@ -147,7 +148,7 @@ packaging==24.1
     # via pytest-cases
     # via pytest-rerunfailures
     # via sphinx
-pandas==2.0.3
+pandas==2.2.3
 pandocfilters==1.5.1
     # via nbconvert
 parso==0.8.4

requirements.lock

Lines changed: 2 additions & 1 deletion
@@ -6,6 +6,7 @@
 # features: []
 # all-features: true
 # with-sources: false
+# generate-hashes: false

 -e file:libs/labelbox
 -e file:libs/lbox-clients
@@ -54,7 +55,7 @@ mypy==1.10.1
     # via labelbox
 mypy-extensions==1.0.0
     # via mypy
-numpy==1.24.4
+numpy==2.0.2
     # via labelbox
     # via opencv-python-headless
     # via shapely
