
Commit 91b61dc

Gabefire authored and Val Brodsky committed
[PLT-1487] Remaining sdk deprecated items removed (#1853)
1 parent a7ebd42 commit 91b61dc


20 files changed: +28, -539 lines


libs/labelbox/src/labelbox/__init__.py

Lines changed: 22 additions & 0 deletions
@@ -55,6 +55,28 @@
     ResponseOption,
     Tool,
 )
+from labelbox.schema.ontology import PromptResponseClassification
+from labelbox.schema.ontology import ResponseOption
+from labelbox.schema.role import Role, ProjectRole
+from labelbox.schema.invite import Invite, InviteLimit
+from labelbox.schema.data_row_metadata import (
+    DataRowMetadataOntology,
+    DataRowMetadataField,
+    DataRowMetadata,
+    DeleteDataRowMetadata,
+)
+from labelbox.schema.model_run import ModelRun, DataSplit
+from labelbox.schema.benchmark import Benchmark
+from labelbox.schema.iam_integration import IAMIntegration
+from labelbox.schema.resource_tag import ResourceTag
+from labelbox.schema.project_model_config import ProjectModelConfig
+from labelbox.schema.project_resource_tag import ProjectResourceTag
+from labelbox.schema.media_type import MediaType
+from labelbox.schema.slice import Slice, CatalogSlice, ModelSlice
+from labelbox.schema.task_queue import TaskQueue
+from labelbox.schema.label_score import LabelScore
+from labelbox.schema.identifiables import UniqueIds, GlobalKeys, DataRowIds
+from labelbox.schema.identifiable import UniqueId, GlobalKey
 from labelbox.schema.ontology_kind import OntologyKind
 from labelbox.schema.organization import Organization
 from labelbox.schema.project import Project
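
Usage note (not part of the diff): these explicit re-exports keep the schema classes resolvable from the package root. A minimal sketch, assuming this version of the SDK is installed; the id and key strings are placeholders:

import labelbox as lb

media_type = lb.MediaType.Image           # exported via labelbox/__init__.py
identifier = lb.UniqueId("DATA_ROW_ID")   # placeholder data row id
key = lb.GlobalKey("my-global-key")       # placeholder global key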

libs/labelbox/src/labelbox/client.py

Lines changed: 0 additions & 12 deletions
@@ -73,7 +73,6 @@
     CONSENSUS_AUTO_AUDIT_PERCENTAGE,
     QualityMode,
 )
-from labelbox.schema.queue_mode import QueueMode
 from labelbox.schema.role import Role
 from labelbox.schema.search_filters import SearchFilter
 from labelbox.schema.send_to_annotate_params import (
@@ -469,13 +468,11 @@ def _create(self, db_object_type, data, extra_params={}):
         res = self.execute(
             query_string, params, raise_return_resource_not_found=True
         )
-
         if not res:
             raise LabelboxError(
                 "Failed to create %s" % db_object_type.type_name()
             )
         res = res["create%s" % db_object_type.type_name()]
-
         return db_object_type(self, res)

     def create_model_config(
@@ -622,7 +619,6 @@ def create_project(
             name (str): A name for the project
             description (str): A short summary for the project
             media_type (MediaType): The type of assets that this project will accept
-            queue_mode (Optional[QueueMode]): The queue mode to use
             quality_modes (Optional[List[QualityMode]]): The quality modes to use (e.g. Benchmark, Consensus). Defaults to
                 Benchmark.
             is_benchmark_enabled (Optional[bool]): Whether the project supports benchmark. Defaults to None.
@@ -860,11 +856,7 @@ def create_response_creation_project(
         return self._create_project(_CoreProjectInput(**input))

     def _create_project(self, input: _CoreProjectInput) -> Project:
-        media_type_value = input.media_type.value
-
         params = input.model_dump(exclude_none=True)
-        if media_type_value:
-            params["media_type"] = media_type_value

         extra_params = {
             Field.String("dataset_name_or_id"): params.pop(
@@ -1651,10 +1643,6 @@ def get_data_row_ids_for_global_keys(
         """
         Gets data row ids for a list of global keys.

-        Deprecation Notice: This function will soon no longer return 'Deleted Data Rows'
-        as part of the 'results'. Global keys for deleted data rows will soon be placed
-        under 'Data Row not found' portion.
-
        Args:
            A list of global keys
        Returns:
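
Migration sketch (not part of the diff): with queue_mode removed from create_project, batch is effectively the only queue mode and no queue_mode argument is passed. The API key and project name below are placeholders:

import labelbox as lb

client = lb.Client(api_key="YOUR_API_KEY")  # placeholder key

# queue_mode is no longer a parameter of create_project.
project = client.create_project(
    name="example-project",
    media_type=lb.MediaType.Image,
)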

libs/labelbox/src/labelbox/data/annotation_types/collection.py

Lines changed: 0 additions & 15 deletions
@@ -21,21 +21,6 @@ def __init__(self, data: Generator[Label, None, None], *args, **kwargs):
         self._fns = {}
         super().__init__(data, *args, **kwargs)

-    def assign_feature_schema_ids(
-        self, ontology_builder: "ontology.OntologyBuilder"
-    ) -> "LabelGenerator":
-        def _assign_ids(label: Label):
-            label.assign_feature_schema_ids(ontology_builder)
-            return label
-
-        warnings.warn(
-            "This method is deprecated and will be "
-            "removed in a future release. Feature schema ids"
-            " are no longer required for importing."
-        )
-        self._fns["assign_feature_schema_ids"] = _assign_ids
-        return self
-
     def add_url_to_masks(
         self, signer: Callable[[bytes], str]
     ) -> "LabelGenerator":

libs/labelbox/src/labelbox/data/annotation_types/label.py

Lines changed: 0 additions & 36 deletions
@@ -136,42 +136,6 @@ def create_data_row(
         self.data.external_id = data_row.external_id
         return self

-    def assign_feature_schema_ids(
-        self, ontology_builder: ontology.OntologyBuilder
-    ) -> "Label":
-        """
-        Adds schema ids to all FeatureSchema objects in the Labels.
-
-        Args:
-            ontology_builder: The ontology that matches the feature names assigned to objects in this dataset
-        Returns:
-            Label. useful for chaining these modifying functions
-
-        Note: You can now import annotations using names directly without having to lookup schema_ids
-        """
-        warnings.warn(
-            "This method is deprecated and will be "
-            "removed in a future release. Feature schema ids"
-            " are no longer required for importing."
-        )
-        tool_lookup, classification_lookup = get_feature_schema_lookup(
-            ontology_builder
-        )
-        for annotation in self.annotations:
-            if isinstance(annotation, ClassificationAnnotation):
-                self._assign_or_raise(annotation, classification_lookup)
-                self._assign_option(annotation, classification_lookup)
-            elif isinstance(annotation, ObjectAnnotation):
-                self._assign_or_raise(annotation, tool_lookup)
-                for classification in annotation.classifications:
-                    self._assign_or_raise(classification, classification_lookup)
-                    self._assign_option(classification, classification_lookup)
-            else:
-                raise TypeError(
-                    f"Unexpected type found for annotation. {type(annotation)}"
-                )
-        return self
-
     def _assign_or_raise(self, annotation, lookup: Dict[str, str]) -> None:
         if annotation.feature_schema_id is not None:
             return
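
Migration sketch (not part of the diff): since schema ids no longer need to be assigned before import, annotations reference ontology features by name. The feature name, global key, and coordinates below are placeholders:

import labelbox.types as lb_types

label = lb_types.Label(
    data={"global_key": "my-image-global-key"},  # placeholder global key
    annotations=[
        lb_types.ObjectAnnotation(
            name="dog",  # must match a tool name in the project's ontology
            value=lb_types.Rectangle(
                start=lb_types.Point(x=10, y=10),
                end=lb_types.Point(x=110, y=110),
            ),
        )
    ],
)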

libs/labelbox/src/labelbox/project_validation.py

Lines changed: 1 addition & 3 deletions
@@ -11,7 +11,6 @@
     CONSENSUS_AUTO_AUDIT_PERCENTAGE,
     QualityMode,
 )
-from labelbox.schema.queue_mode import QueueMode

 PositiveInt = Annotated[int, Field(gt=0)]

@@ -20,7 +19,6 @@ class _CoreProjectInput(BaseModel):
     name: str
     description: Optional[str] = None
     media_type: MediaType
-    queue_mode: QueueMode = Field(default=QueueMode.Batch, frozen=True)
     auto_audit_percentage: Optional[float] = None
     auto_audit_number_of_labels: Optional[int] = None
     quality_modes: Optional[Set[QualityMode]] = Field(
@@ -33,7 +31,7 @@
     data_row_count: Optional[PositiveInt] = None
     editor_task_type: Optional[EditorTaskType] = None

-    model_config = ConfigDict(extra="forbid")
+    model_config = ConfigDict(extra="forbid", use_enum_values=True)

     @model_validator(mode="after")
     def validate_fields(self):
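
The use_enum_values=True switch is what lets client._create_project drop its manual media_type.value handling: pydantic stores and dumps the enum's value rather than the member. A standalone pydantic sketch with a made-up enum, for illustration only:

from enum import Enum
from pydantic import BaseModel, ConfigDict

class Color(str, Enum):          # stand-in enum, not part of the SDK
    RED = "RED"

class WithValues(BaseModel):
    model_config = ConfigDict(use_enum_values=True)
    color: Color

class WithMembers(BaseModel):
    color: Color

print(WithValues(color=Color.RED).model_dump())   # {'color': 'RED'}
print(WithMembers(color=Color.RED).model_dump())  # {'color': Color.RED} (the enum member itself)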

libs/labelbox/src/labelbox/schema/asset_attachment.py

Lines changed: 1 addition & 10 deletions
@@ -7,15 +7,6 @@


 class AttachmentType(str, Enum):
-    @classmethod
-    def __missing__(cls, value: object):
-        if str(value) == "TEXT":
-            warnings.warn(
-                "The TEXT attachment type is deprecated. Use RAW_TEXT instead."
-            )
-            return cls.RAW_TEXT
-        return value
-
     VIDEO = "VIDEO"
     IMAGE = "IMAGE"
     IMAGE_OVERLAY = "IMAGE_OVERLAY"
@@ -30,7 +21,7 @@ class AssetAttachment(DbObject):
     """Asset attachment provides extra context about an asset while labeling.

     Attributes:
-        attachment_type (str): IMAGE, VIDEO, IMAGE_OVERLAY, HTML, RAW_TEXT, TEXT_URL, or PDF_URL. TEXT attachment type is deprecated.
+        attachment_type (str): IMAGE, VIDEO, IMAGE_OVERLAY, HTML, RAW_TEXT, TEXT_URL, or PDF_URL.
         attachment_value (str): URL to an external file or a string of text
         attachment_name (str): The name of the attachment
     """

libs/labelbox/src/labelbox/schema/data_row_metadata.py

Lines changed: 0 additions & 8 deletions
@@ -673,10 +673,8 @@ def bulk_delete(
         if not len(deletes):
             raise ValueError("The 'deletes' list cannot be empty.")

-        passed_strings = False
         for i, delete in enumerate(deletes):
             if isinstance(delete.data_row_id, str):
-                passed_strings = True
                 deletes[i] = DeleteDataRowMetadata(
                     data_row_id=UniqueId(delete.data_row_id),
                     fields=delete.fields,
@@ -690,12 +688,6 @@
                     f"Invalid data row identifier type '{type(delete.data_row_id)}' for '{delete.data_row_id}'"
                 )

-        if passed_strings:
-            warnings.warn(
-                "Using string for data row id will be deprecated. Please use "
-                "UniqueId instead."
-            )
-
         def _batch_delete(
             deletes: List[_DeleteBatchDataRowMetadata],
         ) -> List[DataRowMetadataBatchResponse]:
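
Usage sketch (not part of the diff): plain string ids are still coerced in the loop above, just without the warning; wrapping them in UniqueId is the intended form. The API key, data row id, and metadata schema id are placeholders:

import labelbox as lb
from labelbox.schema.data_row_metadata import DeleteDataRowMetadata
from labelbox.schema.identifiable import UniqueId

client = lb.Client(api_key="YOUR_API_KEY")  # placeholder key
mdo = client.get_data_row_metadata_ontology()

mdo.bulk_delete(
    [
        DeleteDataRowMetadata(
            data_row_id=UniqueId("DATA_ROW_ID"),  # placeholder data row id
            fields=["METADATA_SCHEMA_ID"],        # placeholder metadata schema id
        )
    ]
)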

libs/labelbox/src/labelbox/schema/dataset.py

Lines changed: 0 additions & 42 deletions
@@ -166,49 +166,9 @@ def create_data_row(self, items=None, **kwargs) -> "DataRow":

         return self.client.get_data_row(res[0]["id"])

-    def create_data_rows_sync(
-        self, items, file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT
-    ) -> None:
-        """Synchronously bulk upload data rows.
-
-        Use this instead of `Dataset.create_data_rows` for smaller batches of data rows that need to be uploaded quickly.
-        Cannot use this for uploads containing more than 1000 data rows.
-        Each data row is also limited to 5 attachments.
-
-        Args:
-            items (iterable of (dict or str)):
-                See the docstring for `Dataset._create_descriptor_file` for more information.
-        Returns:
-            None. If the function doesn't raise an exception then the import was successful.
-
-        Raises:
-            ResourceCreationError: If the `items` parameter does not conform to
-                the specification in Dataset._create_descriptor_file or if the server did not accept the
-                DataRow creation request (unknown reason).
-            InvalidAttributeError: If there are fields in `items` not valid for
-                a DataRow.
-            ValueError: When the upload parameters are invalid
-        """
-        warnings.warn(
-            "This method is deprecated and will be "
-            "removed in a future release. Please use create_data_rows instead."
-        )
-
-        self._create_data_rows_sync(
-            items, file_upload_thread_count=file_upload_thread_count
-        )
-
-        return None  # Return None if no exception is raised
-
     def _create_data_rows_sync(
         self, items, file_upload_thread_count=FILE_UPLOAD_THREAD_COUNT
     ) -> "DataUpsertTask":
-        max_data_rows_supported = 1000
-        if len(items) > max_data_rows_supported:
-            raise ValueError(
-                f"Dataset.create_data_rows_sync() supports a max of {max_data_rows_supported} data rows."
-                " For larger imports use the async function Dataset.create_data_rows()"
-            )
         if file_upload_thread_count < 1:
             raise ValueError(
                 "file_upload_thread_count must be a positive integer"
@@ -235,8 +195,6 @@ def create_data_rows(
     ) -> "DataUpsertTask":
         """Asynchronously bulk upload data rows

-        Use this instead of `Dataset.create_data_rows_sync` uploads for batches that contain more than 1000 data rows.
-
         Args:
             items (iterable of (dict or str))

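Migration sketch (not part of the diff): create_data_rows_sync is removed outright, and the async create_data_rows covers both small and large uploads. The API key, dataset id, and row payload are placeholders:

import labelbox as lb

client = lb.Client(api_key="YOUR_API_KEY")  # placeholder key
dataset = client.get_dataset("DATASET_ID")  # placeholder id

task = dataset.create_data_rows(
    [{"row_data": "https://example.com/image.jpg", "global_key": "image-1"}]
)
task.wait_till_done()
print(task.errors)  # inspect for failures
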
libs/labelbox/src/labelbox/schema/project.py

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,6 @@
6161
ProjectOverview,
6262
ProjectOverviewDetailed,
6363
)
64-
from labelbox.schema.queue_mode import QueueMode
6564
from labelbox.schema.resource_tag import ResourceTag
6665
from labelbox.schema.task import Task
6766
from labelbox.schema.task_queue import TaskQueue
@@ -109,7 +108,6 @@ class Project(DbObject, Updateable, Deletable):
109108
created_at (datetime)
110109
setup_complete (datetime)
111110
last_activity_time (datetime)
112-
queue_mode (string)
113111
auto_audit_number_of_labels (int)
114112
auto_audit_percentage (float)
115113
is_benchmark_enabled (bool)
@@ -132,7 +130,6 @@ class Project(DbObject, Updateable, Deletable):
132130
created_at = Field.DateTime("created_at")
133131
setup_complete = Field.DateTime("setup_complete")
134132
last_activity_time = Field.DateTime("last_activity_time")
135-
queue_mode = Field.Enum(QueueMode, "queue_mode")
136133
auto_audit_number_of_labels = Field.Int("auto_audit_number_of_labels")
137134
auto_audit_percentage = Field.Float("auto_audit_percentage")
138135
# Bind data_type and allowedMediaTYpe using the GraphQL type MediaType
@@ -734,9 +731,6 @@ def create_batch(
734731
Raises:
735732
lbox.exceptions.ValueError if a project is not batch mode, if the project is auto data generation, if the batch exceeds 100k data rows
736733
"""
737-
# @TODO: make this automatic?
738-
if self.queue_mode != QueueMode.Batch:
739-
raise ValueError("Project must be in batch mode")
740734

741735
if (
742736
self.is_auto_data_generation() and not self.is_chat_evaluation()
@@ -818,9 +812,6 @@ def create_batches(
818812
Returns: a task for the created batches
819813
"""
820814

821-
if self.queue_mode != QueueMode.Batch:
822-
raise ValueError("Project must be in batch mode")
823-
824815
dr_ids = []
825816
if data_rows is not None:
826817
for dr in data_rows:
@@ -903,9 +894,6 @@ def create_batches_from_dataset(
903894
Returns: a task for the created batches
904895
"""
905896

906-
if self.queue_mode != QueueMode.Batch:
907-
raise ValueError("Project must be in batch mode")
908-
909897
if consensus_settings:
910898
consensus_settings = ConsensusSettings(
911899
**consensus_settings
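
Usage sketch (not part of the diff): with the queue_mode field and its guards removed, batching is assumed and create_batch no longer checks the mode. The API key, project id, and global keys are placeholders:

import labelbox as lb

client = lb.Client(api_key="YOUR_API_KEY")  # placeholder key
project = client.get_project("PROJECT_ID")  # placeholder id

batch = project.create_batch(
    name="example-batch",
    global_keys=["image-1"],  # placeholder global keys of existing data rows
    priority=5,
)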

libs/labelbox/src/labelbox/schema/queue_mode.py

Lines changed: 0 additions & 12 deletions
This file was deleted.
