Vb/merge 5.1.0 #1850

Merged · 7 commits · Sep 30, 2024

2 changes: 1 addition & 1 deletion docs/conf.py
@@ -16,7 +16,7 @@
project = 'Python SDK reference'
copyright = '2024, Labelbox'
author = 'Labelbox'
release = '5.0.0'
release = '5.1.0'

# -- General configuration ---------------------------------------------------

5 changes: 5 additions & 0 deletions libs/labelbox/CHANGELOG.md
@@ -1,4 +1,9 @@
# Changelog
# Version 5.1.0 (2024-09-27)
## Fixed
* Support self-signed SSL certs([#1811](https://github.com/Labelbox/labelbox-python/pull/1811))
* Rectangle units now correctly support percent inputs([#1848](https://github.com/Labelbox/labelbox-python/pull/1848))

# Version 5.0.0 (2024-09-16)
## Updated
* Set tasks_remaining_count to None LabelingServiceDashboard if labeling has not started ([#1817](https://github.com/Labelbox/labelbox-python/pull/1817))
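
To illustrate the "Support self-signed SSL certs" entry above: a minimal sketch, assuming the SDK's HTTP layer (`lbox.request_client.RequestClient`) is built on `requests` and therefore honors the standard `REQUESTS_CA_BUNDLE` variable. The certificate path and API key are placeholders, and the exact option added by #1811 is not shown in this diff.

```python
import os

from labelbox import Client

# Assumption: requests (and therefore the SDK's RequestClient) reads
# REQUESTS_CA_BUNDLE when verifying TLS, so pointing it at a bundle that
# contains the self-signed certificate lets the client connect.
os.environ["REQUESTS_CA_BUNDLE"] = "/path/to/self-signed-ca.pem"

client = Client(api_key="<YOUR_API_KEY>")
print(client.get_organization().name)
```
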
2 changes: 1 addition & 1 deletion libs/labelbox/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "labelbox"
version = "5.0.0"
version = "5.1.0"
description = "Labelbox Python API"
authors = [{ name = "Labelbox", email = "engineering@labelbox.com" }]
dependencies = [
2 changes: 1 addition & 1 deletion libs/labelbox/src/labelbox/__init__.py
@@ -1,6 +1,6 @@
name = "labelbox"

__version__ = "5.0.0"
__version__ = "5.1.0"

from labelbox.client import Client
from labelbox.schema.project import Project
102 changes: 47 additions & 55 deletions libs/labelbox/src/labelbox/client.py
@@ -7,13 +7,19 @@
import time
import urllib.parse
from collections import defaultdict
from datetime import datetime, timezone
from types import MappingProxyType
from typing import Any, Callable, Dict, List, Optional, Set, Union, overload

import lbox.exceptions
import requests
import requests.exceptions
from google.api_core import retry
from lbox.exceptions import (
InternalServerError,
LabelboxError,
ResourceNotFoundError,
TimeoutError,
)
from lbox.request_client import RequestClient

from labelbox import __version__ as SDK_VERSION
@@ -111,7 +117,7 @@ def __init__(
enable_experimental (bool): Indicates whether or not to use experimental features
app_url (str) : host url for all links to the web app
Raises:
lbox.exceptions.AuthenticationError: If no `api_key`
AuthenticationError: If no `api_key`
is provided as an argument or via the environment
variable.
"""
@@ -199,7 +205,7 @@ def upload_file(self, path: str) -> str:
Returns:
str, the URL of uploaded data.
Raises:
lbox.exceptions.LabelboxError: If upload failed.
LabelboxError: If upload failed.
"""
content_type, _ = mimetypes.guess_type(path)
filename = os.path.basename(path)
@@ -208,9 +214,7 @@ def upload_file(self, path: str) -> str:
content=f.read(), filename=filename, content_type=content_type
)

@retry.Retry(
predicate=retry.if_exception_type(lbox.exceptions.InternalServerError)
)
@retry.Retry(predicate=retry.if_exception_type(InternalServerError))
def upload_data(
self,
content: bytes,
@@ -230,7 +234,7 @@ def upload_data(
str, the URL of uploaded data.

Raises:
lbox.exceptions.LabelboxError: If upload failed.
LabelboxError: If upload failed.
"""

request_data = {
@@ -271,18 +275,16 @@ def upload_data(

if response.status_code == 502:
error_502 = "502 Bad Gateway"
raise lbox.exceptions.InternalServerError(error_502)
raise InternalServerError(error_502)
elif response.status_code == 503:
raise lbox.exceptions.InternalServerError(response.text)
raise InternalServerError(response.text)
elif response.status_code == 520:
raise lbox.exceptions.InternalServerError(response.text)
raise InternalServerError(response.text)

try:
file_data = response.json().get("data", None)
except ValueError as e: # response is not valid JSON
raise lbox.exceptions.LabelboxError(
"Failed to upload, unknown cause", e
)
raise LabelboxError("Failed to upload, unknown cause", e)

if not file_data or not file_data.get("uploadFile", None):
try:
Expand All @@ -292,9 +294,7 @@ def upload_data(
)
except Exception:
error_msg = "Unknown error"
raise lbox.exceptions.LabelboxError(
"Failed to upload, message: %s" % error_msg
)
raise LabelboxError("Failed to upload, message: %s" % error_msg)

return file_data["uploadFile"]["url"]

@@ -307,15 +307,15 @@ def _get_single(self, db_object_type, uid):
Returns:
Object of `db_object_type`.
Raises:
lbox.exceptions.ResourceNotFoundError: If there is no object
ResourceNotFoundError: If there is no object
of the given type for the given ID.
"""
query_str, params = query.get_single(db_object_type, uid)

res = self.execute(query_str, params)
res = res and res.get(utils.camel_case(db_object_type.type_name()))
if res is None:
raise lbox.exceptions.ResourceNotFoundError(db_object_type, params)
raise ResourceNotFoundError(db_object_type, params)
else:
return db_object_type(self, res)

@@ -329,7 +329,7 @@ def get_project(self, project_id) -> Project:
Returns:
The sought Project.
Raises:
lbox.exceptions.ResourceNotFoundError: If there is no
ResourceNotFoundError: If there is no
Project with the given ID.
"""
return self._get_single(Entity.Project, project_id)
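
A short sketch of catching the renamed exception from `get_project`, as documented above; the project ID is a placeholder.

```python
from labelbox import Client
from lbox.exceptions import ResourceNotFoundError

client = Client()  # assumes the API key is available via the environment

try:
    project = client.get_project("<PROJECT_ID>")
except ResourceNotFoundError:
    print("No Project with the given ID.")
```
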
@@ -344,7 +344,7 @@ def get_dataset(self, dataset_id) -> Dataset:
Returns:
The sought Dataset.
Raises:
lbox.exceptions.ResourceNotFoundError: If there is no
ResourceNotFoundError: If there is no
Dataset with the given ID.
"""
return self._get_single(Entity.Dataset, dataset_id)
@@ -470,7 +470,7 @@ def _create(self, db_object_type, data, extra_params={}):
)

if not res:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to create %s" % db_object_type.type_name()
)
res = res["create%s" % db_object_type.type_name()]
@@ -528,9 +528,7 @@ def delete_model_config(self, id: str) -> bool:
params = {"id": id}
result = self.execute(query, params)
if not result:
raise lbox.exceptions.ResourceNotFoundError(
Entity.ModelConfig, params
)
raise ResourceNotFoundError(Entity.ModelConfig, params)
return result["deleteModelConfig"]["success"]

def create_dataset(
@@ -589,7 +587,7 @@ def create_dataset(
)

if not validation_result["validateDataset"]["valid"]:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"IAMIntegration was not successfully added to the dataset."
)
except Exception as e:
@@ -895,7 +893,7 @@ def get_data_row_by_global_key(self, global_key: str) -> DataRow:
"""
res = self.get_data_row_ids_for_global_keys([global_key])
if res["status"] != "SUCCESS":
raise lbox.exceptions.ResourceNotFoundError(
raise ResourceNotFoundError(
Entity.DataRow, {global_key: global_key}
)
data_row_id = res["results"][0]
@@ -923,7 +921,7 @@ def get_model(self, model_id) -> Model:
Returns:
The sought Model.
Raises:
lbox.exceptions.ResourceNotFoundError: If there is no
ResourceNotFoundError: If there is no
Model with the given ID.
"""
return self._get_single(Entity.Model, model_id)
@@ -1169,7 +1167,7 @@ def delete_unused_feature_schema(self, feature_schema_id: str) -> None:
response = self.connection.delete(endpoint)

if response.status_code != requests.codes.no_content:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to delete the feature schema, message: "
+ str(response.json()["message"])
)
@@ -1190,7 +1188,7 @@ def delete_unused_ontology(self, ontology_id: str) -> None:
response = self.connection.delete(endpoint)

if response.status_code != requests.codes.no_content:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to delete the ontology, message: "
+ str(response.json()["message"])
)
@@ -1220,7 +1218,7 @@ def update_feature_schema_title(
if response.status_code == requests.codes.ok:
return self.get_feature_schema(feature_schema_id)
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to update the feature schema, message: "
+ str(response.json()["message"])
)
@@ -1256,7 +1254,7 @@ def upsert_feature_schema(self, feature_schema: Dict) -> FeatureSchema:
if response.status_code == requests.codes.ok:
return self.get_feature_schema(response.json()["schemaId"])
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to upsert the feature schema, message: "
+ str(response.json()["message"])
)
@@ -1284,7 +1282,7 @@ def insert_feature_schema_into_ontology(
)
response = self.connection.post(endpoint, json={"position": position})
if response.status_code != requests.codes.created:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to insert the feature schema into the ontology, message: "
+ str(response.json()["message"])
)
@@ -1309,7 +1307,7 @@ def get_unused_ontologies(self, after: str = None) -> List[str]:
if response.status_code == requests.codes.ok:
return response.json()
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to get unused ontologies, message: "
+ str(response.json()["message"])
)
@@ -1334,7 +1332,7 @@ def get_unused_feature_schemas(self, after: str = None) -> List[str]:
if response.status_code == requests.codes.ok:
return response.json()
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to get unused feature schemas, message: "
+ str(response.json()["message"])
)
@@ -1630,12 +1628,12 @@ def _format_failed_rows(
elif (
res["assignGlobalKeysToDataRowsResult"]["jobStatus"] == "FAILED"
):
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Job assign_global_keys_to_data_rows failed."
)
current_time = time.time()
if current_time - start_time > timeout_seconds:
raise lbox.exceptions.TimeoutError(
raise TimeoutError(
"Timed out waiting for assign_global_keys_to_data_rows job to complete."
)
time.sleep(sleep_time)
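
The failure paths above now raise the bare `LabelboxError` and `TimeoutError`; a hedged usage sketch of `assign_global_keys_to_data_rows`, with the data row ID and global key as placeholders and the result shape assumed from the surrounding code.

```python
from labelbox import Client
from lbox.exceptions import LabelboxError, TimeoutError

client = Client()  # assumes the API key is available via the environment

try:
    result = client.assign_global_keys_to_data_rows(
        [{"data_row_id": "<DATA_ROW_ID>", "global_key": "my-unique-global-key"}],
        timeout_seconds=60,
    )
    print(result["status"], result["errors"])
except (LabelboxError, TimeoutError) as err:
    print(f"Global key assignment did not complete: {err}")
```
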
@@ -1739,12 +1737,12 @@ def _format_failed_rows(

return {"status": status, "results": results, "errors": errors}
elif res["dataRowsForGlobalKeysResult"]["jobStatus"] == "FAILED":
raise lbox.exceptions.LabelboxError(
"Job dataRowsForGlobalKeys failed."
)
raise LabelboxError("Job dataRowsForGlobalKeys failed.")
current_time = time.time()
if current_time - start_time > timeout_seconds:
raise lbox.exceptions.TimeoutError(
raise TimeoutError(
"Timed out waiting for get_data_rows_for_global_keys job to complete."
)
time.sleep(sleep_time)
@@ -1843,12 +1839,12 @@ def _format_failed_rows(

return {"status": status, "results": results, "errors": errors}
elif res["clearGlobalKeysResult"]["jobStatus"] == "FAILED":
raise lbox.exceptions.LabelboxError(
"Job clearGlobalKeys failed."
)
raise LabelboxError("Job clearGlobalKeys failed.")
current_time = time.time()
if current_time - start_time > timeout_seconds:
raise lbox.exceptions.TimeoutError(
raise TimeoutError(
"Timed out waiting for clear_global_keys job to complete."
)
time.sleep(sleep_time)
@@ -1913,14 +1907,14 @@ def is_feature_schema_archived(
if filtered_feature_schema_nodes:
return bool(filtered_feature_schema_nodes[0]["archived"])
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"The specified feature schema was not in the ontology."
)

elif response.status_code == 404:
raise lbox.exceptions.ResourceNotFoundError(Ontology, ontology_id)
raise ResourceNotFoundError(Ontology, ontology_id)
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to get the feature schema archived status."
)

@@ -1947,7 +1941,7 @@ def get_model_slice(self, slice_id) -> ModelSlice:
"""
res = self.execute(query_str, {"id": slice_id})
if res is None or res["getSavedQuery"] is None:
raise lbox.exceptions.ResourceNotFoundError(ModelSlice, slice_id)
raise ResourceNotFoundError(ModelSlice, slice_id)

return Entity.ModelSlice(self, res["getSavedQuery"])

@@ -1994,7 +1988,7 @@ def delete_feature_schema_from_ontology(
result.deleted = bool(response_json["deleted"])
return result
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed to remove feature schema from ontology, message: "
+ str(response.json()["message"])
)
@@ -2022,11 +2016,9 @@ def unarchive_feature_schema_node(
response = self.connection.patch(ontology_endpoint)
if response.status_code == requests.codes.ok:
if not bool(response.json()["unarchived"]):
raise lbox.exceptions.LabelboxError(
"Failed unarchive the feature schema."
)
raise LabelboxError("Failed unarchive the feature schema.")
else:
raise lbox.exceptions.LabelboxError(
raise LabelboxError(
"Failed unarchive the feature schema node, message: ",
response.text,
)
@@ -2255,7 +2247,7 @@ def get_embedding_by_name(self, name: str) -> Embedding:
for e in embeddings:
if e.name == name:
return e
raise lbox.exceptions.ResourceNotFoundError(Embedding, dict(name=name))
raise ResourceNotFoundError(Embedding, dict(name=name))

def upsert_label_feedback(
self, label_id: str, feedback: str, scores: Dict[str, float]
@@ -2378,7 +2370,7 @@ def get_task_by_id(self, task_id: str) -> Union[Task, DataUpsertTask]:
result = self.execute(query, {"userId": user.uid, "taskId": task_id})
data = result.get("user", {}).get("createdTasks", [])
if not data:
raise lbox.exceptions.ResourceNotFoundError(
raise ResourceNotFoundError(
message=f"The task {task_id} does not exist."
)
task_data = data[0]
@@ -94,6 +94,7 @@ class RectangleUnit(Enum):
INCHES = "INCHES"
PIXELS = "PIXELS"
POINTS = "POINTS"
PERCENT = "PERCENT"


class DocumentRectangle(Rectangle):
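
The new `PERCENT` member backs the changelog entry about percent inputs for rectangle units. A minimal sketch of a page-relative bounding box; the import path and constructor fields are assumed from the public annotation types rather than shown in this diff.

```python
from labelbox.data.annotation_types import DocumentRectangle, Point, RectangleUnit

# Upper-left quadrant of page 0, expressed as percentages of the page size
# (assumed 0-100 scale; the diff itself only adds the enum member).
bbox = DocumentRectangle(
    start=Point(x=0.0, y=0.0),
    end=Point(x=50.0, y=50.0),
    page=0,
    unit=RectangleUnit.PERCENT,
)
print(bbox.unit)
```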