[PLT-1495] Migrated SDK to use Ruff as formatter #1806

Merged Sep 12, 2024 (10 commits)
2 changes: 1 addition & 1 deletion .github/workflows/python-package-develop.yml
@@ -177,4 +177,4 @@ jobs:
linux/arm64

tags: |
${{ env.CONTAINER_IMAGE }}:${{ github.sha }}
${{ env.CONTAINER_IMAGE }}:${{ github.sha }}
8 changes: 5 additions & 3 deletions libs/labelbox/pyproject.toml
@@ -64,14 +64,16 @@ build-backend = "hatchling.build"
[tool.rye]
managed = true
dev-dependencies = [
"yapf>=0.40.2",
"mypy>=1.9.0",
"types-pillow>=10.2.0.20240311",
"types-python-dateutil>=2.9.0.20240316",
"types-requests>=2.31.0.20240311",
"types-tqdm>=4.66.0.20240106",
]

[tool.ruff]
line-length = 80

[tool.rye.scripts]
unit = "pytest tests/unit"
# https://github.com/Labelbox/labelbox-python/blob/7c84fdffbc14fd1f69d2a6abdcc0087dc557fa4e/Makefile
@@ -87,9 +89,9 @@ unit = "pytest tests/unit"
# LABELBOX_TEST_BASE_URL="http://host.docker.internal:8080" \
integration = { cmd = "pytest tests/integration" }
data = { cmd = "pytest tests/data" }
yapf-lint = "yapf tests src -i --verbose --recursive --parallel --style \"google\""
rye-fmt-check = "rye fmt --check"
mypy-lint = "mypy src --pretty --show-error-codes --non-interactive --install-types"
lint = { chain = ["yapf-lint", "mypy-lint"] }
lint = { chain = ["mypy-lint", "rye-fmt-check"] }
test = { chain = ["lint", "unit", "integration"] }

[tool.hatch.metadata]
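For context on the script changes above, a minimal sketch of how the updated tooling would typically be invoked locally with Rye; these commands are inferred from the [tool.rye.scripts] entries in this diff and are not themselves part of the PR:

    rye fmt --check   # what the new rye-fmt-check script runs; rye fmt is backed by Ruff
    rye fmt           # apply Ruff formatting in place (line-length 80 per [tool.ruff])
    rye run lint      # new lint chain: mypy-lint, then rye-fmt-check
    rye run test      # full chain defined above: lint, unit, integration
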
43 changes: 37 additions & 6 deletions libs/labelbox/src/labelbox/__init__.py
@@ -7,7 +7,12 @@
from labelbox.schema.model import Model
from labelbox.schema.model_config import ModelConfig
from labelbox.schema.bulk_import_request import BulkImportRequest
from labelbox.schema.annotation_import import MALPredictionImport, MEAPredictionImport, LabelImport, MEAToMALPredictionImport
from labelbox.schema.annotation_import import (
MALPredictionImport,
MEAPredictionImport,
LabelImport,
MEAToMALPredictionImport,
)
from labelbox.schema.dataset import Dataset
from labelbox.schema.data_row import DataRow
from labelbox.schema.catalog import Catalog
@@ -18,16 +23,39 @@
from labelbox.schema.user import User
from labelbox.schema.organization import Organization
from labelbox.schema.task import Task
from labelbox.schema.export_task import StreamType, ExportTask, JsonConverter, JsonConverterOutput, FileConverter, FileConverterOutput, BufferedJsonConverterOutput
from labelbox.schema.labeling_frontend import LabelingFrontend, LabelingFrontendOptions
from labelbox.schema.export_task import (
StreamType,
ExportTask,
JsonConverter,
JsonConverterOutput,
FileConverter,
FileConverterOutput,
BufferedJsonConverterOutput,
)
from labelbox.schema.labeling_frontend import (
LabelingFrontend,
LabelingFrontendOptions,
)
from labelbox.schema.asset_attachment import AssetAttachment
from labelbox.schema.webhook import Webhook
from labelbox.schema.ontology import Ontology, OntologyBuilder, Classification, Option, Tool, FeatureSchema
from labelbox.schema.ontology import (
Ontology,
OntologyBuilder,
Classification,
Option,
Tool,
FeatureSchema,
)
from labelbox.schema.ontology import PromptResponseClassification
from labelbox.schema.ontology import ResponseOption
from labelbox.schema.role import Role, ProjectRole
from labelbox.schema.invite import Invite, InviteLimit
from labelbox.schema.data_row_metadata import DataRowMetadataOntology, DataRowMetadataField, DataRowMetadata, DeleteDataRowMetadata
from labelbox.schema.data_row_metadata import (
DataRowMetadataOntology,
DataRowMetadataField,
DataRowMetadata,
DeleteDataRowMetadata,
)
from labelbox.schema.model_run import ModelRun, DataSplit
from labelbox.schema.benchmark import Benchmark
from labelbox.schema.iam_integration import IAMIntegration
@@ -42,7 +70,10 @@
from labelbox.schema.identifiables import UniqueIds, GlobalKeys, DataRowIds
from labelbox.schema.identifiable import UniqueId, GlobalKey
from labelbox.schema.ontology_kind import OntologyKind
from labelbox.schema.project_overview import ProjectOverview, ProjectOverviewDetailed
from labelbox.schema.project_overview import (
ProjectOverview,
ProjectOverviewDetailed,
)
from labelbox.schema.labeling_service import LabelingService
from labelbox.schema.labeling_service_dashboard import LabelingServiceDashboard
from labelbox.schema.labeling_service_status import LabelingServiceStatus
62 changes: 34 additions & 28 deletions libs/labelbox/src/labelbox/adv_client.py
@@ -12,7 +12,6 @@


class AdvClient:

def __init__(self, endpoint: str, api_key: str):
self.endpoint = endpoint
self.api_key = api_key
@@ -32,47 +31,52 @@ def get_embeddings(self) -> List[Dict[str, Any]]:
return self._request("GET", "/adv/v1/embeddings").get("results", [])

def import_vectors_from_file(self, id: str, file_path: str, callback=None):
self._send_ndjson(f"/adv/v1/embeddings/{id}/_import_ndjson", file_path,
callback)
self._send_ndjson(
f"/adv/v1/embeddings/{id}/_import_ndjson", file_path, callback
)

def get_imported_vector_count(self, id: str) -> int:
data = self._request("GET", f"/adv/v1/embeddings/{id}/vectors/_count")
return data.get("count", 0)

def _create_session(self) -> Session:
session = requests.session()
session.headers.update({
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json"
})
session.headers.update(
{
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json",
}
)
return session

def _request(self,
method: str,
path: str,
data: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
def _request(
self,
method: str,
path: str,
data: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
url = f"{self.endpoint}{path}"
requests_data = None
if data:
requests_data = json.dumps(data)
response = self.session.request(method,
url,
data=requests_data,
headers=headers)
response = self.session.request(
method, url, data=requests_data, headers=headers
)
if response.status_code != requests.codes.ok:
message = response.json().get('message')
message = response.json().get("message")
if message:
raise LabelboxError(message)
else:
response.raise_for_status()
return response.json()

def _send_ndjson(self,
path: str,
file_path: str,
callback: Optional[Callable[[Dict[str, Any]],
None]] = None):
def _send_ndjson(
self,
path: str,
file_path: str,
callback: Optional[Callable[[Dict[str, Any]], None]] = None,
):
"""
Sends an NDJson file in chunks.

@@ -87,7 +91,7 @@ def upload_chunk(_buffer, _count):
_headers = {
"Content-Type": "application/x-ndjson",
"X-Content-Lines": str(_count),
"Content-Length": str(buffer.tell())
"Content-Length": str(buffer.tell()),
}
rsp = self._send_bytes(f"{self.endpoint}{path}", _buffer, _headers)
rsp.raise_for_status()
@@ -96,7 +100,7 @@ def upload_chunk(_buffer, _count):

buffer = io.BytesIO()
count = 0
with open(file_path, 'rb') as fp:
with open(file_path, "rb") as fp:
for line in fp:
buffer.write(line)
count += 1
@@ -107,10 +111,12 @@
if count:
upload_chunk(buffer, count)

def _send_bytes(self,
url: str,
buffer: io.BytesIO,
headers: Optional[Dict[str, Any]] = None) -> Response:
def _send_bytes(
self,
url: str,
buffer: io.BytesIO,
headers: Optional[Dict[str, Any]] = None,
) -> Response:
buffer.seek(0)
return self.session.put(url, headers=headers, data=buffer)
