
Commit eea7571

tried it out and not so sure about this...
1 parent 7c79470 commit eea7571

File tree: 13 files changed (+164 lines, -88 lines)


.DS_Store

0 Bytes
Binary file not shown.

LICENSE

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2021 Scale AI
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.

conftest.py

Lines changed: 13 additions & 8 deletions
@@ -6,6 +6,7 @@
 # https://github.com/gevent/gevent/issues/1016#issuecomment-328530533
 # https://github.com/spyoungtech/grequests/issues/8
 import grequests
+
 ################

 import logging
@@ -19,26 +20,28 @@

 from tests.helpers import TEST_DATASET_NAME, TEST_DATASET_ITEMS

-assert 'NUCLEUS_PYTEST_API_KEY' in os.environ, \
-    "You must set the 'NUCLEUS_PYTEST_API_KEY' environment variable to a valid " \
+assert "NUCLEUS_PYTEST_API_KEY" in os.environ, (
+    "You must set the 'NUCLEUS_PYTEST_API_KEY' environment variable to a valid "
     "Nucleus API key to run the test suite"
+)

-API_KEY = os.environ['NUCLEUS_PYTEST_API_KEY']
+API_KEY = os.environ["NUCLEUS_PYTEST_API_KEY"]


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def monkeypatch_session(request):
-    """ This workaround is needed to allow monkeypatching in session-scoped fixtures.
+    """This workaround is needed to allow monkeypatching in session-scoped fixtures.

     See https://github.com/pytest-dev/pytest/issues/363
     """
     from _pytest.monkeypatch import MonkeyPatch
+
     mpatch = MonkeyPatch()
     yield mpatch
     mpatch.undo()


-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
 def CLIENT(monkeypatch_session):
     client = nucleus.NucleusClient(API_KEY)

@@ -49,14 +52,16 @@ def _make_request_patch(
         payload: dict, route: str, requests_command=requests.post
     ) -> dict:
         response = client._make_request_raw(payload, route, requests_command)
-        assert response.status_code in SUCCESS_STATUS_CODES, \
-            f"HTTP response had status code: {response.status_code}. " \
+        assert response.status_code in SUCCESS_STATUS_CODES, (
+            f"HTTP response had status code: {response.status_code}. "
             f"Full JSON: {response.json()}"
+        )
         return response.json()

     monkeypatch_session.setattr(client, "_make_request", _make_request_patch)
     return client

+
 @pytest.fixture()
 def dataset(CLIENT):
     ds = CLIENT.create_dataset(TEST_DATASET_NAME)
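For illustration, a minimal sketch of how these fixtures might be consumed by a test module. The module name and assertion are hypothetical; only calls visible in this diff are used.

# tests/test_smoke.py (hypothetical)
from tests.helpers import TEST_DATASET_NAME


def test_create_dataset(CLIENT):
    # CLIENT is the patched NucleusClient; the monkeypatched _make_request
    # asserts a 2xx status code before returning the parsed JSON.
    ds = CLIENT.create_dataset(TEST_DATASET_NAME)
    assert ds is not None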

nucleus/__init__.py

Lines changed: 40 additions & 0 deletions
@@ -50,6 +50,7 @@
 geometry | dict | Representation of the bounding box in the Box2DGeometry format.\n
 metadata | dict | An arbitrary metadata blob for the annotation.\n
 """
+__version__ = "0.0.1"

 import json
 import logging
@@ -65,8 +66,11 @@
 from requests.adapters import HTTPAdapter

 # pylint: disable=E1101
+# TODO: refactor to reduce this file to under 1000 lines.
+# pylint: disable=C0302
 from requests.packages.urllib3.util.retry import Retry

+from .constants import REFERENCE_IDS_KEY, DATASET_ITEM_IDS_KEY
 from .dataset import Dataset
 from .dataset_item import DatasetItem
 from .annotation import (
@@ -961,6 +965,42 @@ def delete_slice(self, slice_id: str) -> dict:
         )
         return response

+    def append_to_slice(
+        self,
+        slice_id: str,
+        dataset_item_ids: List[str] = None,
+        reference_ids: List[str] = None,
+    ) -> dict:
+        """
+        Appends to a slice from items already present in a dataset.
+        The caller must exclusively use either datasetItemIds or reference_ids
+        as a means of identifying items in the dataset.
+
+        :param
+        dataset_item_ids: List[str],
+        reference_ids: List[str],
+
+        :return:
+        {
+            "slice_id": str,
+        }
+        """
+        if dataset_item_ids and reference_ids:
+            raise Exception(
+                "You cannot specify both dataset_item_ids and reference_ids"
+            )
+
+        ids_to_append: Dict[str, Any] = {}
+        if dataset_item_ids:
+            ids_to_append[DATASET_ITEM_IDS_KEY] = dataset_item_ids
+        if reference_ids:
+            ids_to_append[REFERENCE_IDS_KEY] = reference_ids
+
+        response = self._make_request(
+            ids_to_append, f"slice/{slice_id}/append"
+        )
+        return response
+
     def list_autotags(self, dataset_id: str) -> List[str]:
         """
         Fetches a list of autotags for a given dataset id
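For illustration, a minimal usage sketch of the new NucleusClient.append_to_slice method added above; the API key, slice id, and reference ids are placeholders.

import nucleus

client = nucleus.NucleusClient("YOUR_API_KEY")

# Identify items by reference_ids OR dataset_item_ids, never both:
# passing both raises an Exception, per the check in append_to_slice.
response = client.append_to_slice(
    slice_id="slc_1234567890",
    reference_ids=["img_0001", "img_0002"],
)
print(response)  # expected shape: {"slice_id": str}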

nucleus/annotation.py

Lines changed: 3 additions & 1 deletion
@@ -89,8 +89,10 @@ def __str__(self):

     @classmethod
     def from_json(cls, payload: dict):
+        if MASK_URL_KEY not in payload:
+            raise ValueError(f"Missing {MASK_URL_KEY} in json")
         return cls(
-            mask_url=payload.get(MASK_URL_KEY),
+            mask_url=payload[MASK_URL_KEY],
             annotations=[
                 Segment.from_json(ann)
                 for ann in payload.get(ANNOTATIONS_KEY, [])
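A minimal sketch of the new validation behavior, assuming this from_json belongs to SegmentationAnnotation and that MASK_URL_KEY equals "mask_url" (both inferred, not confirmed by this diff): a payload without a mask URL now raises instead of silently producing mask_url=None.

from nucleus.annotation import SegmentationAnnotation

try:
    # Payload deliberately missing the mask URL key.
    SegmentationAnnotation.from_json({"annotations": []})
except ValueError as err:
    print(err)  # e.g. "Missing mask_url in json"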

nucleus/model_run.py

Lines changed: 28 additions & 15 deletions
@@ -1,4 +1,4 @@
-from typing import Optional, List, Union
+from typing import Dict, Optional, List, Union, Type
 from .constants import (
     ANNOTATIONS_KEY,
     DEFAULT_ANNOTATION_UPDATE_MODE,
@@ -125,19 +125,32 @@ def loc(self, dataset_item_id: str):

     def _format_prediction_response(
         self, response: dict
-    ) -> Union[dict, List[Union[BoxPrediction, PolygonPrediction]]]:
+    ) -> Union[
+        dict,
+        List[Union[BoxPrediction, PolygonPrediction, SegmentationPrediction]],
+    ]:
         annotation_payload = response.get(ANNOTATIONS_KEY, None)
-        if annotation_payload:
-            annotation_response = {}
-            for (type_key, type_cls) in zip(
-                [BOX_TYPE, POLYGON_TYPE, SEGMENTATION_TYPE],
-                [BoxPrediction, PolygonPrediction, SegmentationPrediction],
-            ):
-                if type_key in annotation_payload:
-                    annotation_response[type_key] = [
-                        type_cls.from_json(ann)
-                        for ann in annotation_payload[type_key]
-                    ]
-            return annotation_response
-        else:  # An error occurred
+        SegmentationPrediction.from_json({"asdf": "asdf"})
+        if not annotation_payload:
+            # An error occurred
             return response
+        annotation_response = {}
+        type_key_to_class: Dict[
+            str,
+            Union[
+                Type[BoxPrediction],
+                Type[PolygonPrediction],
+                Type[SegmentationPrediction],
+            ],
+        ] = {
+            BOX_TYPE: BoxPrediction,
+            POLYGON_TYPE: PolygonPrediction,
+            SEGMENTATION_TYPE: SegmentationPrediction,
+        }
+        for type_key in annotation_payload:
+            type_class = type_key_to_class[type_key]
+            annotation_response[type_key] = [
+                type_class.from_json(annotation)
+                for annotation in annotation_payload[type_key]
+            ]
+        return annotation_response
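The rewrite above replaces zipped parallel lists with a dispatch table keyed by prediction type. A generic, self-contained sketch of that pattern follows; the classes and payload here are fabricated for illustration and are not part of this commit.

from typing import Dict, List, Type


class Box:
    @classmethod
    def from_json(cls, payload: dict) -> "Box":
        return cls()


class Polygon:
    @classmethod
    def from_json(cls, payload: dict) -> "Polygon":
        return cls()


# Map each payload type key to the class that knows how to parse it.
TYPE_KEY_TO_CLASS: Dict[str, Type] = {"box": Box, "polygon": Polygon}

payload = {"box": [{}], "polygon": [{}, {}]}
parsed: Dict[str, List] = {
    type_key: [TYPE_KEY_TO_CLASS[type_key].from_json(item) for item in items]
    for type_key, items in payload.items()
}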

nucleus/prediction.py

Lines changed: 5 additions & 3 deletions
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import Dict, Optional, List, Any
 from .annotation import (
     BoxAnnotation,
@@ -28,7 +30,7 @@ class SegmentationPrediction(SegmentationAnnotation):
     # No need to define init or to_payload methods because
     # we default to functions defined in the parent class
     @classmethod
-    def from_json(cls, payload: dict):
+    def from_json(cls, payload: dict) -> SegmentationPrediction:
         return cls(
             mask_url=payload[MASK_URL_KEY],
             annotations=[
@@ -76,7 +78,7 @@ def to_payload(self) -> dict:
         return payload

     @classmethod
-    def from_json(cls, payload: dict):
+    def from_json(cls, payload: dict) -> BoxPrediction:
         geometry = payload.get(GEOMETRY_KEY, {})
         return cls(
             label=payload.get(LABEL_KEY, 0),
@@ -119,7 +121,7 @@ def to_payload(self) -> dict:
         return payload

     @classmethod
-    def from_json(cls, payload: dict):
+    def from_json(cls, payload: dict) -> PolygonPrediction:
         geometry = payload.get(GEOMETRY_KEY, {})
         return cls(
             label=payload.get(LABEL_KEY, 0),

nucleus/slice.py

Lines changed: 5 additions & 19 deletions
@@ -1,9 +1,4 @@
-from typing import List, Dict, Any
-from .constants import (
-    DATASET_ITEM_ID_KEY,
-    REFERENCE_IDS_KEY,
-    DATASET_ITEM_IDS_KEY,
-)
+from typing import List


 class Slice:
@@ -47,18 +42,9 @@ def append(
             "slice_id": str,
         }
         """
-        if dataset_item_ids and reference_ids:
-            raise Exception(
-                "You cannot specify both dataset_item_ids and reference_ids"
-            )
-
-        payload: Dict[str, Any] = {}
-        if dataset_item_ids:
-            payload[DATASET_ITEM_IDS_KEY] = dataset_item_ids
-        if reference_ids:
-            payload[REFERENCE_IDS_KEY] = reference_ids
-
-        response = self._client._make_request(
-            payload, f"slice/{self.slice_id}/append"
+        response = self._client.append_to_slice(
+            slice_id=self.slice_id,
+            dataset_item_ids=dataset_item_ids,
+            reference_ids=reference_ids,
         )
         return response
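For illustration, a usage sketch of the slimmed-down Slice.append, which now delegates to NucleusClient.append_to_slice. Here some_slice is assumed to be a Slice instance obtained elsewhere via the client, and the reference ids are placeholders.

# some_slice: a Slice bound to an existing slice id and client.
response = some_slice.append(reference_ids=["img_0003", "img_0004"])
print(response)  # same payload shape as client.append_to_slice: {"slice_id": str}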

pyproject.toml

Lines changed: 42 additions & 5 deletions
@@ -3,7 +3,6 @@ line-length = 79
 target-version = ['py37']
 include = '\.pyi?$'
 exclude = '''
-
 (
   /(
       \.eggs         # exclude a few common directories in the
@@ -17,11 +16,49 @@ exclude = '''
     | build
     | dist
   )/
-  | foo.py           # also separately exclude a file named foo.py in
-                     # the root of the project
 )
 '''

 [build-system]
-requires = ["setuptools", "wheel"]
-build-backend = "setuptools.build_meta:__legacy__"
+requires = [
+    "flit_core >=2,<4",
+]
+build-backend = "flit_core.buildapi"
+
+[tool.flit.metadata]
+module = "nucleus"
+author = "Scale AI"
+author-email = "select@scaleapi.com"
+home-page = "https://github.com/scaleapi/nucleus-python-client"
+classifiers = [ "License :: OSI Approved :: MIT License",]
+description-file = "README.md"
+dist-name = "scale-nucleus"
+requires = ["grequests >= 0.6.0",
+            "requests >= 2.22.0",
+            "tqdm >= 4.56.1"]
+
+[tool.tox]
+legacy_tox_ini = """
+[tox]
+isolated_build = True
+envlist = py36,py37,py38,py39
+
+[testenv]
+deps =
+    black
+    coverage
+    flake8
+    mypy
+    mccabe
+    pylint
+    boto3
+commands =
+    black nucleus
+    flake8 nucleus
+    pylint nucleus
+    mypy nucleus --ignore-missing-imports
+    coverage erase
+    coverage run --include=tests/* -m pytest -ra
+    coverage report -m
+"""
+
requirements.txt

Lines changed: 0 additions & 5 deletions
This file was deleted.
