Skip to content

[MODEL-1448] Upsert label feedback method #1684

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 8 commits into from
Jun 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/labelbox/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ Labelbox Python SDK Documentation
identifiable
identifiables
label
label-score
labeling-frontend
labeling-frontend-options
labeling-parameter-override
Expand Down
6 changes: 6 additions & 0 deletions docs/labelbox/label-score.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
Label Score
===============================================================================================

.. automodule:: labelbox.schema.label_score
:members:
:show-inheritance:
1 change: 1 addition & 0 deletions libs/labelbox/src/labelbox/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
from labelbox.schema.slice import Slice, CatalogSlice, ModelSlice
from labelbox.schema.queue_mode import QueueMode
from labelbox.schema.task_queue import TaskQueue
from labelbox.schema.label_score import LabelScore
from labelbox.schema.identifiables import UniqueIds, GlobalKeys, DataRowIds
from labelbox.schema.identifiable import UniqueId, GlobalKey
from labelbox.schema.ontology_kind import OntologyKind
Expand Down
51 changes: 51 additions & 0 deletions libs/labelbox/src/labelbox/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
from labelbox.schema.slice import CatalogSlice, ModelSlice
from labelbox.schema.task import Task
from labelbox.schema.user import User
from labelbox.schema.label_score import LabelScore
from labelbox.schema.ontology_kind import (OntologyKind, EditorTaskTypeMapper,
EditorTaskType)

Expand Down Expand Up @@ -2197,3 +2198,53 @@ def get_embedding_by_name(self, name: str) -> Embedding:
return e
raise labelbox.exceptions.ResourceNotFoundError(Embedding,
dict(name=name))

def upsert_label_feedback(
        self, label_id: str, feedback: str,
        scores: Dict[str, float]) -> List[LabelScore]:
    """Submits label feedback: a free-form text comment plus numeric
    label scores, upserted against a single label.

    Args:
        label_id: Target label ID.
        feedback: Free text comment regarding the label.
        scores: A dict of scores, where the key is a score name and the
            value is the score value.

    Returns:
        A list of LabelScore instances, one per score returned by the
        server.

    Example:
        >>> client.upsert_label_feedback(
        ...     label_id=label.uid,
        ...     feedback="That's a great label!",
        ...     scores={"overall": 5})
    """
    # GraphQL mutation; the server echoes back the upserted scores,
    # which we convert to LabelScore instances below.
    mutation_str = """
    mutation UpsertAutoQaLabelFeedbackPyApi(
        $labelId: ID!
        $feedback: String!
        $scores: Json!
    ) {
        upsertAutoQaLabelFeedback(
            input: {
                labelId: $labelId,
                feedback: $feedback,
                scores: $scores
            }
        ) {
            id
            scores {
                id
                name
                score
            }
        }
    }
    """
    res = self.execute(mutation_str, {
        "labelId": label_id,
        "feedback": feedback,
        "scores": scores
    })
    scores_raw = res["upsertAutoQaLabelFeedback"]["scores"]

    return [
        labelbox.LabelScore(name=x['name'], score=x['score'])
        for x in scores_raw
    ]
15 changes: 15 additions & 0 deletions libs/labelbox/src/labelbox/schema/label_score.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
from labelbox import pydantic_compat
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Since this is a new package / class we also need to make sure it will appear in readthedocs by adding a new .rst in docs/labelbox/ file and updating docs/labelbox/index.rst
FYI the same is documented in CONTRIBUTING.md under General Guidelines



class LabelScore(pydantic_compat.BaseModel):
    """
    A single named numeric score attached to a label, as returned by
    ``Client.upsert_label_feedback``.

    Attributes:
        name (str): Name of the score (e.g. a score category).
        score (float): Numeric value of the score.
    """

    name: str
    score: float
31 changes: 22 additions & 9 deletions libs/labelbox/tests/integration/test_label.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import os
import time

import pytest
import requests
import os

from labelbox import Label
from labelbox import Client, Label


def test_labels(configured_project_with_label):
Expand All @@ -29,11 +28,13 @@ def test_labels(configured_project_with_label):


# TODO: Skipping this test in staging due to label not updating
@pytest.mark.skipif(condition=os.environ['LABELBOX_TEST_ENVIRON'] == "onprem" or
os.environ['LABELBOX_TEST_ENVIRON'] == "staging" or
os.environ['LABELBOX_TEST_ENVIRON'] == "local" or
os.environ['LABELBOX_TEST_ENVIRON'] == "custom",
reason="does not work for onprem")
@pytest.mark.skipif(
condition=os.environ["LABELBOX_TEST_ENVIRON"] == "onprem" or
os.environ["LABELBOX_TEST_ENVIRON"] == "staging" or
os.environ["LABELBOX_TEST_ENVIRON"] == "local" or
os.environ["LABELBOX_TEST_ENVIRON"] == "custom",
reason="does not work for onprem",
)
def test_label_update(configured_project_with_label):
_, _, _, label = configured_project_with_label
label.update(label="something else")
Expand All @@ -57,7 +58,7 @@ def test_label_bulk_deletion(configured_project_with_label):
project, _, _, _ = configured_project_with_label

for _ in range(2):
#only run twice, already have one label in the fixture
# only run twice, already have one label in the fixture
project.create_label()
labels = project.labels()
l1 = next(labels)
Expand All @@ -74,3 +75,15 @@ def test_label_bulk_deletion(configured_project_with_label):
time.sleep(5)

assert set(project.labels()) == {l2}


def test_upsert_label_scores(configured_project_with_label, client: Client):
    """Upserting feedback on an existing label returns the submitted scores."""
    project = configured_project_with_label[0]

    target_label = next(iter(project.labels()))

    returned_scores = client.upsert_label_feedback(
        label_id=target_label.uid,
        feedback="That's a great label!",
        scores={"overall": 5},
    )

    assert len(returned_scores) == 1
    assert returned_scores[0].score == 5
Loading