
[PLT-1492] Fix 'flaky' tests spotted during a prod run #1846


Merged · 1 commit · Sep 27, 2024
66 changes: 45 additions & 21 deletions libs/labelbox/tests/conftest.py
@@ -1,35 +1,39 @@
from datetime import datetime
from random import randint
from string import ascii_letters

import json
import os
import re
import uuid
import time
from labelbox.schema.project import Project
import requests
from labelbox.schema.ontology import Ontology
import pytest
from types import SimpleNamespace
from typing import Type
import uuid
from datetime import datetime
from enum import Enum
from typing import Tuple
from random import randint
from string import ascii_letters
from types import SimpleNamespace
from typing import Tuple, Type

import pytest
import requests

from labelbox import Dataset, DataRow
from labelbox import MediaType
from labelbox import (
Classification,
Client,
DataRow,
Dataset,
LabelingFrontend,
MediaType,
OntologyBuilder,
Option,
Tool,
)
from labelbox.exceptions import LabelboxError
from labelbox.orm import query
from labelbox.pagination import PaginatedCollection
from labelbox.schema.annotation_import import LabelImport
from labelbox.schema.enums import AnnotationImportState
from labelbox.schema.invite import Invite
from labelbox.schema.ontology import Ontology
from labelbox.schema.project import Project
from labelbox.schema.quality_mode import QualityMode
from labelbox.schema.queue_mode import QueueMode
from labelbox import Client

from labelbox import LabelingFrontend
from labelbox import OntologyBuilder, Tool, Option, Classification
from labelbox.schema.annotation_import import LabelImport
from labelbox.schema.enums import AnnotationImportState
from labelbox.exceptions import LabelboxError

IMG_URL = "https://picsum.photos/200/300.jpg"
MASKABLE_IMG_URL = "https://storage.googleapis.com/labelbox-datasets/image_sample_data/2560px-Kitano_Street_Kobe01s5s4110.jpeg"
@@ -1255,6 +1259,21 @@ def teardown_ontology_feature_schemas(ontology: Ontology):
class ModuleTearDownHelpers(TearDownHelpers): ...


class LabelHelpers:
def wait_for_labels(self, project, number_of_labels=1):
timeout_seconds = 10
while True:
labels = list(project.labels())
if len(labels) >= number_of_labels:
return labels
timeout_seconds -= 2
if timeout_seconds <= 0:
raise TimeoutError(
f"Timed out waiting for label for project '{project.uid}' to finish processing"
)
time.sleep(2)


@pytest.fixture
def teardown_helpers():
return TearDownHelpers()
@@ -1263,3 +1282,8 @@ def teardown_helpers():
@pytest.fixture(scope="module")
def module_teardown_helpers():
return TearDownHelpers()


@pytest.fixture
def label_helpers():
return LabelHelpers()
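
For orientation, here is a minimal sketch (not part of this PR) of how a test might consume the new label_helpers fixture. The test name and the two-label scenario are illustrative; it assumes configured_project_with_label yields (project, dataset, data_row, label), as it does in libs/labelbox/tests/integration/test_label.py further down.

def test_second_label_eventually_visible(configured_project_with_label, label_helpers):
    # Illustrative usage only; names other than the fixtures are hypothetical.
    project, _, _, first_label = configured_project_with_label

    # Label creation is processed asynchronously on the server, so reading
    # project.labels() immediately after create_label() can be flaky.
    project.create_label()

    # Poll until both labels are queryable (raises TimeoutError after ~10s).
    labels = label_helpers.wait_for_labels(project, number_of_labels=2)

    assert first_label in labels
    assert len(labels) >= 2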
@@ -112,11 +112,6 @@ def test_generic_data_row_type_by_global_key(
(MediaType.Conversational, MediaType.Conversational),
(MediaType.Document, MediaType.Document),
(MediaType.Dicom, MediaType.Dicom),
(
MediaType.LLMPromptResponseCreation,
MediaType.LLMPromptResponseCreation,
),
(MediaType.LLMPromptCreation, MediaType.LLMPromptCreation),
(OntologyKind.ResponseCreation, OntologyKind.ResponseCreation),
(OntologyKind.ModelEvaluation, OntologyKind.ModelEvaluation),
],
@@ -186,6 +181,53 @@ def test_import_media_types(
assert exported_annotations == expected_data


@pytest.mark.parametrize(
"configured_project, media_type",
[
(
MediaType.LLMPromptResponseCreation,
MediaType.LLMPromptResponseCreation,
),
(MediaType.LLMPromptCreation, MediaType.LLMPromptCreation),
],
indirect=["configured_project"],
)
def test_import_media_types_llm(
client: Client,
configured_project: Project,
annotations_by_media_type,
exports_v2_by_media_type,
export_v2_test_helpers,
helpers,
media_type,
wait_for_label_processing,
):
annotations_ndjson = list(
itertools.chain.from_iterable(annotations_by_media_type[media_type])
)

label_import = lb.LabelImport.create_from_objects(
client,
configured_project.uid,
f"test-import-{media_type}",
annotations_ndjson,
)
label_import.wait_until_done()

assert label_import.state == AnnotationImportState.FINISHED
assert len(label_import.errors) == 0

all_annotations = sorted([a["uuid"] for a in annotations_ndjson])
successful_annotations = sorted(
[
status["uuid"]
for status in label_import.statuses
if status["status"] == "SUCCESS"
]
)
assert successful_annotations == all_annotations


@pytest.mark.parametrize(
"configured_project_by_global_key, media_type",
[
32 changes: 8 additions & 24 deletions libs/labelbox/tests/integration/schema/test_user_group.py
@@ -1,13 +1,13 @@
import pytest
import faker
from uuid import uuid4
from labelbox import Client
from labelbox.schema.user_group import UserGroup, UserGroupColor

import faker
import pytest

from labelbox.exceptions import (
ResourceNotFoundError,
ResourceCreationError,
UnprocessableEntityError,
ResourceNotFoundError,
)
from labelbox.schema.user_group import UserGroup, UserGroupColor

data = faker.Faker()

@@ -147,35 +147,19 @@ def test_cannot_update_group_id(user_group):
def test_get_user_groups_with_creation_deletion(client):
user_group = None
try:
# Get all user groups
user_groups = list(UserGroup(client).get_user_groups())

# manual delete for iterators
group_name = data.name()
user_group = UserGroup(client)
user_group.name = group_name
user_group.create()

user_groups_post_creation = list(UserGroup(client).get_user_groups())
assert user_group in user_groups_post_creation

# Verify that at least one user group is returned
assert len(user_groups_post_creation) > 0
assert len(user_groups_post_creation) == len(user_groups) + 1

# Verify that each user group has a valid ID and name
for ug in user_groups_post_creation:
assert ug.id is not None
assert ug.name is not None

user_group.delete()
user_group = None

user_groups_post_deletion = list(UserGroup(client).get_user_groups())

assert (
len(user_groups_post_deletion) == len(user_groups_post_creation) - 1
)

assert user_group not in user_groups_post_deletion
finally:
if user_group:
user_group.delete()
4 changes: 3 additions & 1 deletion libs/labelbox/tests/integration/test_label.py
@@ -41,11 +41,13 @@ def test_label_update(configured_project_with_label):
assert label.label == "something else"


def test_label_filter_order(configured_project_with_label):
def test_label_filter_order(configured_project_with_label, label_helpers):
project, _, _, label = configured_project_with_label

l1 = label
project.create_label()
label_helpers.wait_for_labels(project, 2)

l2 = next(project.labels())

assert set(project.labels()) == {l1, l2}