Skip to content

Commit

Permalink
MC-43796: Fix import of bodypose from kognic labeling job (hashicorp#671)
Browse files Browse the repository at this point in the history
  • Loading branch information
nuno407 authored Sep 5, 2023
1 parent 0cbcfff commit ff67ef3
Show file tree
Hide file tree
Showing 4 changed files with 115 additions and 9 deletions.
24 changes: 20 additions & 4 deletions labeling_bridge/labeling_bridge/service.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,11 +179,18 @@ def kognic_import(self, request_import_job_dto: RequestImportJobDTO): # pylint:
_logger.debug("%s labels from labeling_job %s already exists on sample %s",
annotation_type, request_import_job_dto.labelling_job_name, external_id)
else:
if annotation.content["openlabel"]["frames"]["0"].get("objects") is not None:
if annotation.content["openlabel"]["frames"]["0"].get("objects") is not None and \
len(annotation.content["openlabel"]["frames"]["0"].get("objects")) != 0:
for _, an_object in annotation.content["openlabel"]["frames"]["0"]["objects"].items():
an_object["labeling_job"] = request_import_job_dto.labelling_job_name

a_external_id = os.path.basename(external_id)
else:
# if there is nothing to import in this sample we can skip it
continue

a_external_id = os.path.basename(external_id)
# we need to define the URI of the file in our filesystem. The URI should
# be the same of the one contained in fn_map
annotation.content["openlabel"]["frames"]["0"]["frame_properties"]["streams"]["Voxel_export"]["uri"] = a_external_id # noqa pylint: disable=line-too-long
json_annotation = json.dumps(annotation.content, indent=4)
_logger.debug("File content:\n%s", str(json_annotation))
filename = tmp_dir_name + "/" + a_external_id + ".json"
Expand Down Expand Up @@ -211,6 +218,16 @@ def kognic_import(self, request_import_job_dto: RequestImportJobDTO): # pylint:
annotation_type.value,
os.listdir(tmp_dir_name))

if len(fn_map) == 0:
# if there is nothing to import in this annotation type we can skip it
Repository.update_status_of_tasks(
labeling_jobs=labeling_jobs,
kognic_labeling_types=[KognicLabelingType[annotation_type.name]],
status=StatusDocument(
status=Status.DONE)
)
continue

if dataset.skeletons.get(GT_POSE_LABEL) is None:
_logger.debug("Body pose skeletons not found, adding them")
voxel_functions.set_dataset_skeleton_configuration(dataset)
Expand All @@ -228,7 +245,6 @@ def kognic_import(self, request_import_job_dto: RequestImportJobDTO): # pylint:
skeleton_key="point_class",
label_field=OPENLABEL_LABEL_MAPPING
)
print(fn_map)

for field in fields_to_delete:
dataset.delete_sample_field(annotation_type_field[annotation_type] + field)
Expand Down
55 changes: 55 additions & 0 deletions labeling_bridge/tests/integration/test_data/test_no_labels.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
{
"openlabel": {
"actions": {},
"contexts": {},
"coordinate_systems": {},
"events": {},
"frame_intervals": [
{
"frame_end": 0,
"frame_start": 0
}
],
"frames": {
"0": {
"frame_properties": {
"external_id": "__location__/test_data/test.png",
"streams": {
"Voxel_export": {
"uri": "tmp3k82no6e.jpeg"
}
},
"timestamp": 0
},
"objects": {}
}
},
"metadata": {
"annotation_id": 4935414,
"annotation_instruction": "2023-04-21-001",
"annotation_type": "2D_semseg",
"batch": "marian-test",
"input_external_id": "__location__/test_data/test.png",
"input_uuid": "cfa6339a-47b4-4b12-8c44-fd3646ba2908",
"project": "Orion-RC-Dev",
"schema_version": "1.0.0",
"uri": "app.kognic.com/view/assignment/65c4ccf9-06b5-4c9e-80f8-fb0c69cdefc0",
"uuid": "1d42fe7b-099c-4650-bb17-5695ffff4a24"
},
"objects": {},
"ontologies": {},
"relations": {},
"resources": {},
"streams": {
"Voxel_export": {
"description": "",
"stream_properties": {
"height": 320,
"width": 576
},
"type": "camera"
}
},
"tags": {}
}
}
43 changes: 38 additions & 5 deletions labeling_bridge/tests/integration/test_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,19 +70,24 @@ def api_service(self):
container_services = Mock()
return ApiService(s3_mock, container_services)

@pytest.fixture
def semseg_test_data(self):
with (open(os.path.join(__location__, "test_data/test.json"), "r")) as file:
def semseg_test_data(self, path):
with (open(os.path.join(__location__, path), "r")) as file:
data_string = file.read()
data_string = data_string.replace("__location__", __location__)
data = json.loads(data_string)
print(data)
return data

@pytest.fixture
def semseg_annotation(self, semseg_test_data):
def semseg_annotation(self):
semseg = Mock()
semseg.content = self.semseg_test_data("test_data/test.json")
return semseg

@pytest.fixture
def semseg_empty_annotation(self):
semseg = Mock()
semseg.content = semseg_test_data
semseg.content = self.semseg_test_data("test_data/test_no_labels.json")
return semseg

def test_kognic_import_success(self, api_service: ApiService, semseg_annotation, sample_dataset):
Expand Down Expand Up @@ -121,6 +126,34 @@ def test_kognic_import_success(self, api_service: ApiService, semseg_annotation,
for label_task in LabelingJobTask.objects(kognic_labeling_job=labeling_job):
assert label_task.import_export_status.status == Status.DONE

def test_kognic_import_success_empty(self, api_service: ApiService, semseg_empty_annotation, sample_dataset):
"""Importing an annotation with no labeled objects still completes the job.

The Kognic interface is mocked to report a single SEMSEG annotation whose
"objects" map is empty (see the ``semseg_empty_annotation`` fixture); the
import must skip it gracefully and still mark the labeling job and every
one of its tasks as DONE rather than failing or leaving them pending.
"""
# GIVEN
# Mock the whole Kognic API surface so no network calls are made:
# one annotation type (SEMSEG) and one annotation with no objects.
labeling_type = [KognicLabelingTypeDTO.SEMSEG]
kognic_factory = Mock()
api_service.kognic_interface_factory = Mock(return_value=kognic_factory)
kognic_factory.get_annotation_types = Mock(return_value=labeling_type)
kognic_factory.get_project_annotations = Mock(return_value=[semseg_empty_annotation])

# Two samples in the dataset -> two labeling-job tasks are generated below.
create_sample(sample_dataset, "test.png")
create_sample(sample_dataset, "test2.png")
export_msg = export_message("tag", labeling_type=labeling_type, labeling_job_name=str(uuid.uuid4()))
import_msg = import_message(labeling_job_name=export_msg.labelling_job_name)
# Seed the job/task documents the import step will look up and update.
Repository.generate_job_and_task_entries(
dataset_view=sample_dataset.view(),
user_email="test@test.com",
request_export_job_dto=export_msg)

# WHEN
# NOTE(review): debug print — presumably leftover from development; confirm it can be removed.
print(len(sample_dataset))
api_service.kognic_import(request_import_job_dto=import_msg)

# THEN
# Even though nothing was importable, the job and both of its tasks must end up DONE.
labeling_job = LabelingJob.objects(kognic_labeling_job_name=export_msg.labelling_job_name).get()
assert labeling_job.import_export_status.status == Status.DONE
assert len(LabelingJobTask.objects(kognic_labeling_job=labeling_job)) == 2
for label_task in LabelingJobTask.objects(kognic_labeling_job=labeling_job):
assert label_task.import_export_status.status == Status.DONE

@unittest.mock.patch("fiftyone.core.dataset.Dataset.merge_dir", side_effect=[Exception("Voxel")])
@unittest.mock.patch("labeling_bridge.service._logger")
def test_kognic_import_error(self, _logger, _, api_service: ApiService, sample_dataset, semseg_annotation):
Expand Down
2 changes: 2 additions & 0 deletions sonar-project.properties
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,5 @@ sonar.exclusions = **/tests/**/*,frontend/**/node_modules/**,frontend/**/src/ass
sonar.javascript.lcov.reportPaths=frontend-coverage-report-unit/lcov.info

sonar.python.coverage.reportPaths=**/*coverage.xml

sonar.python.version=3.9

0 comments on commit ff67ef3

Please sign in to comment.