Handle an invalid JSON labelmap file correctly during the create/update DL model stage. (#573)

* handle the invalid JSON labelmap file case correctly at the create/update DL model stage
* dummy_label_map -> dummy_labelmap
* renamed the load_label_map function to load_labelmap
* Update CHANGELOG.md
Andrey Zhavoronkov 7 years ago committed by Nikita Manovich
parent e0b666ea9d
commit a159826bf7
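
For context, a minimal sketch of the failure mode this commit addresses (the file name and contents below are illustrative, not from the commit). Before the change, the raw file path was handed to run_inference_engine_annotation as labels_mapping, so a malformed labelmap file was never parsed where errors are handled; after the change, load_labelmap runs inside the try block of _run_test, where a parse failure can be reported cleanly:

    import json

    # Illustrative only: write a deliberately malformed labelmap file.
    with open("labelmap.json", "w") as f:
        f.write("{not valid json")

    try:
        # This is what load_labelmap does internally; with this commit the
        # call happens inside _run_test's try/except, so the error below is
        # caught instead of crashing the create/update DL model stage.
        with open("labelmap.json", "r") as f:
            label_map = json.load(f)["label_map"]
    except json.JSONDecodeError as err:
        print("Invalid labelmap file: {}".format(err))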

CHANGELOG.md

@@ -36,6 +36,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Auto annotation fail for multijob tasks
 - Installation of CVAT with OpenVINO on the Windows platform
 - Background color was always black in utils/mask/converter.py
+- Handling of wrong labelmap json file in auto annotation (https://github.com/opencv/cvat/issues/554)
 
 ### Security
 -

cvat/apps/auto_annotation/admin.py

@@ -2,3 +2,14 @@
 # Copyright (C) 2018 Intel Corporation
 #
 # SPDX-License-Identifier: MIT
+
+from django.contrib import admin
+from .models import AnnotationModel
+
+@admin.register(AnnotationModel)
+class AnnotationModelAdmin(admin.ModelAdmin):
+    list_display = ('name', 'owner', 'created_date', 'updated_date',
+        'shared', 'primary', 'framework')
+
+    def has_add_permission(self, request):
+        return False

cvat/apps/auto_annotation/model_loader.py

@@ -65,6 +65,6 @@ class ModelLoader():
 
         return results.copy()
 
-def load_label_map(labels_path):
-    with open(labels_path, "r") as f:
-        return json.load(f)["label_map"]
+def load_labelmap(labels_path):
+    with open(labels_path, "r") as f:
+        return json.load(f)["label_map"]
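As a reference for the renamed helper, a short sketch of the file format load_labelmap expects: a top-level "label_map" object mapping class ids to label names. The example values are illustrative; a file with broken JSON raises json.JSONDecodeError, and a file missing the "label_map" key raises KeyError.

    import json

    def load_labelmap(labels_path):
        # Same logic as the helper above: parse the file, return "label_map".
        with open(labels_path, "r") as f:
            return json.load(f)["label_map"]

    # Illustrative labelmap file: class ids mapped to label names.
    with open("labelmap.json", "w") as f:
        json.dump({"label_map": {"1": "person", "2": "car"}}, f)

    print(load_labelmap("labelmap.json"))  # {'1': 'person', '2': 'car'}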

cvat/apps/auto_annotation/model_manager.py

@@ -10,6 +10,8 @@ import rq
 import shutil
 import tempfile
 import itertools
+import sys
+import traceback
 
 from django.db import transaction
 from django.utils import timezone
@@ -22,7 +24,7 @@ from cvat.apps.engine.serializers import LabeledDataSerializer
 from cvat.apps.engine.annotation import put_task_data, patch_task_data
 
 from .models import AnnotationModel, FrameworkChoice
-from .model_loader import ModelLoader
+from .model_loader import ModelLoader, load_labelmap
 from .image_loader import ImageLoader
 from .import_modules import import_modules
 
@@ -44,11 +46,12 @@ def _update_dl_model_thread(dl_model_id, name, is_shared, model_file, weights_fi
 def _run_test(model_file, weights_file, labelmap_file, interpretation_file):
     test_image = np.ones((1024, 1980, 3), np.uint8) * 255
     try:
+        dummy_labelmap = {key: key for key in load_labelmap(labelmap_file).keys()}
         run_inference_engine_annotation(
             data=[test_image,],
             model_file=model_file,
             weights_file=weights_file,
-            labels_mapping=labelmap_file,
+            labels_mapping=dummy_labelmap,
             attribute_spec={},
             convertation_file=interpretation_file,
             restricted=restricted
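The dummy_labelmap added above is an identity mapping: the smoke test at model creation time has no real task labels to map onto, so each model label is simply mapped to itself, and the parse of the labelmap file now happens inside the try block. A small sketch with illustrative values:

    # Illustrative values; load_labelmap would return something like this.
    label_map = {"1": "person", "2": "car"}

    # Identity mapping, as built in _run_test above: each model label id
    # is mapped to itself for the test inference run.
    dummy_labelmap = {key: key for key in label_map.keys()}
    assert dummy_labelmap == {"1": "1", "2": "2"}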
@@ -370,7 +373,6 @@ def run_inference_engine_annotation(data, model_file, weights_file,
         add_shapes(processed_detections.get_shapes(), result["shapes"])
 
     return result
 
-
 def run_inference_thread(tid, model_file, weights_file, labels_mapping, attributes, convertation_file, reset, user, restricted=True):

cvat/apps/auto_annotation/views.py

@@ -15,7 +15,7 @@ from cvat.apps.engine.models import Task as TaskModel
 from cvat.apps.authentication.auth import has_admin_role
 from cvat.apps.engine.log import slogger
 
-from .model_loader import load_label_map
+from .model_loader import load_labelmap
 from . import model_manager
 from .models import AnnotationModel
@@ -195,7 +195,7 @@ def start_annotation(request, mid, tid):
         {db_attr.name: db_attr.id for db_attr in db_label.attributespec_set.all()} for db_label in db_labels}
     db_labels = {db_label.name:db_label.id for db_label in db_labels}
 
-    model_labels = {value: key for key, value in load_label_map(labelmap_file).items()}
+    model_labels = {value: key for key, value in load_labelmap(labelmap_file).items()}
 
     labels_mapping = {}
     for user_model_label, user_db_label in user_defined_labels_mapping.items():
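The model_labels line above inverts the parsed labelmap: load_labelmap returns {model id: label name}, while start_annotation needs {label name: model id} to look up a model label by the name the user mapped. A sketch with illustrative values:

    # Illustrative values for the parsed labelmap ({model id: label name}).
    label_map = {"1": "person", "2": "car"}

    # The inversion performed in start_annotation above: {label name: model id}.
    model_labels = {value: key for key, value in label_map.items()}
    assert model_labels == {"person": "1", "car": "2"}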
