RESTful API (#389)
@ -1,9 +0,0 @@
|
||||
|
||||
# Copyright (C) 2018 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from django.contrib import admin
|
||||
|
||||
# Register your models here.
|
||||
|
||||
@ -1,9 +0,0 @@
|
||||
|
||||
# Copyright (C) 2018 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from django.db import models
|
||||
|
||||
# Create your models here.
|
||||
|
||||
|
Before Width: | Height: | Size: 1.5 KiB |
|
Before Width: | Height: | Size: 5.6 KiB |
|
Before Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 3.0 KiB After Width: | Height: | Size: 5.5 KiB |
|
Before Width: | Height: | Size: 1.8 KiB After Width: | Height: | Size: 2.2 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.4 KiB |
@ -1,38 +0,0 @@
|
||||
<!--
|
||||
Copyright (C) 2018 Intel Corporation
|
||||
|
||||
SPDX-License-Identifier: MIT
|
||||
-->
|
||||
<div class="dashboardTaskUI" id="dashboardTask_{{item.id}}">
|
||||
<center class="dashboardTitleWrapper">
|
||||
<label class="semiBold h1 dashboardTaskNameLabel selectable"> {{ item.name }} </label>
|
||||
</center>
|
||||
<center class="dashboardTitleWrapper">
|
||||
<label class="regular dashboardStatusLabel"> {{ item.status }} </label>
|
||||
</center>
|
||||
<div class="dashboardTaskIntro" style='background-image: url("/get/task/{{item.id}}/frame/0")'> </div>
|
||||
<div class="dashboardButtonsUI">
|
||||
<button class="dashboardDumpAnnotation regular dashboardButtonUI"> Dump Annotation </button>
|
||||
<button class="dashboardUploadAnnotation regular dashboardButtonUI"> Upload Annotation </button>
|
||||
<button class="dashboardUpdateTask regular dashboardButtonUI"> Update Task </button>
|
||||
<button class="dashboardDeleteTask regular dashboardButtonUI"> Delete Task </button>
|
||||
{%if item.bug_tracker %}
|
||||
<button class="dashboardOpenTrackerButton regular dashboardButtonUI"> Open Bug Tracker </button>
|
||||
<a class="dashboardBugTrackerLink" href='{{item.bug_tracker}}' style="display: none;"> </a>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="dashboardJobsUI">
|
||||
<center class="dashboardTitleWrapper">
|
||||
<label class="regular h1"> Jobs </label>
|
||||
</center>
|
||||
<table class="dashboardJobList regular">
|
||||
{% for segm in item.segment_set.all %}
|
||||
{% for job in segm.job_set.all %}
|
||||
<tr>
|
||||
<td> <a href="{{base_url}}?id={{job.id}}"> {{base_url}}?id={{job.id}} </a> </td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
@ -1,7 +0,0 @@
|
||||
|
||||
# Copyright (C) 2018 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# Create your tests here.
|
||||
|
||||
@ -0,0 +1,11 @@
|
||||
/*
|
||||
* Copyright (C) 2018 Intel Corporation
|
||||
*
|
||||
* SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
window.addEventListener('DOMContentLoaded', () => {
|
||||
$('<button class="regular h1" style="margin-left: 5px;"> User Guide </button>').on('click', () => {
|
||||
window.open('/documentation/user_guide.html');
|
||||
}).appendTo('#dashboardManageButtons');
|
||||
});
|
||||
@ -1,15 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2018 Intel Corporation
|
||||
*
|
||||
* SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
/* global
|
||||
Mousetrap:false
|
||||
*/
|
||||
|
||||
Mousetrap.bind(window.cvat.config.shortkeys["open_help"].value, function() {
|
||||
window.open("/documentation/user_guide.html");
|
||||
|
||||
return false;
|
||||
});
|
||||
@ -1,3 +1,5 @@
|
||||
# Copyright (C) 2018 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
default_app_config = 'cvat.apps.engine.apps.EngineConfig'
|
||||
|
||||
@ -0,0 +1,212 @@
|
||||
# Generated by Django 2.1.5 on 2019-02-17 19:32
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.migrations.operations.special
|
||||
import django.db.models.deletion
|
||||
import cvat.apps.engine.models
|
||||
|
||||
def set_segment_size(apps, schema_editor):
|
||||
Task = apps.get_model('engine', 'Task')
|
||||
for task in Task.objects.all():
|
||||
segment = task.segment_set.first()
|
||||
if segment:
|
||||
task.segment_size = segment.stop_frame - segment.start_frame + 1
|
||||
task.save()
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
('engine', '0014_job_max_shape_id'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='task',
|
||||
name='segment_size',
|
||||
field=models.PositiveIntegerField(null=True),
|
||||
),
|
||||
migrations.RunPython(
|
||||
code=set_segment_size,
|
||||
reverse_code=django.db.migrations.operations.special.RunPython.noop,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='task',
|
||||
name='segment_size',
|
||||
field=models.PositiveIntegerField(),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ClientFile',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('file', models.FileField(max_length=1024, storage=cvat.apps.engine.models.MyFileSystemStorage(),
|
||||
upload_to=cvat.apps.engine.models.upload_path_handler)),
|
||||
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
|
||||
],
|
||||
options={
|
||||
'default_permissions': (),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='RemoteFile',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('file', models.CharField(max_length=1024)),
|
||||
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
|
||||
],
|
||||
options={
|
||||
'default_permissions': (),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='ServerFile',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('file', models.CharField(max_length=1024)),
|
||||
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
|
||||
],
|
||||
options={
|
||||
'default_permissions': (),
|
||||
},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='task',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('ANNOTATION', 'annotation'), ('VALIDATION', 'validation'), ('COMPLETED', 'completed')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='task',
|
||||
name='overlap',
|
||||
field=models.PositiveIntegerField(null=True),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='task',
|
||||
name='path',
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='task',
|
||||
name='image_quality',
|
||||
field=models.PositiveSmallIntegerField(default=50),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Plugin',
|
||||
fields=[
|
||||
('name', models.SlugField(max_length=32, primary_key=True, serialize=False)),
|
||||
('description', cvat.apps.engine.models.SafeCharField(max_length=8192)),
|
||||
('created_at', models.DateTimeField(auto_now_add=True)),
|
||||
('updated_at', models.DateTimeField(auto_now_add=True)),
|
||||
('maintainer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='maintainers', to=settings.AUTH_USER_MODEL)),
|
||||
],
|
||||
options={
|
||||
'default_permissions': (),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='PluginOption',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('name', cvat.apps.engine.models.SafeCharField(max_length=32)),
|
||||
('value', cvat.apps.engine.models.SafeCharField(max_length=1024)),
|
||||
('plugin', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Plugin')),
|
||||
],
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='label',
|
||||
unique_together={('task', 'name')},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='clientfile',
|
||||
unique_together={('task', 'file')},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='attributespec',
|
||||
name='default_value',
|
||||
field=models.CharField(default='', max_length=128),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='attributespec',
|
||||
name='input_type',
|
||||
field=models.CharField(choices=[('CHECKBOX', 'checkbox'), ('RADIO', 'radio'), ('NUMBER', 'number'), ('TEXT', 'text'), ('SELECT', 'select')], default='select', max_length=16),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='attributespec',
|
||||
name='mutable',
|
||||
field=models.BooleanField(default=True),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='attributespec',
|
||||
name='name',
|
||||
field=models.CharField(default='test', max_length=64),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='attributespec',
|
||||
name='values',
|
||||
field=models.CharField(default='', max_length=4096),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='job',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('ANNOTATION', 'annotation'), ('VALIDATION', 'validation'), ('COMPLETED', 'completed')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='attributespec',
|
||||
name='text',
|
||||
field=models.CharField(default='', max_length=1024),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='attributespec',
|
||||
name='input_type',
|
||||
field=models.CharField(choices=[('checkbox', 'CHECKBOX'), ('radio', 'RADIO'), ('number', 'NUMBER'), ('text', 'TEXT'), ('select', 'SELECT')], max_length=16),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='task',
|
||||
name='segment_size',
|
||||
field=models.PositiveIntegerField(default=0),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='job',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('annotation', 'ANNOTATION'), ('validation', 'VALIDATION'), ('completed', 'COMPLETED')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='task',
|
||||
name='status',
|
||||
field=models.CharField(choices=[('annotation', 'ANNOTATION'), ('validation', 'VALIDATION'), ('completed', 'COMPLETED')], default=cvat.apps.engine.models.StatusChoice('annotation'), max_length=32),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Image',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('path', models.CharField(max_length=1024)),
|
||||
('frame', models.PositiveIntegerField()),
|
||||
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
|
||||
('height', models.PositiveIntegerField()),
|
||||
('width', models.PositiveIntegerField()),
|
||||
],
|
||||
options={
|
||||
'default_permissions': (),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='Video',
|
||||
fields=[
|
||||
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
||||
('path', models.CharField(max_length=1024)),
|
||||
('start_frame', models.PositiveIntegerField()),
|
||||
('stop_frame', models.PositiveIntegerField()),
|
||||
('step', models.PositiveIntegerField(default=1)),
|
||||
('task', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='engine.Task')),
|
||||
('height', models.PositiveIntegerField()),
|
||||
('width', models.PositiveIntegerField()),
|
||||
],
|
||||
options={
|
||||
'default_permissions': (),
|
||||
},
|
||||
),
|
||||
]
|
||||
@ -0,0 +1,172 @@
|
||||
import os
|
||||
import re
|
||||
import csv
|
||||
from io import StringIO
|
||||
from PIL import Image
|
||||
from django.db import migrations
|
||||
from django.conf import settings
|
||||
from cvat.apps.engine.task import _get_mime
|
||||
|
||||
def parse_attribute(value):
|
||||
match = re.match(r'^([~@])(\w+)=(\w+):(.+)?$', value)
|
||||
if match:
|
||||
prefix = match.group(1)
|
||||
input_type = match.group(2)
|
||||
name = match.group(3)
|
||||
if match.group(4):
|
||||
values = list(csv.reader(StringIO(match.group(4)),
|
||||
quotechar="'"))[0]
|
||||
else:
|
||||
values = []
|
||||
|
||||
return {'prefix':prefix, 'type':input_type, 'name':name, 'values':values}
|
||||
else:
|
||||
return None
|
||||
|
||||
def split_text_attribute(apps, schema_editor):
|
||||
AttributeSpec = apps.get_model('engine', 'AttributeSpec')
|
||||
for attribute in AttributeSpec.objects.all():
|
||||
spec = parse_attribute(attribute.text)
|
||||
if spec:
|
||||
attribute.mutable = (spec['prefix'] == '~')
|
||||
attribute.input_type = spec['type']
|
||||
attribute.name = spec['name']
|
||||
attribute.default_value = spec['values'][0] if spec['values'] else ''
|
||||
attribute.values = '\n'.join(spec['values'])
|
||||
attribute.save()
|
||||
|
||||
def join_text_attribute(apps, schema_editor):
|
||||
AttributeSpec = apps.get_model('engine', 'AttributeSpec')
|
||||
for attribute in AttributeSpec.objects.all():
|
||||
attribute.text = ""
|
||||
if attribute.mutable:
|
||||
attribute.text += "~"
|
||||
else:
|
||||
attribute.text += "@"
|
||||
|
||||
attribute.text += attribute.input_type
|
||||
attribute.text += "=" + attribute.name + ":"
|
||||
attribute.text += ",".join(attribute.values.split('\n'))
|
||||
attribute.save()
|
||||
|
||||
def _get_task_dirname(task_obj):
|
||||
return os.path.join(settings.DATA_ROOT, str(task_obj.id))
|
||||
|
||||
def _get_upload_dirname(task_obj):
|
||||
return os.path.join(_get_task_dirname(task_obj), ".upload")
|
||||
|
||||
def _get_frame_path(task_obj, frame):
|
||||
return os.path.join(
|
||||
_get_task_dirname(task_obj),
|
||||
"data",
|
||||
str(int(frame) // 10000),
|
||||
str(int(frame) // 100),
|
||||
str(frame) + '.jpg',
|
||||
)
|
||||
|
||||
def fill_task_meta_data_forward(apps, schema_editor):
|
||||
db_alias = schema_editor.connection.alias
|
||||
task_model = apps.get_model('engine', 'Task')
|
||||
video_model = apps.get_model('engine', "Video")
|
||||
image_model = apps.get_model('engine', 'Image')
|
||||
|
||||
for db_task in task_model.objects.all():
|
||||
if db_task.mode == 'interpolation':
|
||||
db_video = video_model()
|
||||
db_video.task_id = db_task.id
|
||||
db_video.start_frame = 0
|
||||
db_video.stop_frame = db_task.size
|
||||
db_video.step = 1
|
||||
|
||||
video = ""
|
||||
for root, _, files in os.walk(_get_upload_dirname(db_task)):
|
||||
fullnames = map(lambda f: os.path.join(root, f), files)
|
||||
videos = list(filter(lambda x: _get_mime(x) == 'video', fullnames))
|
||||
if len(videos):
|
||||
video = videos[0]
|
||||
break
|
||||
db_video.path = video
|
||||
try:
|
||||
image = Image.open(_get_frame_path(db_task, 0))
|
||||
db_video.width = image.width
|
||||
db_video.height = image.height
|
||||
image.close()
|
||||
except FileNotFoundError:
|
||||
db_video.width = 0
|
||||
db_video.height = 0
|
||||
|
||||
db_video.save()
|
||||
else:
|
||||
filenames = []
|
||||
for root, _, files in os.walk(_get_upload_dirname(db_task)):
|
||||
fullnames = map(lambda f: os.path.join(root, f), files)
|
||||
images = filter(lambda x: _get_mime(x) == 'image', fullnames)
|
||||
filenames.extend(images)
|
||||
filenames.sort()
|
||||
|
||||
db_images = []
|
||||
for i, image_path in enumerate(filenames):
|
||||
db_image = image_model()
|
||||
db_image.task_id = db_task.id
|
||||
db_image.path = image_path
|
||||
db_image.frame = i
|
||||
try:
|
||||
image = Image.open(image_path)
|
||||
db_image.width = image.width
|
||||
db_image.height = image.height
|
||||
image.close()
|
||||
except FileNotFoundError:
|
||||
db_image.width = 0
|
||||
db_image.height = 0
|
||||
|
||||
db_images.append(db_image)
|
||||
image_model.objects.using(db_alias).bulk_create(db_images)
|
||||
|
||||
def fill_task_meta_data_backward(apps, schema_editor):
|
||||
task_model = apps.get_model('engine', 'Task')
|
||||
video_model = apps.get_model('engine', "Video")
|
||||
image_model = apps.get_model('engine', 'Image')
|
||||
|
||||
for db_task in task_model.objects.all():
|
||||
upload_dir = _get_upload_dirname(db_task)
|
||||
if db_task.mode == 'interpolation':
|
||||
video = video_model.objects.get(task__id=db_task.id)
|
||||
db_task.source = os.path.relpath(video.path, upload_dir)
|
||||
video.delete()
|
||||
else:
|
||||
images = image_model.objects.filter(task__id=db_task.id)
|
||||
db_task.source = '{} images: {}, ...'.format(
|
||||
len(images),
|
||||
", ".join([os.path.relpath(x.path, upload_dir) for x in images[0:2]])
|
||||
)
|
||||
images.delete()
|
||||
db_task.save()
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('engine', '0015_db_redesign_20190217'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
code=split_text_attribute,
|
||||
reverse_code=join_text_attribute,
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='attributespec',
|
||||
name='text',
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name='attributespec',
|
||||
unique_together={('label', 'name')},
|
||||
),
|
||||
migrations.RunPython(
|
||||
code=fill_task_meta_data_forward,
|
||||
reverse_code=fill_task_meta_data_backward,
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='task',
|
||||
name='source',
|
||||
),
|
||||
]
|
||||
@ -0,0 +1,915 @@
|
||||
# Generated by Django 2.1.5 on 2019-02-21 12:25
|
||||
|
||||
import cvat.apps.engine.models
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from cvat.apps.engine.annotation import _merge_table_rows
|
||||
|
||||
# some modified functions to transer annotation
|
||||
def _bulk_create(db_model, db_alias, objects, flt_param):
|
||||
if objects:
|
||||
if flt_param:
|
||||
if 'postgresql' in settings.DATABASES["default"]["ENGINE"]:
|
||||
return db_model.objects.using(db_alias).bulk_create(objects)
|
||||
else:
|
||||
ids = list(db_model.objects.using(db_alias).filter(**flt_param).values_list('id', flat=True))
|
||||
db_model.objects.using(db_alias).bulk_create(objects)
|
||||
|
||||
return list(db_model.objects.using(db_alias).exclude(id__in=ids).filter(**flt_param))
|
||||
else:
|
||||
return db_model.objects.using(db_alias).bulk_create(objects)
|
||||
|
||||
def get_old_db_shapes(shape_type, db_job):
|
||||
def _get_shape_set(db_job, shape_type):
|
||||
if shape_type == 'polygons':
|
||||
return db_job.labeledpolygon_set
|
||||
elif shape_type == 'polylines':
|
||||
return db_job.labeledpolyline_set
|
||||
elif shape_type == 'boxes':
|
||||
return db_job.labeledbox_set
|
||||
elif shape_type == 'points':
|
||||
return db_job.labeledpoints_set
|
||||
|
||||
def get_values(shape_type):
|
||||
if shape_type == 'polygons':
|
||||
return [
|
||||
('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
|
||||
'labeledpolygonattributeval__value', 'labeledpolygonattributeval__spec_id',
|
||||
'labeledpolygonattributeval__id'), {
|
||||
'attributes': [
|
||||
'labeledpolygonattributeval__value',
|
||||
'labeledpolygonattributeval__spec_id',
|
||||
'labeledpolygonattributeval__id'
|
||||
]
|
||||
}, 'labeledpolygonattributeval_set'
|
||||
]
|
||||
elif shape_type == 'polylines':
|
||||
return [
|
||||
('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
|
||||
'labeledpolylineattributeval__value', 'labeledpolylineattributeval__spec_id',
|
||||
'labeledpolylineattributeval__id'), {
|
||||
'attributes': [
|
||||
'labeledpolylineattributeval__value',
|
||||
'labeledpolylineattributeval__spec_id',
|
||||
'labeledpolylineattributeval__id'
|
||||
]
|
||||
}, 'labeledpolylineattributeval_set'
|
||||
]
|
||||
elif shape_type == 'boxes':
|
||||
return [
|
||||
('id', 'frame', 'xtl', 'ytl', 'xbr', 'ybr', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
|
||||
'labeledboxattributeval__value', 'labeledboxattributeval__spec_id',
|
||||
'labeledboxattributeval__id'), {
|
||||
'attributes': [
|
||||
'labeledboxattributeval__value',
|
||||
'labeledboxattributeval__spec_id',
|
||||
'labeledboxattributeval__id'
|
||||
]
|
||||
}, 'labeledboxattributeval_set'
|
||||
]
|
||||
elif shape_type == 'points':
|
||||
return [
|
||||
('id', 'frame', 'points', 'label_id', 'group_id', 'occluded', 'z_order', 'client_id',
|
||||
'labeledpointsattributeval__value', 'labeledpointsattributeval__spec_id',
|
||||
'labeledpointsattributeval__id'), {
|
||||
'attributes': [
|
||||
'labeledpointsattributeval__value',
|
||||
'labeledpointsattributeval__spec_id',
|
||||
'labeledpointsattributeval__id'
|
||||
]
|
||||
}, 'labeledpointsattributeval_set'
|
||||
]
|
||||
(values, merge_keys, prefetch) = get_values(shape_type)
|
||||
db_shapes = list(_get_shape_set(db_job, shape_type).prefetch_related(prefetch).values(*values).order_by('frame'))
|
||||
return _merge_table_rows(db_shapes, merge_keys, 'id')
|
||||
|
||||
def get_old_db_paths(db_job):
|
||||
db_paths = db_job.objectpath_set
|
||||
for shape in ['trackedpoints_set', 'trackedbox_set', 'trackedpolyline_set', 'trackedpolygon_set']:
|
||||
db_paths.prefetch_related(shape)
|
||||
for shape_attr in ['trackedpoints_set__trackedpointsattributeval_set', 'trackedbox_set__trackedboxattributeval_set',
|
||||
'trackedpolygon_set__trackedpolygonattributeval_set', 'trackedpolyline_set__trackedpolylineattributeval_set']:
|
||||
db_paths.prefetch_related(shape_attr)
|
||||
db_paths.prefetch_related('objectpathattributeval_set')
|
||||
db_paths = list (db_paths.values('id', 'frame', 'group_id', 'shapes', 'client_id', 'objectpathattributeval__spec_id',
|
||||
'objectpathattributeval__id', 'objectpathattributeval__value',
|
||||
'trackedbox', 'trackedpolygon', 'trackedpolyline', 'trackedpoints',
|
||||
'trackedbox__id', 'label_id', 'trackedbox__xtl', 'trackedbox__ytl',
|
||||
'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame', 'trackedbox__occluded',
|
||||
'trackedbox__z_order','trackedbox__outside', 'trackedbox__trackedboxattributeval__spec_id',
|
||||
'trackedbox__trackedboxattributeval__value', 'trackedbox__trackedboxattributeval__id',
|
||||
'trackedpolygon__id' ,'trackedpolygon__points', 'trackedpolygon__frame', 'trackedpolygon__occluded',
|
||||
'trackedpolygon__z_order', 'trackedpolygon__outside', 'trackedpolygon__trackedpolygonattributeval__spec_id',
|
||||
'trackedpolygon__trackedpolygonattributeval__value', 'trackedpolygon__trackedpolygonattributeval__id',
|
||||
'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame', 'trackedpolyline__occluded',
|
||||
'trackedpolyline__z_order', 'trackedpolyline__outside', 'trackedpolyline__trackedpolylineattributeval__spec_id',
|
||||
'trackedpolyline__trackedpolylineattributeval__value', 'trackedpolyline__trackedpolylineattributeval__id',
|
||||
'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame', 'trackedpoints__occluded',
|
||||
'trackedpoints__z_order', 'trackedpoints__outside', 'trackedpoints__trackedpointsattributeval__spec_id',
|
||||
'trackedpoints__trackedpointsattributeval__value', 'trackedpoints__trackedpointsattributeval__id')
|
||||
.order_by('id', 'trackedbox__frame', 'trackedpolygon__frame', 'trackedpolyline__frame', 'trackedpoints__frame'))
|
||||
|
||||
db_box_paths = list(filter(lambda path: path['shapes'] == 'boxes', db_paths ))
|
||||
db_polygon_paths = list(filter(lambda path: path['shapes'] == 'polygons', db_paths ))
|
||||
db_polyline_paths = list(filter(lambda path: path['shapes'] == 'polylines', db_paths ))
|
||||
db_points_paths = list(filter(lambda path: path['shapes'] == 'points', db_paths ))
|
||||
|
||||
object_path_attr_merge_key = [
|
||||
'objectpathattributeval__value',
|
||||
'objectpathattributeval__spec_id',
|
||||
'objectpathattributeval__id'
|
||||
]
|
||||
|
||||
db_box_paths = _merge_table_rows(db_box_paths, {
|
||||
'attributes': object_path_attr_merge_key,
|
||||
'shapes': [
|
||||
'trackedbox__id', 'trackedbox__xtl', 'trackedbox__ytl',
|
||||
'trackedbox__xbr', 'trackedbox__ybr', 'trackedbox__frame',
|
||||
'trackedbox__occluded', 'trackedbox__z_order', 'trackedbox__outside',
|
||||
'trackedbox__trackedboxattributeval__value',
|
||||
'trackedbox__trackedboxattributeval__spec_id',
|
||||
'trackedbox__trackedboxattributeval__id'
|
||||
],
|
||||
}, 'id')
|
||||
|
||||
db_polygon_paths = _merge_table_rows(db_polygon_paths, {
|
||||
'attributes': object_path_attr_merge_key,
|
||||
'shapes': [
|
||||
'trackedpolygon__id', 'trackedpolygon__points', 'trackedpolygon__frame',
|
||||
'trackedpolygon__occluded', 'trackedpolygon__z_order', 'trackedpolygon__outside',
|
||||
'trackedpolygon__trackedpolygonattributeval__value',
|
||||
'trackedpolygon__trackedpolygonattributeval__spec_id',
|
||||
'trackedpolygon__trackedpolygonattributeval__id'
|
||||
]
|
||||
}, 'id')
|
||||
|
||||
db_polyline_paths = _merge_table_rows(db_polyline_paths, {
|
||||
'attributes': object_path_attr_merge_key,
|
||||
'shapes': [
|
||||
'trackedpolyline__id', 'trackedpolyline__points', 'trackedpolyline__frame',
|
||||
'trackedpolyline__occluded', 'trackedpolyline__z_order', 'trackedpolyline__outside',
|
||||
'trackedpolyline__trackedpolylineattributeval__value',
|
||||
'trackedpolyline__trackedpolylineattributeval__spec_id',
|
||||
'trackedpolyline__trackedpolylineattributeval__id'
|
||||
],
|
||||
}, 'id')
|
||||
|
||||
db_points_paths = _merge_table_rows(db_points_paths, {
|
||||
'attributes': object_path_attr_merge_key,
|
||||
'shapes': [
|
||||
'trackedpoints__id', 'trackedpoints__points', 'trackedpoints__frame',
|
||||
'trackedpoints__occluded', 'trackedpoints__z_order', 'trackedpoints__outside',
|
||||
'trackedpoints__trackedpointsattributeval__value',
|
||||
'trackedpoints__trackedpointsattributeval__spec_id',
|
||||
'trackedpoints__trackedpointsattributeval__id'
|
||||
]
|
||||
}, 'id')
|
||||
|
||||
for db_box_path in db_box_paths:
|
||||
db_box_path.attributes = list(set(db_box_path.attributes))
|
||||
db_box_path.type = 'box_path'
|
||||
db_box_path.shapes = _merge_table_rows(db_box_path.shapes, {
|
||||
'attributes': [
|
||||
'trackedboxattributeval__value',
|
||||
'trackedboxattributeval__spec_id',
|
||||
'trackedboxattributeval__id'
|
||||
]
|
||||
}, 'id')
|
||||
|
||||
for db_polygon_path in db_polygon_paths:
|
||||
db_polygon_path.attributes = list(set(db_polygon_path.attributes))
|
||||
db_polygon_path.type = 'poligon_path'
|
||||
db_polygon_path.shapes = _merge_table_rows(db_polygon_path.shapes, {
|
||||
'attributes': [
|
||||
'trackedpolygonattributeval__value',
|
||||
'trackedpolygonattributeval__spec_id',
|
||||
'trackedpolygonattributeval__id'
|
||||
]
|
||||
}, 'id')
|
||||
|
||||
for db_polyline_path in db_polyline_paths:
|
||||
db_polyline_path.attributes = list(set(db_polyline_path.attributes))
|
||||
db_polyline_path.type = 'polyline_path'
|
||||
db_polyline_path.shapes = _merge_table_rows(db_polyline_path.shapes, {
|
||||
'attributes': [
|
||||
'trackedpolylineattributeval__value',
|
||||
'trackedpolylineattributeval__spec_id',
|
||||
'trackedpolylineattributeval__id'
|
||||
]
|
||||
}, 'id')
|
||||
|
||||
for db_points_path in db_points_paths:
|
||||
db_points_path.attributes = list(set(db_points_path.attributes))
|
||||
db_points_path.type = 'points_path'
|
||||
db_points_path.shapes = _merge_table_rows(db_points_path.shapes, {
|
||||
'attributes': [
|
||||
'trackedpointsattributeval__value',
|
||||
'trackedpointsattributeval__spec_id',
|
||||
'trackedpointsattributeval__id'
|
||||
]
|
||||
}, 'id')
|
||||
return db_box_paths + db_polygon_paths + db_polyline_paths + db_points_paths
|
||||
|
||||
def process_shapes(db_job, apps, db_labels, db_attributes, db_alias):
|
||||
LabeledShape = apps.get_model('engine', 'LabeledShape')
|
||||
LabeledShapeAttributeVal = apps.get_model('engine', 'LabeledShapeAttributeVal')
|
||||
new_db_shapes = []
|
||||
new_db_attrvals = []
|
||||
for shape_type in ['boxes', 'points', 'polygons', 'polylines']:
|
||||
for shape in get_old_db_shapes(shape_type, db_job):
|
||||
new_db_shape = LabeledShape()
|
||||
new_db_shape.job = db_job
|
||||
new_db_shape.label = db_labels[shape.label_id]
|
||||
new_db_shape.group = shape.group_id
|
||||
|
||||
if shape_type == 'boxes':
|
||||
new_db_shape.type = cvat.apps.engine.models.ShapeType.RECTANGLE
|
||||
new_db_shape.points = [shape.xtl, shape.ytl, shape.xbr, shape.ybr]
|
||||
else:
|
||||
new_db_shape.points = shape.points.replace(',', ' ').split()
|
||||
if shape_type == 'points':
|
||||
new_db_shape.type = cvat.apps.engine.models.ShapeType.POINTS
|
||||
elif shape_type == 'polygons':
|
||||
new_db_shape.type = cvat.apps.engine.models.ShapeType.POLYGON
|
||||
elif shape_type == 'polylines':
|
||||
new_db_shape.type = cvat.apps.engine.models.ShapeType.POLYLINE
|
||||
|
||||
new_db_shape.frame = shape.frame
|
||||
new_db_shape.occluded = shape.occluded
|
||||
new_db_shape.z_order = shape.z_order
|
||||
|
||||
for attr in shape.attributes:
|
||||
db_attrval = LabeledShapeAttributeVal()
|
||||
db_attrval.shape_id = len(new_db_shapes)
|
||||
db_attrval.spec = db_attributes[attr.spec_id]
|
||||
db_attrval.value = attr.value
|
||||
new_db_attrvals.append(db_attrval)
|
||||
|
||||
new_db_shapes.append(new_db_shape)
|
||||
|
||||
new_db_shapes = _bulk_create(LabeledShape, db_alias, new_db_shapes, {"job_id": db_job.id})
|
||||
for db_attrval in new_db_attrvals:
|
||||
db_attrval.shape_id = new_db_shapes[db_attrval.shape_id].id
|
||||
|
||||
_bulk_create(LabeledShapeAttributeVal, db_alias, new_db_attrvals, {})
|
||||
|
||||
def process_paths(db_job, apps, db_labels, db_attributes, db_alias):
|
||||
TrackedShape = apps.get_model('engine', 'TrackedShape')
|
||||
LabeledTrack = apps.get_model('engine', 'LabeledTrack')
|
||||
LabeledTrackAttributeVal = apps.get_model('engine', 'LabeledTrackAttributeVal')
|
||||
TrackedShapeAttributeVal = apps.get_model('engine', 'TrackedShapeAttributeVal')
|
||||
tracks = get_old_db_paths(db_job)
|
||||
|
||||
new_db_tracks = []
|
||||
new_db_track_attrvals = []
|
||||
new_db_shapes = []
|
||||
new_db_shape_attrvals = []
|
||||
|
||||
for track in tracks:
|
||||
db_track = LabeledTrack()
|
||||
db_track.job = db_job
|
||||
db_track.label = db_labels[track.label_id]
|
||||
db_track.frame = track.frame
|
||||
db_track.group = track.group_id
|
||||
|
||||
for attr in track.attributes:
|
||||
db_attrspec = db_attributes[attr.spec_id]
|
||||
db_attrval = LabeledTrackAttributeVal()
|
||||
db_attrval.track_id = len(new_db_tracks)
|
||||
db_attrval.spec = db_attrspec
|
||||
db_attrval.value = attr.value
|
||||
new_db_track_attrvals.append(db_attrval)
|
||||
|
||||
for shape in track.shapes:
|
||||
db_shape = TrackedShape()
|
||||
db_shape.track_id = len(new_db_tracks)
|
||||
db_shape.frame = shape.frame
|
||||
db_shape.occluded = shape.occluded
|
||||
db_shape.z_order = shape.z_order
|
||||
db_shape.outside = shape.outside
|
||||
if track.type == 'box_path':
|
||||
db_shape.type = cvat.apps.engine.models.ShapeType.RECTANGLE
|
||||
db_shape.points = [shape.xtl, shape.ytl, shape.xbr, shape.ybr]
|
||||
else:
|
||||
db_shape.points = shape.points.replace(',', ' ').split()
|
||||
if track.type == 'points_path':
|
||||
db_shape.type = cvat.apps.engine.models.ShapeType.POINTS
|
||||
elif track.type == 'polygon_path':
|
||||
db_shape.type = cvat.apps.engine.models.ShapeType.POLYGON
|
||||
elif track.type == 'polyline_path':
|
||||
db_shape.type = cvat.apps.engine.models.ShapeType.POLYLINE
|
||||
|
||||
for attr in shape.attributes:
|
||||
db_attrspec = db_attributes[attr.spec_id]
|
||||
db_attrval = TrackedShapeAttributeVal()
|
||||
db_attrval.shape_id = len(new_db_shapes)
|
||||
db_attrval.spec = db_attrspec
|
||||
db_attrval.value = attr.value
|
||||
new_db_shape_attrvals.append(db_attrval)
|
||||
|
||||
new_db_shapes.append(db_shape)
|
||||
new_db_tracks.append(db_track)
|
||||
|
||||
new_db_tracks = _bulk_create(LabeledTrack, db_alias, new_db_tracks, {"job_id": db_job.id})
|
||||
|
||||
for db_attrval in new_db_track_attrvals:
|
||||
db_attrval.track_id = new_db_tracks[db_attrval.track_id].id
|
||||
_bulk_create(LabeledTrackAttributeVal, db_alias, new_db_track_attrvals, {})
|
||||
|
||||
for db_shape in new_db_shapes:
|
||||
db_shape.track_id = new_db_tracks[db_shape.track_id].id
|
||||
|
||||
new_db_shapes = _bulk_create(TrackedShape, db_alias, new_db_shapes, {"track__job_id": db_job.id})
|
||||
|
||||
for db_attrval in new_db_shape_attrvals:
|
||||
db_attrval.shape_id = new_db_shapes[db_attrval.shape_id].id
|
||||
|
||||
_bulk_create(TrackedShapeAttributeVal, db_alias, new_db_shape_attrvals, {})
|
||||
|
||||
def copy_annotations_forward(apps, schema_editor):
    """Forward data migration.

    Converts the old per-type annotation rows (LabeledBox/Polygon/... and
    ObjectPath trees) into the new unified LabeledShape/LabeledTrack
    representation for every job of every task, via the process_shapes()
    and process_paths() helpers defined earlier in this module.
    """
    db_alias = schema_editor.connection.alias
    # Use historical models so the migration works regardless of the
    # current state of models.py.
    Task = apps.get_model('engine', 'Task')
    AttributeSpec = apps.get_model('engine', 'AttributeSpec')

    for task in Task.objects.all():
        # Progress output: data migrations on big databases can take a while.
        print("run anno migration for the task {}".format(task.id))
        # Lookup tables keyed by primary key, shared by the per-job helpers.
        db_labels = {db_label.id:db_label for db_label in task.label_set.all()}
        db_attributes = {db_attr.id:db_attr for db_attr in AttributeSpec.objects.filter(label__task__id=task.id)}
        for segment in task.segment_set.prefetch_related('job_set').all():
            # NOTE(review): assumes each segment has exactly one job
            # (only the first job is migrated) — confirm against the schema.
            db_job = segment.job_set.first()
            print("run anno migration for the job {}".format(db_job.id))
            process_shapes(db_job, apps, db_labels, db_attributes, db_alias)
            process_paths(db_job, apps, db_labels, db_attributes, db_alias)
def _save_old_shapes_to_db(apps, db_shapes, db_attributes, db_alias, db_job):
    """Backward-migration helper.

    Writes new-style LabeledShape rows back into the old per-type tables
    (LabeledBox / LabeledPolygon / LabeledPolyline / LabeledPoints), along
    with their attribute values.

    Attribute rows are first linked to their parent shape by *list index*
    (position in new_db_shapes); after _bulk_create assigns real primary
    keys, the indices are rewritten to database ids.
    """
    def _get_shape_class(shape_type):
        # Map an old-schema table nickname to its historical model class.
        if shape_type == 'polygons':
            return apps.get_model('engine', 'LabeledPolygon')
        elif shape_type == 'polylines':
            return apps.get_model('engine', 'LabeledPolyline')
        elif shape_type == 'boxes':
            return apps.get_model('engine', 'LabeledBox')
        elif shape_type == 'points':
            return apps.get_model('engine', 'LabeledPoints')

    def _get_shape_attr_class(shape_type):
        # Same mapping for the per-type attribute-value models.
        if shape_type == 'polygons':
            return apps.get_model('engine', 'LabeledPolygonAttributeVal')
        elif shape_type == 'polylines':
            return apps.get_model('engine', 'LabeledPolylineAttributeVal')
        elif shape_type == 'boxes':
            return apps.get_model('engine', 'LabeledBoxAttributeVal')
        elif shape_type == 'points':
            return apps.get_model('engine', 'LabeledPointsAttributeVal')

    # Bucket the unified shapes by type; the order of this list matches
    # the ['boxes', 'polylines', 'polygons', 'points'] loop below.
    shapes = [
        list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.RECTANGLE, db_shapes)),
        list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.POLYLINE, db_shapes)),
        list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.POLYGON, db_shapes)),
        list(filter(lambda s: s.type == cvat.apps.engine.models.ShapeType.POINTS, db_shapes)),
    ]
    for i, shape_type in enumerate(['boxes', 'polylines', 'polygons', 'points']):
        new_db_shapes = []
        new_db_attrvals = []
        for shape in shapes[i]:
            db_shape = _get_shape_class(shape_type)()
            db_shape.job = shape.job
            db_shape.label = shape.label
            db_shape.group_id = shape.group
            if shape.type == cvat.apps.engine.models.ShapeType.RECTANGLE:
                # Old boxes store four named coordinates instead of a list.
                db_shape.xtl = shape.points[0]
                db_shape.ytl = shape.points[1]
                db_shape.xbr = shape.points[2]
                db_shape.ybr = shape.points[3]
            else:
                # Re-encode the flat [x0, y0, x1, y1, ...] list as the old
                # "x0,y0 x1,y1 ..." string; the single iterator is consumed
                # two items at a time (x from the for, y via next()).
                point_iterator = iter(shape.points)
                db_shape.points = ' '.join(['{},{}'.format(point, next(point_iterator)) for point in point_iterator])
            db_shape.frame = shape.frame
            db_shape.occluded = shape.occluded
            db_shape.z_order = shape.z_order

            for attr in list(shape.labeledshapeattributeval_set.all()):
                db_attrval = _get_shape_attr_class(shape_type)()
                # Temporary link by position; fixed up after bulk creation.
                if shape.type == cvat.apps.engine.models.ShapeType.POLYGON:
                    db_attrval.polygon_id = len(new_db_shapes)
                elif shape.type == cvat.apps.engine.models.ShapeType.POLYLINE:
                    db_attrval.polyline_id = len(new_db_shapes)
                elif shape.type == cvat.apps.engine.models.ShapeType.RECTANGLE:
                    db_attrval.box_id = len(new_db_shapes)
                else:
                    db_attrval.points_id = len(new_db_shapes)

                db_attrval.spec = db_attributes[attr.spec_id]
                db_attrval.value = attr.value
                new_db_attrvals.append(db_attrval)

            new_db_shapes.append(db_shape)

        new_db_shapes = _bulk_create(_get_shape_class(shape_type), db_alias, new_db_shapes, {"job_id": db_job.id})

        # Replace positional indices with the primary keys assigned above.
        for db_attrval in new_db_attrvals:
            if shape_type == 'polygons':
                db_attrval.polygon_id = new_db_shapes[db_attrval.polygon_id].id
            elif shape_type == 'polylines':
                db_attrval.polyline_id = new_db_shapes[db_attrval.polyline_id].id
            elif shape_type == 'boxes':
                db_attrval.box_id = new_db_shapes[db_attrval.box_id].id
            else:
                db_attrval.points_id = new_db_shapes[db_attrval.points_id].id

        _bulk_create(_get_shape_attr_class(shape_type), db_alias, new_db_attrvals, {})
def _save_old_tracks_to_db(apps, db_shapes, db_attributes, db_alias, db_job):
    """Backward-migration helper.

    Writes new-style LabeledTrack rows (passed in as ``db_shapes``) back
    into the old ObjectPath + per-type tracked-shape tables
    (TrackedBox / TrackedPolygon / TrackedPolyline / TrackedPoints).

    As in _save_old_shapes_to_db, children are linked to their parents by
    list index until _bulk_create assigns real primary keys, after which
    the indices are rewritten to database ids.
    """
    def _get_shape_class(shape_type):
        # Map a path-type nickname to the historical tracked-shape model.
        if shape_type == 'polygon_paths':
            return apps.get_model('engine', 'TrackedPolygon')
        elif shape_type == 'polyline_paths':
            return apps.get_model('engine', 'TrackedPolyline')
        elif shape_type == 'box_paths':
            return apps.get_model('engine', 'TrackedBox')
        elif shape_type == 'points_paths':
            return apps.get_model('engine', 'TrackedPoints')

    def _get_shape_attr_class(shape_type):
        # Same mapping for the tracked-shape attribute-value models.
        if shape_type == 'polygon_paths':
            return apps.get_model('engine', 'TrackedPolygonAttributeVal')
        elif shape_type == 'polyline_paths':
            return apps.get_model('engine', 'TrackedPolylineAttributeVal')
        elif shape_type == 'box_paths':
            return apps.get_model('engine', 'TrackedBoxAttributeVal')
        elif shape_type == 'points_paths':
            return apps.get_model('engine', 'TrackedPointsAttributeVal')

    # Bucket tracks by the type of their first shape (all shapes of a
    # track share one type). Order matches the loop below.
    tracks = [
        list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.RECTANGLE, db_shapes)),
        list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.POLYLINE, db_shapes)),
        list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.POLYGON, db_shapes)),
        list(filter(lambda t: t.trackedshape_set.first().type == cvat.apps.engine.models.ShapeType.POINTS, db_shapes)),
    ]

    ObjectPath = apps.get_model('engine', 'ObjectPath')
    ObjectPathAttributeVal = apps.get_model('engine', 'ObjectPathAttributeVal')

    for i, shape_type in enumerate(['box_paths', 'polyline_paths', 'polygon_paths', 'points_paths', ]):
        new_db_paths = []
        new_db_path_attrvals = []
        new_db_shapes = []
        new_db_shape_attrvals = []

        for path in tracks[i]:
            db_path = ObjectPath()
            db_path.job = db_job
            db_path.label = path.label
            db_path.frame = path.frame
            db_path.group_id = path.group
            # db_path.client_id = path.client_id
            # The old schema stores the shape-table nickname on the path.
            if shape_type == 'polygon_paths':
                db_path.shapes = 'polygons'
            elif shape_type == 'polyline_paths':
                db_path.shapes = 'polylines'
            elif shape_type == 'box_paths':
                db_path.shapes = 'boxes'
            elif shape_type == 'points_paths':
                db_path.shapes = 'points'

            # Track-level attributes: linked to the path by position for now.
            for attr in list(path.labeledtrackattributeval_set.all()):
                db_attrspec = db_attributes[attr.spec_id]
                db_attrval = ObjectPathAttributeVal()
                db_attrval.track_id = len(new_db_paths)
                db_attrval.spec = db_attrspec
                db_attrval.value = attr.value
                new_db_path_attrvals.append(db_attrval)

            for shape in list(path.trackedshape_set.all()):
                db_shape = _get_shape_class(shape_type)()
                db_shape.track_id = len(new_db_paths)
                if shape_type == 'box_paths':
                    # Old tracked boxes use four named coordinates.
                    db_shape.xtl = shape.points[0]
                    db_shape.ytl = shape.points[1]
                    db_shape.xbr = shape.points[2]
                    db_shape.ybr = shape.points[3]
                else:
                    # Re-encode flat [x0, y0, ...] as "x0,y0 x1,y1 ...";
                    # the single iterator yields x/y pairs.
                    point_iterator = iter(shape.points)
                    db_shape.points = ' '.join(['{},{}'.format(point, next(point_iterator)) for point in point_iterator])

                db_shape.frame = shape.frame
                db_shape.occluded = shape.occluded
                db_shape.z_order = shape.z_order
                db_shape.outside = shape.outside

                # Shape-level attributes, linked by position in new_db_shapes.
                for attr in list(shape.trackedshapeattributeval_set.all()):
                    db_attrspec = db_attributes[attr.spec_id]
                    db_attrval = _get_shape_attr_class(shape_type)()
                    if shape_type == 'polygon_paths':
                        db_attrval.polygon_id = len(new_db_shapes)
                    elif shape_type == 'polyline_paths':
                        db_attrval.polyline_id = len(new_db_shapes)
                    elif shape_type == 'box_paths':
                        db_attrval.box_id = len(new_db_shapes)
                    elif shape_type == 'points_paths':
                        db_attrval.points_id = len(new_db_shapes)
                    db_attrval.spec = db_attrspec
                    db_attrval.value = attr.value
                    new_db_shape_attrvals.append(db_attrval)

                new_db_shapes.append(db_shape)
            new_db_paths.append(db_path)

        new_db_paths = _bulk_create(ObjectPath, db_alias, new_db_paths, {"job_id": db_job.id})

        # Rewrite positional links with the freshly assigned primary keys.
        for db_attrval in new_db_path_attrvals:
            db_attrval.track_id = new_db_paths[db_attrval.track_id].id
        _bulk_create(ObjectPathAttributeVal, db_alias, new_db_path_attrvals, {})

        for db_shape in new_db_shapes:
            db_shape.track_id = new_db_paths[db_shape.track_id].id

        db_shapes = _bulk_create(_get_shape_class(shape_type), db_alias, new_db_shapes, {"track__job_id": db_job.id})

        for db_attrval in new_db_shape_attrvals:
            if shape_type == 'polygon_paths':
                db_attrval.polygon_id = db_shapes[db_attrval.polygon_id].id
            elif shape_type == 'polyline_paths':
                db_attrval.polyline_id = db_shapes[db_attrval.polyline_id].id
            elif shape_type == 'box_paths':
                db_attrval.box_id = db_shapes[db_attrval.box_id].id
            elif shape_type == 'points_paths':
                db_attrval.points_id = db_shapes[db_attrval.points_id].id

        _bulk_create(_get_shape_attr_class(shape_type), db_alias, new_db_shape_attrvals, {})
def copy_annotations_backward(apps, schema_editor):
    """Backward data migration.

    Converts the unified LabeledShape/LabeledTrack annotations of every
    job back into the old per-type tables via _save_old_shapes_to_db and
    _save_old_tracks_to_db.
    """
    Task = apps.get_model('engine', 'Task')
    AttributeSpec = apps.get_model('engine', 'AttributeSpec')
    db_alias = schema_editor.connection.alias

    for task in Task.objects.all():
        # Attribute specs keyed by primary key for quick lookup.
        db_attributes = {db_attr.id:db_attr for db_attr in AttributeSpec.objects.filter(label__task__id=task.id)}
        for segment in task.segment_set.prefetch_related('job_set').all():
            # NOTE(review): assumes one job per segment — only the first
            # job is converted; confirm against the schema.
            db_job = segment.job_set.first()

            db_shapes = list(db_job.labeledshape_set
                .prefetch_related("label")
                .prefetch_related("labeledshapeattributeval_set"))
            _save_old_shapes_to_db(apps, db_shapes, db_attributes, db_alias, db_job)

            db_tracks = list(db_job.labeledtrack_set
                .select_related("label")
                .prefetch_related("labeledtrackattributeval_set")
                .prefetch_related("trackedshape_set__trackedshapeattributeval_set"))
            _save_old_tracks_to_db(apps, db_tracks, db_attributes, db_alias, db_job)
class Migration(migrations.Migration):
    """DB redesign migration.

    Creates the unified annotation models (LabeledImage/Shape/Track,
    TrackedShape and their *AttributeVal tables), copies all existing
    annotations into them (RunPython below), then drops the old
    per-shape-type models. Reversible via copy_annotations_backward.
    """

    dependencies = [
        ('engine', '0016_attribute_spec_20190217'),
    ]

    operations = [
        # --- new unified annotation models -------------------------------
        migrations.CreateModel(
            name='LabeledImageAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledShapeAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledTrackAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='TrackedShape',
            fields=[
                ('type', models.CharField(choices=[('rectangle', 'RECTANGLE'), ('polygon', 'POLYGON'), ('polyline', 'POLYLINE'), ('points', 'POINTS')], max_length=16)),
                ('occluded', models.BooleanField(default=False)),
                ('z_order', models.IntegerField(default=0)),
                ('points', cvat.apps.engine.models.FloatArrayField()),
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('outside', models.BooleanField(default=False)),
            ],
            options={
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='TrackedShapeAttributeVal',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('value', cvat.apps.engine.models.SafeCharField(max_length=64)),
                ('shape', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.TrackedShape')),
                ('spec', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.AttributeSpec')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledImage',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('group', models.PositiveIntegerField(null=True)),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledShape',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('group', models.PositiveIntegerField(null=True)),
                ('type', models.CharField(choices=[('rectangle', 'RECTANGLE'), ('polygon', 'POLYGON'), ('polyline', 'POLYLINE'), ('points', 'POINTS')], max_length=16)),
                ('occluded', models.BooleanField(default=False)),
                ('z_order', models.IntegerField(default=0)),
                ('points', cvat.apps.engine.models.FloatArrayField()),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        migrations.CreateModel(
            name='LabeledTrack',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('frame', models.PositiveIntegerField()),
                ('group', models.PositiveIntegerField(null=True)),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
        # --- foreign keys added separately (FK targets created above) ----
        migrations.AddField(
            model_name='labeledimage',
            name='job',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Job'),
        ),
        migrations.AddField(
            model_name='labeledtrack',
            name='job',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Job'),
        ),
        migrations.AddField(
            model_name='labeledshape',
            name='job',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Job'),
        ),
        migrations.AddField(
            model_name='labeledimage',
            name='label',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Label'),
        ),
        migrations.AddField(
            model_name='labeledshape',
            name='label',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Label'),
        ),
        migrations.AddField(
            model_name='labeledtrack',
            name='label',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.Label'),
        ),
        migrations.AddField(
            model_name='trackedshape',
            name='track',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledTrack'),
        ),
        migrations.AddField(
            model_name='labeledtrackattributeval',
            name='track',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledTrack'),
        ),
        migrations.AddField(
            model_name='labeledshapeattributeval',
            name='shape',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledShape'),
        ),
        migrations.AddField(
            model_name='labeledimageattributeval',
            name='image',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='engine.LabeledImage'),
        ),
        # --- copy data old -> new (reversible) ----------------------------
        migrations.RunPython(
            code=copy_annotations_forward,
            reverse_code=copy_annotations_backward,
        ),
        # --- tear down the old per-type schema ----------------------------
        migrations.RemoveField(
            model_name='labeledbox',
            name='job',
        ),
        migrations.RemoveField(
            model_name='labeledbox',
            name='label',
        ),
        migrations.RemoveField(
            model_name='labeledboxattributeval',
            name='box',
        ),
        migrations.RemoveField(
            model_name='labeledboxattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='labeledpoints',
            name='job',
        ),
        migrations.RemoveField(
            model_name='labeledpoints',
            name='label',
        ),
        migrations.RemoveField(
            model_name='labeledpointsattributeval',
            name='points',
        ),
        migrations.RemoveField(
            model_name='labeledpointsattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='labeledpolygon',
            name='job',
        ),
        migrations.RemoveField(
            model_name='job',
            name='max_shape_id',
        ),
        migrations.RemoveField(
            model_name='labeledpolygon',
            name='label',
        ),
        migrations.RemoveField(
            model_name='labeledpolygonattributeval',
            name='polygon',
        ),
        migrations.RemoveField(
            model_name='labeledpolygonattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='labeledpolyline',
            name='job',
        ),
        migrations.RemoveField(
            model_name='labeledpolyline',
            name='label',
        ),
        migrations.RemoveField(
            model_name='labeledpolylineattributeval',
            name='polyline',
        ),
        migrations.RemoveField(
            model_name='labeledpolylineattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='objectpath',
            name='job',
        ),
        migrations.RemoveField(
            model_name='objectpath',
            name='label',
        ),
        migrations.RemoveField(
            model_name='objectpathattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='objectpathattributeval',
            name='track',
        ),
        migrations.RemoveField(
            model_name='trackedbox',
            name='track',
        ),
        migrations.RemoveField(
            model_name='trackedboxattributeval',
            name='box',
        ),
        migrations.RemoveField(
            model_name='trackedboxattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='trackedpoints',
            name='track',
        ),
        migrations.RemoveField(
            model_name='trackedpointsattributeval',
            name='points',
        ),
        migrations.RemoveField(
            model_name='trackedpointsattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='trackedpolygon',
            name='track',
        ),
        migrations.RemoveField(
            model_name='trackedpolygonattributeval',
            name='polygon',
        ),
        migrations.RemoveField(
            model_name='trackedpolygonattributeval',
            name='spec',
        ),
        migrations.RemoveField(
            model_name='trackedpolyline',
            name='track',
        ),
        migrations.RemoveField(
            model_name='trackedpolylineattributeval',
            name='polyline',
        ),
        migrations.RemoveField(
            model_name='trackedpolylineattributeval',
            name='spec',
        ),
        migrations.DeleteModel(
            name='LabeledBox',
        ),
        migrations.DeleteModel(
            name='LabeledBoxAttributeVal',
        ),
        migrations.DeleteModel(
            name='LabeledPoints',
        ),
        migrations.DeleteModel(
            name='LabeledPointsAttributeVal',
        ),
        migrations.DeleteModel(
            name='LabeledPolygon',
        ),
        migrations.DeleteModel(
            name='LabeledPolygonAttributeVal',
        ),
        migrations.DeleteModel(
            name='LabeledPolyline',
        ),
        migrations.DeleteModel(
            name='LabeledPolylineAttributeVal',
        ),
        migrations.DeleteModel(
            name='ObjectPath',
        ),
        migrations.DeleteModel(
            name='ObjectPathAttributeVal',
        ),
        migrations.DeleteModel(
            name='TrackedBox',
        ),
        migrations.DeleteModel(
            name='TrackedBoxAttributeVal',
        ),
        migrations.DeleteModel(
            name='TrackedPoints',
        ),
        migrations.DeleteModel(
            name='TrackedPointsAttributeVal',
        ),
        migrations.DeleteModel(
            name='TrackedPolygon',
        ),
        migrations.DeleteModel(
            name='TrackedPolygonAttributeVal',
        ),
        migrations.DeleteModel(
            name='TrackedPolyline',
        ),
        migrations.DeleteModel(
            name='TrackedPolylineAttributeVal',
        ),
    ]
@ -0,0 +1,31 @@
|
||||
# Generated by Django 2.1.7 on 2019-04-17 09:25
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Adds the JobCommit model: an append-only per-job changelog entry
    (version counter, author, message, timestamp) used to track
    annotation edits.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('engine', '0017_db_redesign_20190221'),
    ]

    operations = [
        migrations.CreateModel(
            name='JobCommit',
            fields=[
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('version', models.PositiveIntegerField(default=0)),
                # auto_now: stamped on every save.
                ('timestamp', models.DateTimeField(auto_now=True)),
                ('message', models.CharField(default='', max_length=4096)),
                # SET_NULL keeps the commit if the author account is deleted.
                ('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='commits', to='engine.Job')),
            ],
            options={
                'abstract': False,
                'default_permissions': (),
            },
        ),
    ]
@ -0,0 +1,366 @@
|
||||
# Copyright (C) 2019 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from rest_framework import serializers
|
||||
from django.contrib.auth.models import User, Group
|
||||
|
||||
from cvat.apps.engine import models
|
||||
from cvat.apps.engine.log import slogger
|
||||
|
||||
|
||||
class AttributeSerializer(serializers.ModelSerializer):
    """Serializer for AttributeSpec.

    The model stores the allowed values as one newline-separated string;
    the API exposes them as a JSON list, converted in both directions here.
    """
    class Meta:
        model = models.AttributeSpec
        fields = ('id', 'name', 'mutable', 'input_type', 'default_value',
            'values')

    # pylint: disable=no-self-use
    def to_internal_value(self, data):
        # Copy first so the caller's dict is not mutated; join the list of
        # values into the newline-separated storage format.
        attribute = data.copy()
        attribute['values'] = '\n'.join(data.get('values', []))
        return attribute

    def to_representation(self, instance):
        # Split the stored string back into a list for API output.
        attribute = super().to_representation(instance)
        attribute['values'] = attribute['values'].split('\n')
        return attribute
||||
class LabelSerializer(serializers.ModelSerializer):
    """Serializer for Label with its nested attribute specs."""
    # default=[] lets a label be submitted without any attributes.
    attributes = AttributeSerializer(many=True, source='attributespec_set',
        default=[])
    class Meta:
        model = models.Label
        fields = ('id', 'name', 'attributes')
class JobCommitSerializer(serializers.ModelSerializer):
    """Read-style serializer for a job's annotation changelog entries."""
    class Meta:
        model = models.JobCommit
        fields = ('id', 'version', 'author', 'message', 'timestamp')
class JobSerializer(serializers.ModelSerializer):
    """Serializer for Job, flattening segment/task info into the payload."""
    # Read-only fields pulled through the segment relation.
    task_id = serializers.ReadOnlyField(source="segment.task.id")
    start_frame = serializers.ReadOnlyField(source="segment.start_frame")
    stop_frame = serializers.ReadOnlyField(source="segment.stop_frame")

    class Meta:
        model = models.Job
        fields = ('url', 'id', 'assignee', 'status', 'start_frame',
            'stop_frame', 'task_id')
class SimpleJobSerializer(serializers.ModelSerializer):
    """Compact Job serializer for embedding inside SegmentSerializer."""
    class Meta:
        model = models.Job
        fields = ('url', 'id', 'assignee', 'status')
class SegmentSerializer(serializers.ModelSerializer):
    """Serializer for Segment with its nested jobs."""
    jobs = SimpleJobSerializer(many=True, source='job_set')

    class Meta:
        model = models.Segment
        fields = ('start_frame', 'stop_frame', 'jobs')
class ClientFileSerializer(serializers.ModelSerializer):
    """Serializer for files uploaded by the client.

    Input: a raw uploaded-file object, wrapped into the model field dict.
    Output: the file's path relative to the task's upload directory.
    """
    class Meta:
        model = models.ClientFile
        fields = ('file', )

    # pylint: disable=no-self-use
    def to_internal_value(self, data):
        return {'file': data}

    # pylint: disable=no-self-use
    def to_representation(self, instance):
        # os.path.relpath is robust to a trailing separator on upload_dir,
        # unlike the previous manual prefix slicing
        # (instance.file.path[len(upload_dir) + 1:]).
        upload_dir = instance.task.get_upload_dirname()
        return os.path.relpath(instance.file.path, upload_dir)
class ServerFileSerializer(serializers.ModelSerializer):
    """Serializer for files that already exist in the server's share.

    The 'file' field holds a plain path string, passed through unchanged
    in both directions.
    """
    class Meta:
        model = models.ServerFile
        fields = ('file', )

    # pylint: disable=no-self-use
    def to_internal_value(self, data):
        return {'file': data}

    # pylint: disable=no-self-use
    def to_representation(self, instance):
        return instance.file
class RemoteFileSerializer(serializers.ModelSerializer):
    """Serializer for files referenced by remote URL."""
    class Meta:
        model = models.RemoteFile
        fields = ('file', )
class RqStatusSerializer(serializers.Serializer):
    """Status payload for a background (RQ) job poll endpoint."""
    state = serializers.ChoiceField(choices=[
        "Queued", "Started", "Finished", "Failed"])
    message = serializers.CharField(allow_blank=True, default="")
class TaskDataSerializer(serializers.ModelSerializer):
    """Serializer for the three kinds of data files attached to a task:
    client uploads, server-share paths, and remote URLs.
    """
    client_files = ClientFileSerializer(many=True, source='clientfile_set',
        default=[])
    server_files = ServerFileSerializer(many=True, source='serverfile_set',
        default=[])
    remote_files = RemoteFileSerializer(many=True, source='remotefile_set',
        default=[])

    class Meta:
        model = models.Task
        fields = ('client_files', 'server_files', 'remote_files')

    # pylint: disable=no-self-use
    def update(self, instance, validated_data):
        """Attach every submitted file record (of all three kinds) to the
        task and return the task unchanged otherwise."""
        # One data-driven loop instead of three copies of the same code;
        # the (source key, model) order matches the field order above.
        file_kinds = (
            ('clientfile_set', models.ClientFile),
            ('serverfile_set', models.ServerFile),
            ('remotefile_set', models.RemoteFile),
        )
        for source_key, file_model in file_kinds:
            for file_data in validated_data.pop(source_key):
                file_model(task=instance, **file_data).save()

        return instance
class WriteOnceMixin:
    """Adds support for write once fields to serializers.

    Declare the protected fields on the serializer's Meta:
    ```
    class Meta:
        model = SomeModel
        fields = '__all__'
        write_once_fields = ('collection', )
    ```

    Fields named in `write_once_fields` may be set on POST (create) but
    become read-only for PUT/PATCH (update).
    Inspired by http://stackoverflow.com/a/37487134/627411.
    """

    def get_extra_kwargs(self):
        """Return extra kwargs, marking write-once fields read-only when
        the current view action is an update."""
        extra_kwargs = super().get_extra_kwargs()

        view = self.context.get('view')
        action = getattr(view, 'action', '')
        # Matches both 'update' and 'partial_update' (PUT/PATCH).
        if 'update' not in action:
            return extra_kwargs

        return self._set_write_once_fields(extra_kwargs)

    def _set_write_once_fields(self, extra_kwargs):
        """Set all fields in `Meta.write_once_fields` to read_only."""
        write_once_fields = getattr(self.Meta, 'write_once_fields', None)
        if not write_once_fields:
            return extra_kwargs

        if not isinstance(write_once_fields, (list, tuple)):
            raise TypeError(
                'The `write_once_fields` option must be a list or tuple. '
                'Got {}.'.format(type(write_once_fields).__name__)
            )

        for field_name in write_once_fields:
            # setdefault keeps any kwargs already present for the field.
            extra_kwargs.setdefault(field_name, {})['read_only'] = True

        return extra_kwargs
class TaskSerializer(WriteOnceMixin, serializers.ModelSerializer):
    """Full Task serializer: nested labels/attributes, read-only segments,
    and write-once fields (overlap, segment_size, image_quality)."""
    labels = LabelSerializer(many=True, source='label_set', partial=True)
    segments = SegmentSerializer(many=True, source='segment_set', read_only=True)
    image_quality = serializers.IntegerField(min_value=0, max_value=100)

    class Meta:
        model = models.Task
        fields = ('url', 'id', 'name', 'size', 'mode', 'owner', 'assignee',
            'bug_tracker', 'created_date', 'updated_date', 'overlap',
            'segment_size', 'z_order', 'flipped', 'status', 'labels', 'segments',
            'image_quality')
        read_only_fields = ('size', 'mode', 'created_date', 'updated_date',
            'status')
        write_once_fields = ('overlap', 'segment_size', 'image_quality')
        ordering = ['-id']

    # pylint: disable=no-self-use
    def create(self, validated_data):
        """Create the task DB row with its labels/attributes and prepare a
        fresh task directory tree (upload and data dirs)."""
        labels = validated_data.pop('label_set')
        db_task = models.Task.objects.create(size=0, **validated_data)
        for label in labels:
            attributes = label.pop('attributespec_set')
            db_label = models.Label.objects.create(task=db_task, **label)
            for attr in attributes:
                models.AttributeSpec.objects.create(label=db_label, **attr)

        # Remove any stale directory left over from a previously deleted
        # task with the same id before creating a clean tree.
        task_path = db_task.get_task_dirname()
        if os.path.isdir(task_path):
            shutil.rmtree(task_path)

        upload_dir = db_task.get_upload_dirname()
        os.makedirs(upload_dir)
        output_dir = db_task.get_data_dirname()
        os.makedirs(output_dir)

        return db_task

    # pylint: disable=no-self-use
    def update(self, instance, validated_data):
        """Update mutable task fields and upsert labels/attributes."""
        instance.name = validated_data.get('name', instance.name)
        instance.owner = validated_data.get('owner', instance.owner)
        instance.assignee = validated_data.get('assignee', instance.assignee)
        instance.bug_tracker = validated_data.get('bug_tracker',
            instance.bug_tracker)
        instance.z_order = validated_data.get('z_order', instance.z_order)
        instance.flipped = validated_data.get('flipped', instance.flipped)
        instance.image_quality = validated_data.get('image_quality',
            instance.image_quality)
        labels = validated_data.get('label_set', [])
        for label in labels:
            attributes = label.pop('attributespec_set', [])
            (db_label, created) = models.Label.objects.get_or_create(task=instance,
                name=label['name'])
            if created:
                slogger.task[instance.id].info("New {} label was created"
                    .format(db_label.name))
            else:
                slogger.task[instance.id].info("{} label was updated"
                    .format(db_label.name))
            for attr in attributes:
                (db_attr, created) = models.AttributeSpec.objects.get_or_create(
                    label=db_label, name=attr['name'], defaults=attr)
                if created:
                    slogger.task[instance.id].info("New {} attribute for {} label was created"
                        .format(db_attr.name, db_label.name))
                else:
                    slogger.task[instance.id].info("{} attribute for {} label was updated"
                        .format(db_attr.name, db_label.name))

                # FIXME: need to update only "safe" fields
                db_attr.default_value = attr.get('default_value', db_attr.default_value)
                db_attr.mutable = attr.get('mutable', db_attr.mutable)
                db_attr.input_type = attr.get('input_type', db_attr.input_type)
                db_attr.values = attr.get('values', db_attr.values)
                db_attr.save()

        # BUGFIX: the instance was mutated above but never saved, so field
        # changes (name, owner, assignee, ...) were silently dropped.
        instance.save()

        return instance
class UserSerializer(serializers.ModelSerializer):
|
||||
groups = serializers.SlugRelatedField(many=True,
|
||||
slug_field='name', queryset=Group.objects.all())
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ('url', 'id', 'username', 'first_name', 'last_name', 'email',
|
||||
'groups', 'is_staff', 'is_superuser', 'is_active', 'last_login',
|
||||
'date_joined', 'groups')
|
||||
read_only_fields = ('last_login', 'date_joined')
|
||||
write_only_fields = ('password', )
|
||||
ordering = ['-id']
|
||||
|
||||
class ExceptionSerializer(serializers.Serializer):
|
||||
system = serializers.CharField(max_length=255)
|
||||
client = serializers.CharField(max_length=255)
|
||||
time = serializers.DateTimeField()
|
||||
|
||||
job_id = serializers.IntegerField(required=False)
|
||||
task_id = serializers.IntegerField(required=False)
|
||||
proj_id = serializers.IntegerField(required=False)
|
||||
client_id = serializers.IntegerField()
|
||||
|
||||
message = serializers.CharField(max_length=4096)
|
||||
filename = serializers.URLField()
|
||||
line = serializers.IntegerField()
|
||||
column = serializers.IntegerField()
|
||||
stack = serializers.CharField(max_length=8192,
|
||||
style={'base_template': 'textarea.html'}, allow_blank=True)
|
||||
|
||||
class AboutSerializer(serializers.Serializer):
|
||||
name = serializers.CharField(max_length=128)
|
||||
description = serializers.CharField(max_length=2048)
|
||||
version = serializers.CharField(max_length=64)
|
||||
|
||||
class ImageMetaSerializer(serializers.Serializer):
|
||||
width = serializers.IntegerField()
|
||||
height = serializers.IntegerField()
|
||||
|
||||
class AttributeValSerializer(serializers.Serializer):
|
||||
spec_id = serializers.IntegerField()
|
||||
value = serializers.CharField(max_length=64, allow_blank=True)
|
||||
|
||||
def to_internal_value(self, data):
|
||||
data['value'] = str(data['value'])
|
||||
return super().to_internal_value(data)
|
||||
|
||||
class AnnotationSerializer(serializers.Serializer):
|
||||
id = serializers.IntegerField(default=None, allow_null=True)
|
||||
frame = serializers.IntegerField(min_value=0)
|
||||
label_id = serializers.IntegerField(min_value=0)
|
||||
group = serializers.IntegerField(min_value=0, allow_null=True)
|
||||
|
||||
class LabeledImageSerializer(AnnotationSerializer):
|
||||
attributes = AttributeValSerializer(many=True,
|
||||
source="labeledimageattributeval_set")
|
||||
|
||||
class ShapeSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=models.ShapeType.choices())
|
||||
occluded = serializers.BooleanField()
|
||||
z_order = serializers.IntegerField(default=0)
|
||||
points = serializers.ListField(
|
||||
child=serializers.FloatField(min_value=0)
|
||||
)
|
||||
|
||||
class LabeledShapeSerializer(ShapeSerializer, AnnotationSerializer):
|
||||
attributes = AttributeValSerializer(many=True,
|
||||
source="labeledshapeattributeval_set")
|
||||
|
||||
class TrackedShapeSerializer(ShapeSerializer):
|
||||
id = serializers.IntegerField(default=None, allow_null=True)
|
||||
frame = serializers.IntegerField(min_value=0)
|
||||
outside = serializers.BooleanField()
|
||||
attributes = AttributeValSerializer(many=True,
|
||||
source="trackedshapeattributeval_set")
|
||||
|
||||
class LabeledTrackSerializer(AnnotationSerializer):
|
||||
shapes = TrackedShapeSerializer(many=True, allow_empty=False,
|
||||
source="trackedshape_set")
|
||||
attributes = AttributeValSerializer(many=True,
|
||||
source="labeledtrackattributeval_set")
|
||||
|
||||
class LabeledDataSerializer(serializers.Serializer):
|
||||
version = serializers.IntegerField()
|
||||
tags = LabeledImageSerializer(many=True)
|
||||
shapes = LabeledShapeSerializer(many=True)
|
||||
tracks = LabeledTrackSerializer(many=True)
|
||||
|
||||
class FileInfoSerializer(serializers.Serializer):
|
||||
name = serializers.CharField(max_length=1024)
|
||||
type = serializers.ChoiceField(choices=["REG", "DIR"])
|
||||
|
||||
class PluginSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.Plugin
|
||||
fields = ('name', 'description', 'maintainer', 'created_at',
|
||||
'updated_at')
|
||||
|
||||
class LogEventSerializer(serializers.Serializer):
|
||||
job_id = serializers.IntegerField(required=False)
|
||||
task_id = serializers.IntegerField(required=False)
|
||||
proj_id = serializers.IntegerField(required=False)
|
||||
client_id = serializers.IntegerField()
|
||||
|
||||
name = serializers.CharField(max_length=64)
|
||||
time = serializers.DateTimeField()
|
||||
message = serializers.CharField(max_length=4096, required=False)
|
||||
payload = serializers.DictField(required=False)
|
||||
is_active = serializers.BooleanField()
|
||||
@ -0,0 +1,19 @@
|
||||
# Copyright (C) 2019 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from .models import Job, StatusChoice
|
||||
|
||||
def update_task_status(instance, **kwargs):
|
||||
db_task = instance.segment.task
|
||||
db_jobs = list(Job.objects.filter(segment__task_id=db_task.id))
|
||||
status = StatusChoice.COMPLETED
|
||||
if list(filter(lambda x: x.status == StatusChoice.ANNOTATION, db_jobs)):
|
||||
status = StatusChoice.ANNOTATION
|
||||
elif list(filter(lambda x: x.status == StatusChoice.VALIDATION, db_jobs)):
|
||||
status = StatusChoice.VALIDATION
|
||||
|
||||
if status != db_task.status:
|
||||
db_task.status = status
|
||||
db_task.save()
|
||||
|
||||
@ -1,882 +0,0 @@
|
||||
/*
|
||||
* defiant.js.js [v1.4.5]
|
||||
* http://www.defiantjs.com
|
||||
* Copyright (c) 2013-2017, Hakan Bilgin <hbi@longscript.com>
|
||||
* Licensed under the MIT License
|
||||
*/
|
||||
/*
|
||||
* x10.js v0.1.3
|
||||
* Web worker wrapper with simple interface
|
||||
*
|
||||
* Copyright (c) 2013-2015, Hakan Bilgin <hbi@longscript.com>
|
||||
* Licensed under the MIT License
|
||||
*/
|
||||
|
||||
(function(window, undefined) {
|
||||
//'use strict';
|
||||
|
||||
var x10 = {
|
||||
init: function() {
|
||||
return this;
|
||||
},
|
||||
work_handler: function(event) {
|
||||
var args = Array.prototype.slice.call(event.data, 1),
|
||||
func = event.data[0],
|
||||
ret = tree[func].apply(tree, args);
|
||||
|
||||
// return process finish
|
||||
postMessage([func, ret]);
|
||||
},
|
||||
setup: function(tree) {
|
||||
var url = window.URL || window.webkitURL,
|
||||
script = 'var tree = {'+ this.parse(tree).join(',') +'};',
|
||||
blob = new Blob([script + 'self.addEventListener("message", '+ this.work_handler.toString() +', false);'],
|
||||
{type: 'text/javascript'}),
|
||||
worker = new Worker(url.createObjectURL(blob));
|
||||
|
||||
// thread pipe
|
||||
worker.onmessage = function(event) {
|
||||
var args = Array.prototype.slice.call(event.data, 1),
|
||||
func = event.data[0];
|
||||
x10.observer.emit('x10:'+ func, args);
|
||||
};
|
||||
|
||||
return worker;
|
||||
},
|
||||
call_handler: function(func, worker) {
|
||||
return function() {
|
||||
var args = Array.prototype.slice.call(arguments, 0, -1),
|
||||
callback = arguments[arguments.length-1];
|
||||
|
||||
// add method name
|
||||
args.unshift(func);
|
||||
|
||||
// listen for 'done'
|
||||
x10.observer.on('x10:'+ func, function(event) {
|
||||
callback(event.detail[0]);
|
||||
});
|
||||
|
||||
// start worker
|
||||
worker.postMessage(args);
|
||||
};
|
||||
},
|
||||
compile: function(hash) {
|
||||
var worker = this.setup(typeof(hash) === 'function' ? {func: hash} : hash),
|
||||
obj = {},
|
||||
fn;
|
||||
// create return object
|
||||
if (typeof(hash) === 'function') {
|
||||
obj.func = this.call_handler('func', worker);
|
||||
return obj.func;
|
||||
} else {
|
||||
for (fn in hash) {
|
||||
obj[fn] = this.call_handler(fn, worker);
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
},
|
||||
parse: function(tree, isArray) {
|
||||
var hash = [],
|
||||
key,
|
||||
val,
|
||||
v;
|
||||
|
||||
for (key in tree) {
|
||||
v = tree[key];
|
||||
// handle null
|
||||
if (v === null) {
|
||||
hash.push(key +':null');
|
||||
continue;
|
||||
}
|
||||
// handle undefined
|
||||
if (v === undefined) {
|
||||
hash.push(key +':undefined');
|
||||
continue;
|
||||
}
|
||||
switch (v.constructor) {
|
||||
case Date: val = 'new Date('+ v.valueOf() +')'; break;
|
||||
case Object: val = '{'+ this.parse(v).join(',') +'}'; break;
|
||||
case Array: val = '['+ this.parse(v, true).join(',') +']'; break;
|
||||
case String: val = '"'+ v.replace(/"/g, '\\"') +'"'; break;
|
||||
case RegExp:
|
||||
case Function: val = v.toString(); break;
|
||||
default: val = v;
|
||||
}
|
||||
if (isArray) hash.push(val);
|
||||
else hash.push(key +':'+ val);
|
||||
}
|
||||
return hash;
|
||||
},
|
||||
// simple event emitter
|
||||
observer: (function() {
|
||||
var stack = {};
|
||||
|
||||
return {
|
||||
on: function(type, fn) {
|
||||
if (!stack[type]) {
|
||||
stack[type] = [];
|
||||
}
|
||||
stack[type].unshift(fn);
|
||||
},
|
||||
off: function(type, fn) {
|
||||
if (!stack[type]) return;
|
||||
var i = stack[type].indexOf(fn);
|
||||
stack[type].splice(i,1);
|
||||
},
|
||||
emit: function(type, detail) {
|
||||
if (!stack[type]) return;
|
||||
var event = {
|
||||
type : type,
|
||||
detail : detail,
|
||||
isCanceled : false,
|
||||
cancelBubble : function() {
|
||||
this.isCanceled = true;
|
||||
}
|
||||
},
|
||||
len = stack[type].length;
|
||||
while(len--) {
|
||||
if (event.isCanceled) return;
|
||||
stack[type][len](event);
|
||||
}
|
||||
}
|
||||
};
|
||||
})()
|
||||
};
|
||||
|
||||
if (typeof module === "undefined") {
|
||||
// publish x10
|
||||
window.x10 = x10.init();
|
||||
} else {
|
||||
module.exports = x10.init();
|
||||
}
|
||||
|
||||
})(this);
|
||||
|
||||
|
||||
(function(window, module, undefined) {
|
||||
'use strict';
|
||||
|
||||
var Defiant = {
|
||||
is_ie : /(msie|trident)/i.test(navigator.userAgent),
|
||||
is_safari : /safari/i.test(navigator.userAgent),
|
||||
env : 'production',
|
||||
xml_decl : '<?xml version="1.0" encoding="utf-8"?>',
|
||||
namespace : 'xmlns:d="defiant-namespace"',
|
||||
tabsize : 4,
|
||||
render: function(template, data) {
|
||||
var processor = new XSLTProcessor(),
|
||||
span = document.createElement('span'),
|
||||
opt = {match: '/'},
|
||||
tmpltXpath,
|
||||
scripts,
|
||||
temp,
|
||||
sorter;
|
||||
// handle arguments
|
||||
switch (typeof(template)) {
|
||||
case 'object':
|
||||
this.extend(opt, template);
|
||||
if (!opt.data) opt.data = data;
|
||||
break;
|
||||
case 'string':
|
||||
opt.template = template;
|
||||
opt.data = data;
|
||||
break;
|
||||
default:
|
||||
throw 'error';
|
||||
}
|
||||
opt.data = JSON.toXML(opt.data);
|
||||
tmpltXpath = '//xsl:template[@name="'+ opt.template +'"]';
|
||||
|
||||
if (!this.xsl_template) this.gatherTemplates();
|
||||
|
||||
if (opt.sorter) {
|
||||
sorter = this.node.selectSingleNode(this.xsl_template, tmpltXpath +'//xsl:for-each//xsl:sort');
|
||||
if (sorter) {
|
||||
if (opt.sorter.order) sorter.setAttribute('order', opt.sorter.order);
|
||||
if (opt.sorter.select) sorter.setAttribute('select', opt.sorter.select);
|
||||
sorter.setAttribute('data-type', opt.sorter.type || 'text');
|
||||
}
|
||||
}
|
||||
|
||||
temp = this.node.selectSingleNode(this.xsl_template, tmpltXpath);
|
||||
temp.setAttribute('match', opt.match);
|
||||
processor.importStylesheet(this.xsl_template);
|
||||
span.appendChild(processor.transformToFragment(opt.data, document));
|
||||
temp.removeAttribute('match');
|
||||
|
||||
if (this.is_safari) {
|
||||
scripts = span.getElementsByTagName('script');
|
||||
for (var i=0, il=scripts.length; i<il; i++) scripts[i].defer = true;
|
||||
}
|
||||
return span.innerHTML;
|
||||
},
|
||||
gatherTemplates: function() {
|
||||
var scripts = document.getElementsByTagName('script'),
|
||||
str = '',
|
||||
i = 0,
|
||||
il = scripts.length;
|
||||
for (; i<il; i++) {
|
||||
if (scripts[i].type === 'defiant/xsl-template') str += scripts[i].innerHTML;
|
||||
}
|
||||
this.xsl_template = this.xmlFromString('<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:xlink="http://www.w3.org/1999/xlink" '+ this.namespace +'>'+ str.replace(/defiant:(\w+)/g, '$1') +'</xsl:stylesheet>');
|
||||
},
|
||||
getSnapshot: function(data, callback) {
|
||||
return JSON.toXML(data, callback || true);
|
||||
},
|
||||
xmlFromString: function(str) {
|
||||
var parser,
|
||||
doc;
|
||||
str = str.replace(/>\s{1,}</g, '><');
|
||||
if (str.trim().match(/<\?xml/) === null) {
|
||||
str = this.xml_decl + str;
|
||||
}
|
||||
if ( 'ActiveXObject' in window ) {
|
||||
doc = new ActiveXObject('Msxml2.DOMDocument');
|
||||
doc.loadXML(str);
|
||||
doc.setProperty('SelectionNamespaces', this.namespace);
|
||||
if (str.indexOf('xsl:stylesheet') === -1) {
|
||||
doc.setProperty('SelectionLanguage', 'XPath');
|
||||
}
|
||||
} else {
|
||||
parser = new DOMParser();
|
||||
doc = parser.parseFromString(str, 'text/xml');
|
||||
}
|
||||
return doc;
|
||||
},
|
||||
extend: function(src, dest) {
|
||||
for (var content in dest) {
|
||||
if (!src[content] || typeof(dest[content]) !== 'object') {
|
||||
src[content] = dest[content];
|
||||
} else {
|
||||
this.extend(src[content], dest[content]);
|
||||
}
|
||||
}
|
||||
return src;
|
||||
},
|
||||
node: {}
|
||||
};
|
||||
|
||||
// Export
|
||||
window.Defiant = module.exports = Defiant;
|
||||
|
||||
})(
|
||||
typeof window !== 'undefined' ? window : {},
|
||||
typeof module !== 'undefined' ? module : {}
|
||||
);
|
||||
|
||||
|
||||
if (typeof(XSLTProcessor) === 'undefined') {
|
||||
|
||||
// emulating XSLT Processor (enough to be used in defiant)
|
||||
var XSLTProcessor = function() {};
|
||||
XSLTProcessor.prototype = {
|
||||
importStylesheet: function(xsldoc) {
|
||||
this.xsldoc = xsldoc;
|
||||
},
|
||||
transformToFragment: function(data, doc) {
|
||||
var str = data.transformNode(this.xsldoc),
|
||||
span = document.createElement('span');
|
||||
span.innerHTML = str;
|
||||
return span;
|
||||
}
|
||||
};
|
||||
|
||||
} else if (typeof(XSLTProcessor) !== 'function' && !XSLTProcessor) {
|
||||
|
||||
// throw error
|
||||
throw 'XSLTProcessor transformNode not implemented';
|
||||
|
||||
}
|
||||
|
||||
|
||||
// extending STRING
|
||||
if (!String.prototype.fill) {
|
||||
String.prototype.fill = function(i,c) {
|
||||
var str = this;
|
||||
c = c || ' ';
|
||||
for (; str.length<i; str+=c){}
|
||||
return str;
|
||||
};
|
||||
}
|
||||
|
||||
if (!String.prototype.trim) {
|
||||
String.prototype.trim = function () {
|
||||
return this.replace(/^\s+|\s+$/gm, '');
|
||||
};
|
||||
}
|
||||
|
||||
if (!String.prototype.xTransform) {
|
||||
String.prototype.xTransform = function () {
|
||||
var str = this;
|
||||
if (this.indexOf('translate(') === -1) {
|
||||
str = this.replace(/contains\(([^,]+),([^\\)]+)\)/g, function(c,h,n) {
|
||||
var a = 'abcdefghijklmnopqrstuvwxyz';
|
||||
return "contains(translate("+ h +", \""+ a.toUpperCase() +"\", \""+ a +"\"),"+ n.toLowerCase() +")";
|
||||
});
|
||||
}
|
||||
return str.toString();
|
||||
};
|
||||
}
|
||||
|
||||
/* jshint ignore:start */
|
||||
if (typeof(JSON) === 'undefined') {
|
||||
window.JSON = {
|
||||
parse: function (sJSON) { return eval("(" + sJSON + ")"); },
|
||||
stringify: function (vContent) {
|
||||
if (vContent instanceof Object) {
|
||||
var sOutput = "";
|
||||
if (vContent.constructor === Array) {
|
||||
for (var nId = 0; nId < vContent.length; sOutput += this.stringify(vContent[nId]) + ",", nId++);
|
||||
return "[" + sOutput.substr(0, sOutput.length - 1) + "]";
|
||||
}
|
||||
if (vContent.toString !== Object.prototype.toString) {
|
||||
return "\"" + vContent.toString().replace(/"/g, "\\$&") + "\"";
|
||||
}
|
||||
for (var sProp in vContent) {
|
||||
sOutput += "\"" + sProp.replace(/"/g, "\\$&") + "\":" + this.stringify(vContent[sProp]) + ",";
|
||||
}
|
||||
return "{" + sOutput.substr(0, sOutput.length - 1) + "}";
|
||||
}
|
||||
return typeof vContent === "string" ? "\"" + vContent.replace(/"/g, "\\$&") + "\"" : String(vContent);
|
||||
}
|
||||
};
|
||||
}
|
||||
/* jshint ignore:end */
|
||||
|
||||
if (!JSON.toXML) {
|
||||
JSON.toXML = function(tree, callback) {
|
||||
'use strict';
|
||||
|
||||
var interpreter = {
|
||||
map : [],
|
||||
rx_validate_name : /^(?!xml)[a-z_][\w\d.:]*$/i,
|
||||
rx_node : /<(.+?)( .*?)>/,
|
||||
rx_constructor : /<(.+?)( d:contr=".*?")>/,
|
||||
rx_namespace : / xmlns\:d="defiant\-namespace"/,
|
||||
rx_data : /(<.+?>)(.*?)(<\/d:data>)/i,
|
||||
rx_function : /function (\w+)/i,
|
||||
namespace : 'xmlns:d="defiant-namespace"',
|
||||
to_xml_str: function(tree) {
|
||||
return {
|
||||
str: this.hash_to_xml(null, tree),
|
||||
map: this.map
|
||||
};
|
||||
},
|
||||
hash_to_xml: function(name, tree, array_child) {
|
||||
var is_array = tree.constructor === Array,
|
||||
self = this,
|
||||
elem = [],
|
||||
attr = [],
|
||||
key,
|
||||
val,
|
||||
val_is_array,
|
||||
type,
|
||||
is_attr,
|
||||
cname,
|
||||
constr,
|
||||
cnName,
|
||||
i,
|
||||
il,
|
||||
fn = function(key, tree) {
|
||||
val = tree[key];
|
||||
if (val === null || val === undefined || val.toString() === 'NaN') val = null;
|
||||
|
||||
is_attr = key.slice(0,1) === '@';
|
||||
cname = array_child ? name : key;
|
||||
if (cname == +cname && tree.constructor !== Object) cname = 'd:item';
|
||||
if (val === null) {
|
||||
constr = null;
|
||||
cnName = false;
|
||||
} else {
|
||||
constr = val.constructor;
|
||||
cnName = constr.toString().match(self.rx_function)[1];
|
||||
}
|
||||
|
||||
if (is_attr) {
|
||||
attr.push( cname.slice(1) +'="'+ self.escape_xml(val) +'"' );
|
||||
if (cnName !== 'String') attr.push( 'd:'+ cname.slice(1) +'="'+ cnName +'"' );
|
||||
} else if (val === null) {
|
||||
elem.push( self.scalar_to_xml( cname, val ) );
|
||||
} else {
|
||||
switch (constr) {
|
||||
case Function:
|
||||
// if constructor is function, then it's not a JSON structure
|
||||
throw 'JSON data should not contain functions. Please check your structure.';
|
||||
/* falls through */
|
||||
case Object:
|
||||
elem.push( self.hash_to_xml( cname, val ) );
|
||||
break;
|
||||
case Array:
|
||||
if (key === cname) {
|
||||
val_is_array = val.constructor === Array;
|
||||
if (val_is_array) {
|
||||
i = val.length;
|
||||
while (i--) {
|
||||
if (val[i] === null || !val[i] || val[i].constructor === Array) val_is_array = true;
|
||||
if (!val_is_array && val[i].constructor === Object) val_is_array = true;
|
||||
}
|
||||
}
|
||||
elem.push( self.scalar_to_xml( cname, val, val_is_array ) );
|
||||
break;
|
||||
}
|
||||
/* falls through */
|
||||
case String:
|
||||
if (typeof(val) === 'string') {
|
||||
val = val.toString().replace(/\&/g, '&')
|
||||
.replace(/\r|\n/g, ' ');
|
||||
}
|
||||
if (cname === '#text') {
|
||||
// prepare map
|
||||
self.map.push(tree);
|
||||
attr.push('d:mi="'+ self.map.length +'"');
|
||||
attr.push('d:constr="'+ cnName +'"');
|
||||
elem.push( self.escape_xml(val) );
|
||||
break;
|
||||
}
|
||||
/* falls through */
|
||||
case Number:
|
||||
case Boolean:
|
||||
if (cname === '#text' && cnName !== 'String') {
|
||||
// prepare map
|
||||
self.map.push(tree);
|
||||
attr.push('d:mi="'+ self.map.length +'"');
|
||||
attr.push('d:constr="'+ cnName +'"');
|
||||
elem.push( self.escape_xml(val) );
|
||||
break;
|
||||
}
|
||||
elem.push( self.scalar_to_xml( cname, val ) );
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
if (tree.constructor === Array) {
|
||||
i = 0;
|
||||
il = tree.length;
|
||||
for (; i<il; i++) {
|
||||
fn(i.toString(), tree);
|
||||
}
|
||||
} else {
|
||||
for (key in tree) {
|
||||
fn(key, tree);
|
||||
}
|
||||
}
|
||||
if (!name) {
|
||||
name = 'd:data';
|
||||
attr.push(this.namespace);
|
||||
if (is_array) attr.push('d:constr="Array"');
|
||||
}
|
||||
if (name.match(this.rx_validate_name) === null) {
|
||||
attr.push( 'd:name="'+ name +'"' );
|
||||
name = 'd:name';
|
||||
}
|
||||
if (array_child) return elem.join('');
|
||||
// prepare map
|
||||
this.map.push(tree);
|
||||
attr.push('d:mi="'+ this.map.length +'"');
|
||||
|
||||
return '<'+ name + (attr.length ? ' '+ attr.join(' ') : '') + (elem.length ? '>'+ elem.join('') +'</'+ name +'>' : '/>' );
|
||||
},
|
||||
scalar_to_xml: function(name, val, override) {
|
||||
var attr = '',
|
||||
text,
|
||||
constr,
|
||||
cnName;
|
||||
|
||||
// check whether the nodename is valid
|
||||
if (name.match(this.rx_validate_name) === null) {
|
||||
attr += ' d:name="'+ name +'"';
|
||||
name = 'd:name';
|
||||
override = false;
|
||||
}
|
||||
if (val === null || val.toString() === 'NaN') val = null;
|
||||
if (val === null) return '<'+ name +' d:constr="null"/>';
|
||||
if (val.length === 1 && val.constructor === Array && !val[0]) {
|
||||
return '<'+ name +' d:constr="null" d:type="ArrayItem"/>';
|
||||
}
|
||||
if (val.length === 1 && val[0].constructor === Object) {
|
||||
|
||||
text = this.hash_to_xml(false, val[0]);
|
||||
|
||||
var a1 = text.match(this.rx_node),
|
||||
a2 = text.match(this.rx_constructor);
|
||||
a1 = (a1 !== null)? a1[2]
|
||||
.replace(this.rx_namespace, '')
|
||||
.replace(/>/, '')
|
||||
.replace(/"\/$/, '"') : '';
|
||||
a2 = (a2 !== null)? a2[2] : '';
|
||||
|
||||
text = text.match(this.rx_data);
|
||||
text = (text !== null)? text[2] : '';
|
||||
|
||||
return '<'+ name + a1 +' '+ a2 +' d:type="ArrayItem">'+ text +'</'+ name +'>';
|
||||
} else if (val.length === 0 && val.constructor === Array) {
|
||||
return '<'+ name +' d:constr="Array"/>';
|
||||
}
|
||||
// else
|
||||
if (override) {
|
||||
return this.hash_to_xml( name, val, true );
|
||||
}
|
||||
|
||||
constr = val.constructor;
|
||||
cnName = constr.toString().match(this.rx_function)[1];
|
||||
text = (constr === Array) ? this.hash_to_xml( 'd:item', val, true )
|
||||
: this.escape_xml(val);
|
||||
|
||||
attr += ' d:constr="'+ cnName +'"';
|
||||
// prepare map
|
||||
this.map.push(val);
|
||||
attr += ' d:mi="'+ this.map.length +'"';
|
||||
|
||||
return (name === '#text') ? this.escape_xml(val) : '<'+ name + attr +'>'+ text +'</'+ name +'>';
|
||||
},
|
||||
escape_xml: function(text) {
|
||||
return String(text) .replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"')
|
||||
.replace(/ /g, ' ');
|
||||
}
|
||||
},
|
||||
processed,
|
||||
doc,
|
||||
task;
|
||||
// depending on request
|
||||
switch (typeof callback) {
|
||||
case 'function':
|
||||
// compile interpreter with 'x10.js'
|
||||
task = x10.compile(interpreter);
|
||||
|
||||
// parse in a dedicated thread
|
||||
task.to_xml_str(tree, function(processed) {
|
||||
// snapshot distinctly improves performance
|
||||
callback({
|
||||
doc: Defiant.xmlFromString(processed.str),
|
||||
src: tree,
|
||||
map: processed.map
|
||||
});
|
||||
});
|
||||
return;
|
||||
case 'boolean':
|
||||
processed = interpreter.to_xml_str.call(interpreter, tree);
|
||||
// return snapshot
|
||||
return {
|
||||
doc: Defiant.xmlFromString(processed.str),
|
||||
src: tree,
|
||||
map: processed.map
|
||||
};
|
||||
default:
|
||||
processed = interpreter.to_xml_str.call(interpreter, tree);
|
||||
doc = Defiant.xmlFromString(processed.str);
|
||||
|
||||
this.search.map = processed.map;
|
||||
return doc;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
if (!JSON.search) {
|
||||
JSON.search = function(tree, xpath, single) {
|
||||
'use strict';
|
||||
|
||||
var isSnapshot = tree.doc && tree.doc.nodeType,
|
||||
doc = isSnapshot ? tree.doc : JSON.toXML(tree),
|
||||
map = isSnapshot ? tree.map : this.search.map,
|
||||
src = isSnapshot ? tree.src : tree,
|
||||
xres = Defiant.node[ single ? 'selectSingleNode' : 'selectNodes' ](doc, xpath.xTransform()),
|
||||
ret = [],
|
||||
mapIndex,
|
||||
i;
|
||||
|
||||
if (single) xres = [xres];
|
||||
i = xres.length;
|
||||
|
||||
while (i--) {
|
||||
switch(xres[i].nodeType) {
|
||||
case 2:
|
||||
case 3:
|
||||
ret.unshift( xres[i].nodeValue );
|
||||
break;
|
||||
default:
|
||||
mapIndex = +xres[i].getAttribute('d:mi');
|
||||
//if (map[mapIndex-1] !== false) {
|
||||
ret.unshift( map[mapIndex-1] );
|
||||
//}
|
||||
}
|
||||
}
|
||||
|
||||
// if environment = development, add search tracing
|
||||
if (Defiant.env === 'development') {
|
||||
this.trace = JSON.mtrace(src, ret, xres);
|
||||
}
|
||||
|
||||
return ret;
|
||||
};
|
||||
}
|
||||
|
||||
if (!JSON.mtrace) {
|
||||
JSON.mtrace = function(root, hits, xres) {
|
||||
'use strict';
|
||||
|
||||
var win = window,
|
||||
stringify = JSON.stringify,
|
||||
sroot = stringify( root, null, '\t' ).replace(/\t/g, ''),
|
||||
trace = [],
|
||||
i = 0,
|
||||
il = xres.length,
|
||||
od = il ? xres[i].ownerDocument.documentElement : false,
|
||||
map = this.search.map,
|
||||
hstr,
|
||||
cConstr,
|
||||
fIndex = 0,
|
||||
mIndex,
|
||||
lStart,
|
||||
lEnd;
|
||||
|
||||
for (; i<il; i++) {
|
||||
switch (xres[i].nodeType) {
|
||||
case 2:
|
||||
cConstr = xres[i].ownerElement ? xres[i].ownerElement.getAttribute('d:'+ xres[i].nodeName) : 'String';
|
||||
hstr = '"@'+ xres[i].nodeName +'": '+ win[ cConstr ]( hits[i] );
|
||||
mIndex = sroot.indexOf(hstr);
|
||||
lEnd = 0;
|
||||
break;
|
||||
case 3:
|
||||
cConstr = xres[i].parentNode.getAttribute('d:constr');
|
||||
hstr = win[ cConstr ]( hits[i] );
|
||||
hstr = '"'+ xres[i].parentNode.nodeName +'": '+ (hstr === 'Number' ? hstr : '"'+ hstr +'"');
|
||||
mIndex = sroot.indexOf(hstr);
|
||||
lEnd = 0;
|
||||
break;
|
||||
default:
|
||||
if (xres[i] === od) continue;
|
||||
if (xres[i].getAttribute('d:constr') === 'String' || xres[i].getAttribute('d:constr') === 'Number') {
|
||||
cConstr = xres[i].getAttribute('d:constr');
|
||||
hstr = win[ cConstr ]( hits[i] );
|
||||
mIndex = sroot.indexOf(hstr, fIndex);
|
||||
hstr = '"'+ xres[i].nodeName +'": '+ (cConstr === 'Number' ? hstr : '"'+ hstr +'"');
|
||||
lEnd = 0;
|
||||
fIndex = mIndex + 1;
|
||||
} else {
|
||||
hstr = stringify( hits[i], null, '\t' ).replace(/\t/g, '');
|
||||
mIndex = sroot.indexOf(hstr);
|
||||
lEnd = hstr.match(/\n/g).length;
|
||||
}
|
||||
}
|
||||
lStart = sroot.substring(0,mIndex).match(/\n/g).length+1;
|
||||
trace.push([lStart, lEnd]);
|
||||
}
|
||||
|
||||
return trace;
|
||||
};
|
||||
}
|
||||
|
||||
Defiant.node.selectNodes = function(XNode, XPath) {
|
||||
if (XNode.evaluate) {
|
||||
var ns = XNode.createNSResolver(XNode.documentElement),
|
||||
qI = XNode.evaluate(XPath, XNode, ns, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null),
|
||||
res = [],
|
||||
i = 0,
|
||||
il = qI.snapshotLength;
|
||||
for (; i<il; i++) {
|
||||
res.push( qI.snapshotItem(i) );
|
||||
}
|
||||
return res;
|
||||
} else {
|
||||
return XNode.selectNodes(XPath);
|
||||
}
|
||||
};
|
||||
Defiant.node.selectSingleNode = function(XNode, XPath) {
|
||||
if (XNode.evaluate) {
|
||||
var xI = this.selectNodes(XNode, XPath);
|
||||
return (xI.length > 0)? xI[0] : null;
|
||||
} else {
|
||||
return XNode.selectSingleNode(XPath);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
Defiant.node.prettyPrint = function(node) {
|
||||
var root = Defiant,
|
||||
tabs = root.tabsize,
|
||||
decl = root.xml_decl.toLowerCase(),
|
||||
ser,
|
||||
xstr;
|
||||
if (root.is_ie) {
|
||||
xstr = node.xml;
|
||||
} else {
|
||||
ser = new XMLSerializer();
|
||||
xstr = ser.serializeToString(node);
|
||||
}
|
||||
if (root.env !== 'development') {
|
||||
// if environment is not development, remove defiant related info
|
||||
xstr = xstr.replace(/ \w+\:d=".*?"| d\:\w+=".*?"/g, '');
|
||||
}
|
||||
var str = xstr.trim().replace(/(>)\s*(<)(\/*)/g, '$1\n$2$3'),
|
||||
lines = str.split('\n'),
|
||||
indent = -1,
|
||||
i = 0,
|
||||
il = lines.length,
|
||||
start,
|
||||
end;
|
||||
for (; i<il; i++) {
|
||||
if (i === 0 && lines[i].toLowerCase() === decl) continue;
|
||||
start = lines[i].match(/<[A-Za-z_\:]+.*?>/g) !== null;
|
||||
//start = lines[i].match(/<[^\/]+>/g) !== null;
|
||||
end = lines[i].match(/<\/[\w\:]+>/g) !== null;
|
||||
if (lines[i].match(/<.*?\/>/g) !== null) start = end = true;
|
||||
if (start) indent++;
|
||||
lines[i] = String().fill(indent, '\t') + lines[i];
|
||||
if (start && end) indent--;
|
||||
if (!start && end) indent--;
|
||||
}
|
||||
return lines.join('\n').replace(/\t/g, String().fill(tabs, ' '));
|
||||
};
|
||||
|
||||
|
||||
Defiant.node.toJSON = function(xnode, stringify) {
|
||||
'use strict';
|
||||
|
||||
var interpret = function(leaf) {
|
||||
var obj = {},
|
||||
win = window,
|
||||
attr,
|
||||
type,
|
||||
item,
|
||||
cname,
|
||||
cConstr,
|
||||
cval,
|
||||
text,
|
||||
i, il, a;
|
||||
|
||||
switch (leaf.nodeType) {
|
||||
case 1:
|
||||
cConstr = leaf.getAttribute('d:constr');
|
||||
if (cConstr === 'Array') obj = [];
|
||||
else if (cConstr === 'String' && leaf.textContent === '') obj = '';
|
||||
|
||||
attr = leaf.attributes;
|
||||
i = 0;
|
||||
il = attr.length;
|
||||
for (; i<il; i++) {
|
||||
a = attr.item(i);
|
||||
if (a.nodeName.match(/\:d|d\:/g) !== null) continue;
|
||||
|
||||
cConstr = leaf.getAttribute('d:'+ a.nodeName);
|
||||
if (cConstr && cConstr !== 'undefined') {
|
||||
if (a.nodeValue === 'null') cval = null;
|
||||
else cval = win[ cConstr ]( (a.nodeValue === 'false') ? '' : a.nodeValue );
|
||||
} else {
|
||||
cval = a.nodeValue;
|
||||
}
|
||||
obj['@'+ a.nodeName] = cval;
|
||||
}
|
||||
break;
|
||||
case 3:
|
||||
type = leaf.parentNode.getAttribute('d:type');
|
||||
cval = (type) ? win[ type ]( leaf.nodeValue === 'false' ? '' : leaf.nodeValue ) : leaf.nodeValue;
|
||||
obj = cval;
|
||||
break;
|
||||
}
|
||||
if (leaf.hasChildNodes()) {
|
||||
i = 0;
|
||||
il = leaf.childNodes.length;
|
||||
for(; i<il; i++) {
|
||||
item = leaf.childNodes.item(i);
|
||||
cname = item.nodeName;
|
||||
attr = leaf.attributes;
|
||||
|
||||
if (cname === 'd:name') {
|
||||
cname = item.getAttribute('d:name');
|
||||
}
|
||||
if (cname === '#text') {
|
||||
cConstr = leaf.getAttribute('d:constr');
|
||||
if (cConstr === 'undefined') cConstr = undefined;
|
||||
text = item.textContent || item.text;
|
||||
cval = cConstr === 'Boolean' && text === 'false' ? '' : text;
|
||||
|
||||
if (!cConstr && !attr.length) obj = cval;
|
||||
else if (cConstr && il === 1) {
|
||||
obj = win[cConstr](cval);
|
||||
} else if (!leaf.hasChildNodes()) {
|
||||
obj[cname] = (cConstr)? win[cConstr](cval) : cval;
|
||||
} else {
|
||||
if (attr.length < 3) obj = (cConstr)? win[cConstr](cval) : cval;
|
||||
else obj[cname] = (cConstr)? win[cConstr](cval) : cval;
|
||||
}
|
||||
} else {
|
||||
if (item.getAttribute('d:constr') === 'null') {
|
||||
if (obj[cname] && obj[cname].push) obj[cname].push(null);
|
||||
else if (item.getAttribute('d:type') === 'ArrayItem') obj[cname] = [obj[cname]];
|
||||
else obj[cname] = null;
|
||||
continue;
|
||||
}
|
||||
if (obj[cname]) {
|
||||
if (obj[cname].push) obj[cname].push(interpret(item));
|
||||
else obj[cname] = [obj[cname], interpret(item)];
|
||||
continue;
|
||||
}
|
||||
cConstr = item.getAttribute('d:constr');
|
||||
switch (cConstr) {
|
||||
case 'null':
|
||||
if (obj.push) obj.push(null);
|
||||
else obj[cname] = null;
|
||||
break;
|
||||
case 'Array':
|
||||
//console.log( Defiant.node.prettyPrint(item) );
|
||||
if (item.parentNode.firstChild === item && cConstr === 'Array' && cname !== 'd:item') {
|
||||
if (cname === 'd:item' || cConstr === 'Array') {
|
||||
cval = interpret(item);
|
||||
obj[cname] = cval.length ? [cval] : cval;
|
||||
} else {
|
||||
obj[cname] = interpret(item);
|
||||
}
|
||||
}
|
||||
else if (obj.push) obj.push( interpret(item) );
|
||||
else obj[cname] = interpret(item);
|
||||
break;
|
||||
case 'String':
|
||||
case 'Number':
|
||||
case 'Boolean':
|
||||
text = item.textContent || item.text;
|
||||
cval = cConstr === 'Boolean' && text === 'false' ? '' : text;
|
||||
|
||||
if (obj.push) obj.push( win[cConstr](cval) );
|
||||
else obj[cname] = interpret(item);
|
||||
break;
|
||||
default:
|
||||
if (obj.push) obj.push( interpret( item ) );
|
||||
else obj[cname] = interpret( item );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (leaf.nodeType === 1 && leaf.getAttribute('d:type') === 'ArrayItem') {
|
||||
obj = [obj];
|
||||
}
|
||||
return obj;
|
||||
},
|
||||
node = (xnode.nodeType === 9) ? xnode.documentElement : xnode,
|
||||
ret = interpret(node),
|
||||
rn = ret[node.nodeName];
|
||||
|
||||
// exclude root, if "this" is root node
|
||||
if (node === node.ownerDocument.documentElement && rn && rn.constructor === Array) {
|
||||
ret = rn;
|
||||
}
|
||||
if (stringify && stringify.toString() === 'true') stringify = '\t';
|
||||
return stringify ? JSON.stringify(ret, null, stringify) : ret;
|
||||
};
|
||||
|
||||
|
||||
// If jQuery is loaded, expose Defiant rendering as a jQuery plugin:
// $(el).defiant(template, xpath) replaces the element's HTML with the
// rendered output and returns the jQuery set for chaining.
if (typeof jQuery !== 'undefined') {
    (function ($) {
        'use strict';

        $.fn.defiant = function (template, xpath) {
            this.html(Defiant.render(template, xpath));
            return this;
        };
    }(jQuery));
}
|
||||
@ -0,0 +1,402 @@
|
||||
/* exported buildAnnotationSaver */
|
||||
|
||||
/* global
|
||||
showOverlay:false
|
||||
showMessage:false
|
||||
Listener:false
|
||||
Logger:false
|
||||
Mousetrap:false
|
||||
*/
|
||||
|
||||
|
||||
/* Model of the annotation saving pipeline.
 *
 * Keeps a snapshot of the last-saved objects (keyed by server ID) so that
 * save() can split the current exported state into created/updated/deleted
 * sets and send minimal requests. Notifies subscribed views about saving
 * progress through the Listener protocol ('onAnnotationSaverUpdate').
 */
class AnnotationSaverModel extends Listener {
    constructor(initialData, shapeCollection) {
        super('onAnnotationSaverUpdate', () => this._state);

        // Current saving status/message, broadcast to subscribers via notify().
        this._state = {
            status: null,
            message: null,
        };

        this._version = initialData.version;
        this._shapeCollection = shapeCollection;
        // Map of server ID -> last saved (exported) object.
        // A plain object (not an array) because IDs are sparse keys.
        this._initialObjects = {};

        this._hash = this._getHash();

        // We need use data from export instead of initialData
        // Otherwise we have differ keys order and JSON comparison code incorrect
        const data = this._shapeCollection.export()[0];
        for (const shape of data.shapes) {
            this._initialObjects[shape.id] = shape;
        }

        for (const track of data.tracks) {
            this._initialObjects[track.id] = track;
        }
    }

    // PATCH the job annotations with the given action ('create'/'update'/'delete').
    async _request(data, action) {
        return new Promise((resolve, reject) => {
            $.ajax({
                url: `/api/v1/jobs/${window.cvat.job.id}/annotations?action=${action}`,
                type: 'PATCH',
                data: JSON.stringify(data),
                contentType: 'application/json',
            }).done((savedData) => {
                resolve(savedData);
            }).fail((errorData) => {
                const message = `Could not make ${action} annotations. Code: ${errorData.status}. `
                    + `Message: ${errorData.responseText || errorData.statusText}`;
                reject(new Error(message));
            });
        });
    }

    // PUT (fully replace) the job annotations on the server.
    async _put(data) {
        return new Promise((resolve, reject) => {
            $.ajax({
                url: `/api/v1/jobs/${window.cvat.job.id}/annotations`,
                type: 'PUT',
                data: JSON.stringify(data),
                contentType: 'application/json',
            }).done((savedData) => {
                resolve(savedData);
            }).fail((errorData) => {
                const message = `Could not put annotations. Code: ${errorData.status}. `
                    + `Message: ${errorData.responseText || errorData.statusText}`;
                reject(new Error(message));
            });
        });
    }

    async _create(created) {
        return this._request(created, 'create');
    }

    async _update(updated) {
        return this._request(updated, 'update');
    }

    async _delete(deleted) {
        return this._request(deleted, 'delete');
    }

    // Record save-time statistics and POST accumulated client logs.
    async _logs() {
        Logger.addEvent(Logger.EventType.saveJob);
        const totalStat = this._shapeCollection.collectStatistic()[1];
        Logger.addEvent(Logger.EventType.sendTaskInfo, {
            'track count': totalStat.boxes.annotation + totalStat.boxes.interpolation
                + totalStat.polygons.annotation + totalStat.polygons.interpolation
                + totalStat.polylines.annotation + totalStat.polylines.interpolation
                + totalStat.points.annotation + totalStat.points.interpolation,
            'frame count': window.cvat.player.frames.stop - window.cvat.player.frames.start + 1,
            'object count': totalStat.total,
            'box count': totalStat.boxes.annotation + totalStat.boxes.interpolation,
            'polygon count': totalStat.polygons.annotation + totalStat.polygons.interpolation,
            'polyline count': totalStat.polylines.annotation + totalStat.polylines.interpolation,
            'points count': totalStat.points.annotation + totalStat.points.interpolation,
        });

        const annotationLogs = Logger.getLogs();

        return new Promise((resolve, reject) => {
            $.ajax({
                url: '/api/v1/server/logs',
                type: 'POST',
                data: JSON.stringify(annotationLogs.export()),
                contentType: 'application/json',
            }).done(() => {
                resolve();
            }).fail((errorData) => {
                // Keep the logs locally so they are not lost on a failed upload.
                annotationLogs.save();
                const message = `Could not send logs. Code: ${errorData.status}. `
                    + `Message: ${errorData.responseText || errorData.statusText}`;
                reject(new Error(message));
            });
        });
    }

    // Split the exported state into [created, updated, deleted] payloads
    // relative to the last saved snapshot (this._initialObjects).
    _split(exported) {
        const exportedIDs = Array.from(exported.shapes, shape => +shape.id)
            .concat(Array.from(exported.tracks, track => +track.id));

        const created = {
            version: this._version,
            shapes: [],
            tracks: [],
            tags: [],
        };

        const updated = {
            version: this._version + 1,
            shapes: [],
            tracks: [],
            tags: [],
        };

        const deleted = {
            version: this._version + 2,
            shapes: [],
            tracks: [],
            tags: [],
        };

        // Compare initial state objects and export state objects
        // in order to get updated and created objects
        for (const obj of exported.shapes.concat(exported.tracks)) {
            if (obj.id in this._initialObjects) {
                const exportedHash = JSON.stringify(obj);
                const initialHash = JSON.stringify(this._initialObjects[obj.id]);
                if (exportedHash !== initialHash) {
                    // An object with inner 'shapes' is a track, otherwise a shape.
                    const target = 'shapes' in obj ? updated.tracks : updated.shapes;
                    target.push(obj);
                }
            } else if (typeof obj.id === 'undefined') {
                const target = 'shapes' in obj ? created.tracks : created.shapes;
                target.push(obj);
            } else {
                throw Error(`Bad object ID found: ${obj.id}. `
                    + 'It is not contained in initial state and have server ID');
            }
        }

        // Compare initial state indexes and export state indexes
        // in order to get removed objects
        for (const shapeID in this._initialObjects) {
            if (!exportedIDs.includes(+shapeID)) {
                const initialShape = this._initialObjects[shapeID];
                const target = 'shapes' in initialShape ? deleted.tracks : deleted.shapes;
                target.push(initialShape);
            }
        }

        return [created, updated, deleted];
    }

    // Serialized snapshot of the collection, used to detect unsaved changes.
    _getHash() {
        const exported = this._shapeCollection.export()[0];
        return JSON.stringify(exported);
    }

    _updateCreatedObjects(objectsToSave, savedObjects, mapping) {
        // Method setups IDs of created objects after saving on a server
        const allSavedObjects = savedObjects.shapes.concat(savedObjects.tracks);
        const allObjectsToSave = objectsToSave.shapes.concat(objectsToSave.tracks);
        if (allSavedObjects.length !== allObjectsToSave.length) {
            throw Error('Number of saved objects does not match number of objects to save');
        }

        for (let idx = 0; idx < allSavedObjects.length; idx += 1) {
            const objectModel = mapping.filter(el => el[0] === allObjectsToSave[idx])[0][1];
            const { id } = allSavedObjects[idx];
            objectModel.serverID = id;
            allObjectsToSave[idx].id = id;
        }

        this._shapeCollection.update();
    }

    // Update state and broadcast it to all subscribed views.
    notify(status, message = null) {
        this._state.status = status;
        this._state.message = message;
        Listener.prototype.notify.call(this);
    }

    hasUnsavedChanges() {
        return this._getHash() !== this._hash;
    }

    // Save the collection: full PUT when the collection is flushed,
    // otherwise incremental create/update/delete PATCH requests.
    async save() {
        this.notify('saveStart');
        try {
            const [exported, mapping] = this._shapeCollection.export();
            const { flush } = this._shapeCollection;
            if (flush) {
                const data = Object.assign({}, exported, {
                    version: this._version,
                    tags: [],
                });

                this._version += 1;

                this.notify('saveCreated');
                const savedObjects = await this._put(data);
                this._updateCreatedObjects(exported, savedObjects, mapping);
                this._shapeCollection.flush = false;
                // Server responds with the authoritative version; adopt it once.
                this._version = savedObjects.version;
                for (const object of savedObjects.shapes.concat(savedObjects.tracks)) {
                    this._initialObjects[object.id] = object;
                }
            } else {
                const [created, updated, deleted] = this._split(exported);
                this.notify('saveCreated');
                const savedCreated = await this._create(created);
                this._updateCreatedObjects(created, savedCreated, mapping);
                this._version = savedCreated.version;
                for (const object of created.shapes.concat(created.tracks)) {
                    this._initialObjects[object.id] = object;
                }

                this.notify('saveUpdated');
                const savedUpdated = await this._update(updated);
                this._version = savedUpdated.version;
                for (const object of updated.shapes.concat(updated.tracks)) {
                    if (object.id in this._initialObjects) {
                        this._initialObjects[object.id] = object;
                    }
                }

                this.notify('saveDeleted');
                const savedDeleted = await this._delete(deleted);
                this._version = savedDeleted.version;
                for (const object of savedDeleted.shapes.concat(savedDeleted.tracks)) {
                    if (object.id in this._initialObjects) {
                        delete this._initialObjects[object.id];
                    }
                }
            }

            await this._logs();
        } catch (error) {
            this.notify('saveUnlocked');
            this.notify('saveError', error.message);
            this._state = {
                status: null,
                message: null,
            };
            // Rethrow the original error to preserve its type and stack trace.
            throw error;
        }

        this._hash = this._getHash();
        this.notify('saveDone');

        // Keep the "saved" indication visible briefly, then unlock the UI.
        setTimeout(() => {
            this.notify('saveUnlocked');
            this._state = {
                status: null,
                message: null,
            };
        }, 1000);
    }

    // Deep copy of the current state (subscribers must not mutate it).
    get state() {
        return JSON.parse(JSON.stringify(this._state));
    }
}
|
||||
|
||||
/* Controller of the annotation saver: wires the save shortkey,
 * manages the auto-save timer and triggers model.save() when idle. */
class AnnotationSaverController {
    constructor(model) {
        this._model = model;
        this._autoSaveInterval = null;

        const { shortkeys } = window.cvat.config;
        Mousetrap.bind(shortkeys.save_work.value, () => {
            this.save();
            return false;
        }, 'keydown');
    }

    // Enable or disable periodic saving; `time` is the period in minutes.
    autoSave(enabled, time) {
        if (this._autoSaveInterval !== null) {
            clearInterval(this._autoSaveInterval);
            this._autoSaveInterval = null;
        }

        if (!enabled) {
            return;
        }

        const periodMs = time * 1000 * 60;
        this._autoSaveInterval = setInterval(() => this.save(), periodMs);
    }

    // Proxy to the model: are there changes since the last save?
    hasUnsavedChanges() {
        return this._model.hasUnsavedChanges();
    }

    // Start a save unless one is already in progress.
    save() {
        const idle = this._model.state.status === null;
        if (!idle) {
            return;
        }

        this._model.save().catch((error) => {
            // Rethrow asynchronously so the global error handler reports it.
            setTimeout(() => {
                throw error;
            });
        });
    }
}
|
||||
|
||||
|
||||
/* View of the annotation saver: binds the save button, auto-save controls
 * and the unsaved-changes page-leave guard; reacts to model state updates
 * ('onAnnotationSaverUpdate') by updating the overlay and the save button. */
class AnnotationSaverView {
    constructor(model, controller) {
        model.subscribe(this);

        this._controller = controller;
        this._overlay = null;

        const { shortkeys } = window.cvat.config;
        const saveHelp = `${shortkeys.save_work.view_value} - ${shortkeys.save_work.description}`;

        this._saveButton = $('#saveButton').on('click', () => {
            this._controller.save();
        }).attr('title', saveHelp);

        this._autoSaveBox = $('#autoSaveBox').on('change', (e) => {
            const enabled = e.target.checked;
            const time = +this._autoSaveTime.prop('value');
            this._controller.autoSave(enabled, time);
        });

        this._autoSaveTime = $('#autoSaveTime').on('change', (e) => {
            // Clamp the entered period into the input's min/max bounds.
            e.target.value = Math.clamp(+e.target.value, +e.target.min, +e.target.max);
            this._autoSaveBox.trigger('change');
        });

        // Warn the user before leaving the page with unsaved changes.
        window.onbeforeunload = (e) => {
            if (this._controller.hasUnsavedChanges()) { // eslint-disable-line react/no-this-in-sfc
                const message = 'You have unsaved changes. Leave this page?';
                e.returnValue = message;
                return message;
            }
            return null;
        };
    }

    // Listener callback: reflect each saving stage in the UI.
    onAnnotationSaverUpdate(state) {
        if (state.status === 'saveStart') {
            this._overlay = showOverlay('Annotations are being saved..');
            this._saveButton.prop('disabled', true).text('Saving..');
        } else if (state.status === 'saveDone') {
            this._saveButton.text('Successful save');
            this._overlay.remove();
        } else if (state.status === 'saveError') {
            this._saveButton.prop('disabled', false).text('Save Work');
            // Fixed grammar of the user-facing error message.
            const message = `Could not save the job. Errors occurred: ${state.message}. `
                + 'Please report the problem to support team immediately.';
            showMessage(message);
            this._overlay.remove();
        } else if (state.status === 'saveCreated') {
            this._overlay.setMessage(`${this._overlay.getMessage()} <br /> - Created objects are being saved..`);
        } else if (state.status === 'saveUpdated') {
            this._overlay.setMessage(`${this._overlay.getMessage()} <br /> - Updated objects are being saved..`);
        } else if (state.status === 'saveDeleted') {
            this._overlay.setMessage(`${this._overlay.getMessage()} <br /> - Deleted objects are being saved..`);
        } else if (state.status === 'saveUnlocked') {
            this._saveButton.prop('disabled', false).text('Save Work');
        } else {
            const message = `Unknown state has been reached during annotation saving: ${state.status} `
                + 'Please report the problem to support team immediately.';
            showMessage(message);
        }
    }
}
|
||||
|
||||
|
||||
/* Wire up the annotation saver MVC triple for the given job data and
 * shape collection. The view subscribes itself to the model, so no
 * reference needs to be returned. */
function buildAnnotationSaver(initialData, shapeCollection) {
    const model = new AnnotationSaverModel(initialData, shapeCollection);
    new AnnotationSaverView(model, new AnnotationSaverController(model));
}
|
||||
@ -1,36 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2018 Intel Corporation
|
||||
*
|
||||
* SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
/* exported
|
||||
IncrementIdGenerator
|
||||
ConstIdGenerator
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
/* Generates sequential integer IDs, starting from a configurable value. */
class IncrementIdGenerator {
    constructor(startId = 0) {
        this._startId = startId;
    }

    /* Return the current ID and advance the counter by one. */
    next() {
        const id = this._startId;
        this._startId += 1;
        return id;
    }

    /* Restart the sequence from the given value. */
    reset(startId = 0) {
        this._startId = startId;
    }
}
|
||||
|
||||
/* An ID "generator" that always yields the same constant value
 * (used where a real sequence is not required, e.g. temporary objects). */
class ConstIdGenerator {
    constructor(startId = -1) {
        this._startId = startId;
    }

    /* Return the constant ID; the generator never advances. */
    next() {
        return this._startId;
    }
}
|
||||
@ -1,9 +0,0 @@
|
||||
|
||||
# Copyright (C) 2018 Intel Corporation
|
||||
#
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
# Create your tests here.
|
||||
|
||||
@ -0,0 +1,11 @@
|
||||
/*
|
||||
* Copyright (C) 2018 Intel Corporation
|
||||
*
|
||||
* SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
/* Once the dashboard DOM is ready, add an "Analytics" button that opens
 * the Kibana analytics application in a new browser tab. */
window.addEventListener('DOMContentLoaded', () => {
    const button = $('<button class="regular h1" style="margin-left: 5px;"> Analytics </button>');
    button.on('click', () => {
        window.open('/analytics/app/kibana');
    });
    button.appendTo('#dashboardManageButtons');
});
|
||||
@ -1,15 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2018 Intel Corporation
|
||||
*
|
||||
* SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
/* global
|
||||
Mousetrap:false
|
||||
*/
|
||||
|
||||
/* Bind the "open analytics" shortkey to open Kibana in a new tab. */
Mousetrap.bind(window.cvat.config.shortkeys.open_analytics.value, () => {
    window.open("/analytics/app/kibana");

    // Returning false stops the default browser action for the shortkey.
    return false;
});
|
||||
@ -0,0 +1,112 @@
|
||||
/*
|
||||
* Copyright (C) 2018 Intel Corporation
|
||||
*
|
||||
* SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
/* global
|
||||
userConfirm:false
|
||||
showMessage:false
|
||||
*/
|
||||
|
||||
// TF annotation dashboard plugin: once the dashboard is ready, add a
// "Run TF Annotation" button to every task card and keep it in sync with
// the server-side process state.
window.addEventListener('dashboardReady', () => {
    // Poll the server every 5 seconds and reflect process progress on the
    // button until the process leaves the 'started'/'queued' states.
    function checkProcess(tid, button) {
        function checkCallback() {
            $.get(`/tensorflow/annotation/check/task/${tid}`).done((statusData) => {
                if (['started', 'queued'].includes(statusData.status)) {
                    const progress = Math.round(statusData.progress) || '0';
                    button.text(`Cancel TF Annotation (${progress}%)`);
                    setTimeout(checkCallback, 5000);
                } else {
                    // Terminal state: restore the button to its idle appearance.
                    button.text('Run TF Annotation');
                    button.removeClass('tfAnnotationProcess');
                    button.prop('disabled', false);

                    if (statusData.status === 'failed') {
                        const message = `Tensorflow annotation failed. Error: ${statusData.stderr}`;
                        showMessage(message);
                    } else if (statusData.status !== 'finished') {
                        const message = `Tensorflow annotation check request returned status "${statusData.status}"`;
                        showMessage(message);
                    }
                }
            }).fail((errorData) => {
                const message = `Can not sent tensorflow annotation check request. Code: ${errorData.status}. `
                    + `Message: ${errorData.responseText || errorData.statusText}`;
                showMessage(message);
            });
        }

        setTimeout(checkCallback, 5000);
    }


    // Ask the server to start TF annotation for the task and begin polling.
    function runProcess(tid, button) {
        $.get(`/tensorflow/annotation/create/task/${tid}`).done(() => {
            showMessage('Process has started');
            button.text('Cancel TF Annotation (0%)');
            button.addClass('tfAnnotationProcess');
            checkProcess(tid, button);
        }).fail((errorData) => {
            const message = `Can not run tf annotation. Code: ${errorData.status}. `
                + `Message: ${errorData.responseText || errorData.statusText}`;
            showMessage(message);
        });
    }


    // Ask the server to cancel a running process; the button is disabled
    // until the poll loop observes the terminal state and re-enables it.
    function cancelProcess(tid, button) {
        $.get(`/tensorflow/annotation/cancel/task/${tid}`).done(() => {
            button.prop('disabled', true);
        }).fail((errorData) => {
            const message = `Can not cancel tf annotation. Code: ${errorData.status}. `
                + `Message: ${errorData.responseText || errorData.statusText}`;
            showMessage(message);
        });
    }


    // Add the run/cancel button to one dashboard task card. `metaData` maps
    // task IDs to server-side process info (see the meta request below).
    function setupDashboardItem(item, metaData) {
        const tid = +item.attr('tid');
        const button = $('<button> Run TF Annotation </button>');

        button.on('click', () => {
            if (button.hasClass('tfAnnotationProcess')) {
                userConfirm('The process will be canceled. Continue?', () => {
                    cancelProcess(tid, button);
                });
            } else {
                userConfirm('The current annotation will be lost. Are you sure?', () => {
                    runProcess(tid, button);
                });
            }
        });

        button.addClass('dashboardTFAnnotationButton regular dashboardButtonUI');
        button.appendTo(item.find('div.dashboardButtonsUI'));

        // A process is already active for this task: show cancel mode and poll.
        if ((tid in metaData) && (metaData[tid].active)) {
            button.text('Cancel TF Annotation');
            button.addClass('tfAnnotationProcess');
            checkProcess(tid, button);
        }
    }

    const elements = $('.dashboardItem');
    const tids = Array.from(elements, el => +el.getAttribute('tid'));

    // Fetch process meta info for all visible tasks in one request,
    // then decorate each task card.
    $.ajax({
        type: 'POST',
        url: '/tensorflow/annotation/meta/get',
        data: JSON.stringify(tids),
        contentType: 'application/json; charset=utf-8',
    }).done((metaData) => {
        elements.each(function setupDashboardItemWrapper() {
            setupDashboardItem($(this), metaData);
        });
    }).fail((errorData) => {
        const message = `Can not get tf annotation meta info. Code: ${errorData.status}. `
            + `Message: ${errorData.responseText || errorData.statusText}`;
        showMessage(message);
    });
});
|
||||
@ -1,134 +0,0 @@
|
||||
/*
|
||||
* Copyright (C) 2018 Intel Corporation
|
||||
*
|
||||
* SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
/* global
|
||||
userConfirm:false
|
||||
showMessage:false
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
// Poll the TF annotation status of `taskId` every 5 seconds and mirror
// progress on `tfAnnotationButton`. Polling stops when the process leaves
// the started/queued states, or after more than 5 consecutive request errors.
function CheckTFAnnotationRequest(taskId, tfAnnotationButton) {
    let frequence = 5000; // polling period, ms
    let errorCount = 0;   // consecutive failed status requests
    let interval = setInterval(function() {
        $.ajax ({
            url: '/tensorflow/annotation/check/task/' + taskId,
            success: function(jsonData) {
                let status = jsonData["status"];
                if (status == "started" || status == "queued") {
                    // Still running: show current progress on the button.
                    let progress = Math.round(jsonData["progress"]) || "0";
                    tfAnnotationButton.text(`Cancel TF Annotation (${progress}%)`);
                }
                else {
                    // Terminal state: restore the button and stop polling.
                    tfAnnotationButton.text("Run TF Annotation");
                    tfAnnotationButton.removeClass("tfAnnotationProcess");
                    tfAnnotationButton.prop("disabled", false);
                    clearInterval(interval);
                }
            },
            error: function() {
                errorCount ++;
                if (errorCount > 5) {
                    // Give up after repeated failures and surface the error.
                    clearInterval(interval);
                    tfAnnotationButton.prop("disabled", false);
                    tfAnnotationButton.text("Status Check Error");
                    throw Error(`TF annotation check request error for task ${window.cvat.dashboard.taskID}:${window.cvat.dashboard.taskName}`);
                }
            }
        });
    }, frequence);
}
|
||||
|
||||
/* Start TF annotation for the currently selected dashboard task.
 * `this` is bound to the jQuery button that triggered the request. */
function RunTFAnnotationRequest() {
    const tfAnnotationButton = this;
    const taskID = window.cvat.dashboard.taskID;

    $.get('/tensorflow/annotation/create/task/' + taskID).done(() => {
        showMessage('Process started.');
        tfAnnotationButton.text('Cancel TF Annotation (0%)');
        tfAnnotationButton.addClass('tfAnnotationProcess');
        CheckTFAnnotationRequest(taskID, tfAnnotationButton);
    }).fail((response) => {
        showMessage('Abort. Reason: ' + response.responseText);
    });
}
|
||||
|
||||
/* Cancel a running TF annotation process for the selected task.
 * `this` is bound to the jQuery button that triggered the request. */
function CancelTFAnnotationRequest() {
    const tfAnnotationButton = this;
    const url = '/tensorflow/annotation/cancel/task/' + window.cvat.dashboard.taskID;

    $.get(url).done(() => {
        // Disable the button; the status poll loop re-enables it later.
        tfAnnotationButton.prop('disabled', true);
    }).fail((data) => {
        showMessage(`TF annotation cancel error: ${data.responseText}`);
    });
}
|
||||
|
||||
/* Click handler for the TF annotation button (`this` is the jQuery button).
 * Stores the clicked task's ID/name globally, then asks the user to confirm
 * cancelling a running process or starting a new one. */
function onTFAnnotationClick() {
    const button = this;
    const taskUI = button.closest('div.dashboardTaskUI');
    const taskId = +taskUI.attr('id').split('_')[1];
    const nameLabel = $(taskUI.find('label.dashboardTaskNameLabel')[0]);
    const taskName = $.trim(nameLabel.text());

    window.cvat.dashboard.taskID = taskId;
    window.cvat.dashboard.taskName = taskName;

    const processActive = button.hasClass('tfAnnotationProcess');
    if (processActive) {
        userConfirm('The process will be canceled. Continue?', CancelTFAnnotationRequest.bind(button));
    } else {
        userConfirm('The current annotation will be lost. Are you sure?', RunTFAnnotationRequest.bind(button));
    }
}
|
||||
|
||||
// Shared global namespace for dashboard plugins; each piece is created
// lazily so this file works regardless of plugin load order.
window.cvat = window.cvat || {};
window.cvat.dashboard = window.cvat.dashboard || {};
// The dashboard invokes every registered callback with the newly rendered
// task UI elements so plugins can decorate them.
window.cvat.dashboard.uiCallbacks = window.cvat.dashboard.uiCallbacks || [];
|
||||
|
||||
// Dashboard plugin callback: for each newly rendered task element, fetch
// TF annotation meta info from the server and add a "Run TF Annotation"
// button to the task's button panel.
window.cvat.dashboard.uiCallbacks.push(function(newElements) {
    // Task IDs are encoded in element IDs of the form "<prefix>_<tid>".
    let tids = [];
    for (let el of newElements) {
        tids.push(el.id.split('_')[1]);
    }

    $.ajax({
        type: 'POST',
        url: '/tensorflow/annotation/meta/get',
        data: JSON.stringify(tids),
        contentType: "application/json; charset=utf-8",
        success: (data) => {
            newElements.each(function(idx) {
                let elem = $(newElements[idx]);
                let tid = +elem.attr('id').split('_')[1];
                let buttonsUI = elem.find('div.dashboardButtonsUI')[0];
                let tfAnnotationButton = $('<button> Run TF Annotation </button>');

                tfAnnotationButton.on('click', onTFAnnotationClick.bind(tfAnnotationButton));
                tfAnnotationButton.addClass('dashboardTFAnnotationButton regular dashboardButtonUI');
                tfAnnotationButton.appendTo(buttonsUI);

                // If the server reports an active process for this task,
                // switch the button to cancel mode and start polling.
                if ((tid in data) && (data[tid].active)) {
                    tfAnnotationButton.text("Cancel TF Annotation");
                    tfAnnotationButton.addClass("tfAnnotationProcess");
                    CheckTFAnnotationRequest(tid, tfAnnotationButton);
                }
            });
        },
        error: (data) => {
            let message = `Can not get tf annotation meta info. Code: ${data.status}. Message: ${data.responseText || data.statusText}`;
            showMessage(message);
            throw Error(message);
        }
    });
});
|
||||
@ -0,0 +1,4 @@
|
||||
-r development.txt
|
||||
-r production.txt
|
||||
-r staging.txt
|
||||
-r testing.txt
|
||||
@ -0,0 +1,2 @@
|
||||
-r development.txt
|
||||
fakeredis==1.0.3
|
||||