Fixed corner case during search (#4198)

* Fixed corner case during search

* Updated version & changelog

* Added tests
main
Boris Sekachev 4 years ago committed by GitHub
parent 129584b99e
commit 3c750d4efc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -62,6 +62,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Issues disappear when rescale a browser (<https://github.com/openvinotoolkit/cvat/pull/4189>) - Issues disappear when rescale a browser (<https://github.com/openvinotoolkit/cvat/pull/4189>)
- Auth token key is not returned when registering without email verification (<https://github.com/openvinotoolkit/cvat/pull/4092>) - Auth token key is not returned when registering without email verification (<https://github.com/openvinotoolkit/cvat/pull/4092>)
- Error in create project from backup for standard 3D annotation (<https://github.com/openvinotoolkit/cvat/pull/4160>) - Error in create project from backup for standard 3D annotation (<https://github.com/openvinotoolkit/cvat/pull/4160>)
- Annotations search does not work correctly in some corner cases (when use complex properties with width, height) (<https://github.com/openvinotoolkit/cvat/pull/4198>)
### Security ### Security
- Updated ELK to 6.8.23 which uses log4j 2.17.1 (<https://github.com/openvinotoolkit/cvat/pull/4206>) - Updated ELK to 6.8.23 which uses log4j 2.17.1 (<https://github.com/openvinotoolkit/cvat/pull/4206>)

@ -1,12 +1,12 @@
{ {
"name": "cvat-core", "name": "cvat-core",
"version": "4.1.1", "version": "4.1.2",
"lockfileVersion": 2, "lockfileVersion": 2,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "cvat-core", "name": "cvat-core",
"version": "4.1.1", "version": "4.1.2",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"axios": "^0.21.4", "axios": "^0.21.4",

@ -1,6 +1,6 @@
{ {
"name": "cvat-core", "name": "cvat-core",
"version": "4.1.1", "version": "4.1.2",
"description": "Part of Computer Vision Tool which presents an interface for client-side integration", "description": "Part of Computer Vision Tool which presents an interface for client-side integration",
"main": "babel.config.js", "main": "babel.config.js",
"scripts": { "scripts": {

@ -1,4 +1,4 @@
// Copyright (C) 2019-2021 Intel Corporation // Copyright (C) 2019-2022 Intel Corporation
// //
// SPDX-License-Identifier: MIT // SPDX-License-Identifier: MIT
@ -917,35 +917,14 @@
search(filters, frameFrom, frameTo) { search(filters, frameFrom, frameTo) {
const sign = Math.sign(frameTo - frameFrom); const sign = Math.sign(frameTo - frameFrom);
const filtersStr = JSON.stringify(filters); const filtersStr = JSON.stringify(filters);
const containsDifficultProperties = filtersStr.match(/"var":"width"/) || filtersStr.match(/"var":"height"/); const linearSearch = filtersStr.match(/"var":"width"/) || filtersStr.match(/"var":"height"/);
const deepSearch = (deepSearchFrom, deepSearchTo) => {
// deepSearchFrom is expected to be a frame that doesn't satisfy a filter
// deepSearchTo is expected to be a frame that satisfies a filter
let [prev, next] = [deepSearchFrom, deepSearchTo];
// half division method instead of linear search
while (!(Math.abs(prev - next) === 1)) {
const middle = next + Math.floor((prev - next) / 2);
const shapesData = this.tracks.map((track) => track.get(middle));
const filtered = this.annotationsFilter.filter(shapesData, filters);
if (filtered.length) {
next = middle;
} else {
prev = middle;
}
}
return next;
};
const keyframesMemory = {};
const predicate = sign > 0 ? (frame) => frame <= frameTo : (frame) => frame >= frameTo; const predicate = sign > 0 ? (frame) => frame <= frameTo : (frame) => frame >= frameTo;
const update = sign > 0 ? (frame) => frame + 1 : (frame) => frame - 1; const update = sign > 0 ? (frame) => frame + 1 : (frame) => frame - 1;
for (let frame = frameFrom; predicate(frame); frame = update(frame)) { for (let frame = frameFrom; predicate(frame); frame = update(frame)) {
// First prepare all data for the frame // First prepare all data for the frame
// Consider all shapes, tags, and not outside tracks that have keyframe here // Consider all shapes, tags, and not outside tracks that have keyframe here
// In particular consider first and last frame as keyframes for all frames // In particular consider first and last frame as keyframes for all tracks
const statesData = [].concat( const statesData = [].concat(
(frame in this.shapes ? this.shapes[frame] : []) (frame in this.shapes ? this.shapes[frame] : [])
.filter((shape) => !shape.removed) .filter((shape) => !shape.removed)
@ -955,7 +934,9 @@
.map((tag) => tag.get(frame)), .map((tag) => tag.get(frame)),
); );
const tracks = Object.values(this.tracks) const tracks = Object.values(this.tracks)
.filter((track) => frame in track.shapes || frame === frameFrom || frame === frameTo) .filter((track) => (
frame in track.shapes || frame === frameFrom ||
frame === frameTo || linearSearch))
.filter((track) => !track.removed); .filter((track) => !track.removed);
statesData.push(...tracks.map((track) => track.get(frame)).filter((state) => !state.outside)); statesData.push(...tracks.map((track) => track.get(frame)).filter((state) => !state.outside));
@ -966,31 +947,6 @@
// Filtering // Filtering
const filtered = this.annotationsFilter.filter(statesData, filters); const filtered = this.annotationsFilter.filter(statesData, filters);
// Now we are checking whether we need deep search or not
// Deep search is needed in some difficult cases
// For example when filter contains fields which
// can be changed between keyframes (like: height and width of a shape)
// It's expected, that a track doesn't satisfy a filter on the previous keyframe
// At the same time it satisfies the filter on the next keyframe
let withDeepSearch = false;
if (containsDifficultProperties) {
for (const track of tracks) {
const trackIsSatisfy = filtered.includes(track.clientID);
if (!trackIsSatisfy) {
keyframesMemory[track.clientID] = [filtered.includes(track.clientID), frame];
} else if (keyframesMemory[track.clientID] && keyframesMemory[track.clientID][0] === false) {
withDeepSearch = true;
}
}
}
if (withDeepSearch) {
const reducer = sign > 0 ? Math.min : Math.max;
const deepSearchFrom = reducer(...Object.values(keyframesMemory).map((value) => value[1]));
return deepSearch(deepSearchFrom, frame);
}
if (filtered.length) { if (filtered.length) {
return frame; return frame;
} }

@ -1,4 +1,4 @@
// Copyright (C) 2020-2021 Intel Corporation // Copyright (C) 2020-2022 Intel Corporation
// //
// SPDX-License-Identifier: MIT // SPDX-License-Identifier: MIT
@ -831,3 +831,34 @@ describe('Feature: select object', () => {
expect(task.annotations.select(annotations, '5', '10')).rejects.toThrow(window.cvat.exceptions.ArgumentError); expect(task.annotations.select(annotations, '5', '10')).rejects.toThrow(window.cvat.exceptions.ArgumentError);
}); });
}); });
describe('Feature: search frame', () => {
    test('applying different filters', async () => {
        const job = (await window.cvat.jobs.get({ jobID: 102 }))[0];
        await job.annotations.clear(true);
        // Each case: [serialized filter, frameFrom, frameTo, expected frame (null when nothing found)]
        const searchCases = [
            ['[{"and":[{"==":[{"var":"type"},"tag"]}]}]', 495, 994, 500],
            ['[{"and":[{"==":[{"var":"type"},"tag"]},{"==":[{"var":"label"},"bicycle"]}]}]', 495, 994, 500],
            ['[{"and":[{"==":[{"var":"type"},"track"]},{"==":[{"var":"label"},"bicycle"]}]}]', 495, 994, null],
            ['[{"and":[{"==":[{"var":"type"},"shape"]},{"==":[{"var":"shape"},"rectangle"]}]}]', 495, 994, 510],
            ['[{"and":[{"==":[{"var":"type"},"shape"]},{"==":[{"var":"shape"},"rectangle"]}]}]', 511, 994, null],
            ['[{"and":[{"==":[{"var":"type"},"shape"]},{"==":[{"var":"shape"},"polygon"]}]}]', 511, 994, 520],
            ['[{"and":[{"==":[{"var":"attr.motorcycle.model"},"some text for test"]}]}]', 495, 994, 520],
            ['[{"and":[{"==":[{"var":"attr.motorcycle.model"},"some text for test"]},{"==":[{"var":"shape"},"ellipse"]}]}]', 495, 994, null],
            ['[{"and":[{"<=":[450,{"var":"width"},550]}]}]', 540, 994, 563],
            ['[{"and":[{"<=":[450,{"var":"width"},550]}]}]', 588, 994, null],
            ['[{"and":[{">=":[{"var":"width"},500]},{"<=":[{"var":"height"},300]}]}]', 540, 994, 575],
        ];
        // Run the searches sequentially, in the original order, asserting each result
        for (const [serializedFilter, frameFrom, frameTo, expectedFrame] of searchCases) {
            const foundFrame = await job.annotations.search(JSON.parse(serializedFilter), frameFrom, frameTo);
            expect(foundFrame).toBe(expectedFrame);
        }
    });
});

@ -1455,6 +1455,74 @@ const taskAnnotationsDummyData = {
}, },
], ],
}, },
102: {
version: 21,
tags: [{
id: 1,
frame: 500,
label_id: 22,
group: 0,
attributes: [{
spec_id: 13,
value: 'woman',
}, {
spec_id: 14,
value: 'false',
}],
}],
shapes: [{
type: 'rectangle',
occluded: false,
z_order: 1,
points: [557.7890625, 276.2216796875, 907.1888732910156, 695.5014038085938],
id: 2,
frame: 510,
label_id: 21,
group: 0,
attributes: [],
}, {
type: 'polygon',
occluded: false,
z_order: 2,
points: [0, 0, 500, 500, 1000, 0],
id: 3,
frame: 520,
label_id: 23,
group: 0,
attributes: [{ spec_id: 15, value: 'some text for test' }],
}],
tracks: [
{
id: 4,
frame: 550,
label_id: 24,
group: 0,
shapes: [
{
type: 'rectangle',
occluded: true,
z_order: 2,
points: [100, 100, 500, 500],
id: 1,
frame: 550,
outside: false,
attributes: [],
},
{
type: 'rectangle',
occluded: false,
z_order: 2,
points: [100, 100, 700, 300],
id: 3,
frame: 600,
outside: false,
attributes: [],
},
],
attributes: [],
},
],
},
101: { 101: {
version: 21, version: 21,
tags: [], tags: [],

Loading…
Cancel
Save