Added a button to cancel started automatic annotation (#1198)

Branch: main · Boris Sekachev committed via GitHub · commit 65cbabd8df, parent 401d66dcd7

@@ -2,17 +2,20 @@
 //
 // SPDX-License-Identifier: MIT
 
-import { AnyAction, Dispatch, ActionCreator } from 'redux';
-import { ThunkAction } from 'redux-thunk';
-
-import { getCVATStore } from 'cvat-store';
+import { ActionUnion, createAction, ThunkAction } from 'utils/redux';
+import getCore from 'cvat-core';
 import {
     Model,
+    ModelType,
     ModelFiles,
     ActiveInference,
     CombinedState,
-} from '../reducers/interfaces';
-
-import getCore from 'cvat-core';
+} from 'reducers/interfaces';
+
+export enum PreinstalledModels {
+    RCNN = 'RCNN Object Detector',
+    MaskRCNN = 'Mask RCNN Object Detector',
+}
 
 export enum ModelsActionTypes {
     GET_MODELS = 'GET_MODELS',
@@ -25,66 +28,101 @@ export enum ModelsActionTypes {
     CREATE_MODEL_SUCCESS = 'CREATE_MODEL_SUCCESS',
     CREATE_MODEL_FAILED = 'CREATE_MODEL_FAILED',
     CREATE_MODEL_STATUS_UPDATED = 'CREATE_MODEL_STATUS_UPDATED',
-    INFER_MODEL = 'INFER_MODEL',
-    INFER_MODEL_SUCCESS = 'INFER_MODEL_SUCCESS',
-    INFER_MODEL_FAILED = 'INFER_MODEL_FAILED',
-    FETCH_META_FAILED = 'FETCH_META_FAILED',
-    GET_INFERENCE_STATUS = 'GET_INFERENCE_STATUS',
+    START_INFERENCE_FAILED = 'START_INFERENCE_FAILED',
     GET_INFERENCE_STATUS_SUCCESS = 'GET_INFERENCE_STATUS_SUCCESS',
     GET_INFERENCE_STATUS_FAILED = 'GET_INFERENCE_STATUS_FAILED',
+    FETCH_META_FAILED = 'FETCH_META_FAILED',
     SHOW_RUN_MODEL_DIALOG = 'SHOW_RUN_MODEL_DIALOG',
     CLOSE_RUN_MODEL_DIALOG = 'CLOSE_RUN_MODEL_DIALOG',
+    CANCEL_INFERENCE_SUCCESS = 'CANCEL_INFERENCE_SUCCESS',
+    CANCEL_INFERENCE_FAILED = 'CANCEL_INFERENCE_FAILED',
 }
 
-export enum PreinstalledModels {
-    RCNN = 'RCNN Object Detector',
-    MaskRCNN = 'Mask RCNN Object Detector',
-}
-
-const core = getCore();
-const baseURL = core.config.backendAPI.slice(0, -7);
-
-function getModels(): AnyAction {
-    const action = {
-        type: ModelsActionTypes.GET_MODELS,
-        payload: {},
-    };
-
-    return action;
-}
-
-function getModelsSuccess(models: Model[]): AnyAction {
-    const action = {
-        type: ModelsActionTypes.GET_MODELS_SUCCESS,
-        payload: {
-            models,
-        },
-    };
-
-    return action;
-}
-
-function getModelsFailed(error: any): AnyAction {
-    const action = {
-        type: ModelsActionTypes.GET_MODELS_FAILED,
-        payload: {
-            error,
-        },
-    };
-
-    return action;
-}
-
-export function getModelsAsync():
-ThunkAction<Promise<void>, {}, {}, AnyAction> {
-    return async (dispatch: ActionCreator<Dispatch>): Promise<void> => {
-        const store = getCVATStore();
-        const state: CombinedState = store.getState();
+export const modelsActions = {
+    getModels: () => createAction(ModelsActionTypes.GET_MODELS),
+    getModelsSuccess: (models: Model[]) => createAction(
+        ModelsActionTypes.GET_MODELS_SUCCESS, {
+            models,
+        },
+    ),
+    getModelsFailed: (error: any) => createAction(
+        ModelsActionTypes.GET_MODELS_FAILED, {
+            error,
+        },
+    ),
+    deleteModelSuccess: (id: number) => createAction(
+        ModelsActionTypes.DELETE_MODEL_SUCCESS, {
+            id,
+        },
+    ),
+    deleteModelFailed: (id: number, error: any) => createAction(
+        ModelsActionTypes.DELETE_MODEL_FAILED, {
+            error, id,
+        },
+    ),
+    createModel: () => createAction(ModelsActionTypes.CREATE_MODEL),
+    createModelSuccess: () => createAction(ModelsActionTypes.CREATE_MODEL_SUCCESS),
+    createModelFailed: (error: any) => createAction(
+        ModelsActionTypes.CREATE_MODEL_FAILED, {
+            error,
+        },
+    ),
+    createModelUpdateStatus: (status: string) => createAction(
+        ModelsActionTypes.CREATE_MODEL_STATUS_UPDATED, {
+            status,
+        },
+    ),
+    fetchMetaFailed: (error: any) => createAction(ModelsActionTypes.FETCH_META_FAILED, { error }),
+    getInferenceStatusSuccess: (taskID: number, activeInference: ActiveInference) => createAction(
+        ModelsActionTypes.GET_INFERENCE_STATUS_SUCCESS, {
+            taskID,
+            activeInference,
+        },
+    ),
+    getInferenceStatusFailed: (taskID: number, error: any) => createAction(
+        ModelsActionTypes.GET_INFERENCE_STATUS_FAILED, {
+            taskID,
+            error,
+        },
+    ),
+    startInferenceFailed: (taskID: number, error: any) => createAction(
+        ModelsActionTypes.START_INFERENCE_FAILED, {
+            taskID,
+            error,
+        },
+    ),
+    cancelInferenceSuccess: (taskID: number) => createAction(
+        ModelsActionTypes.CANCEL_INFERENCE_SUCCESS, {
+            taskID,
+        },
+    ),
+    cancelInferenceFaild: (taskID: number, error: any) => createAction(
+        ModelsActionTypes.CANCEL_INFERENCE_FAILED, {
+            taskID,
+            error,
+        },
+    ),
+    closeRunModelDialog: () => createAction(ModelsActionTypes.CLOSE_RUN_MODEL_DIALOG),
+    showRunModelDialog: (taskInstance: any) => createAction(
+        ModelsActionTypes.SHOW_RUN_MODEL_DIALOG, {
+            taskInstance,
+        },
+    ),
+};
+
+export type ModelsActions = ActionUnion<typeof modelsActions>;
+
+const core = getCore();
+const baseURL = core.config.backendAPI.slice(0, -7);
+
+export function getModelsAsync(): ThunkAction {
+    return async (dispatch, getState): Promise<void> => {
+        const state: CombinedState = getState();
         const OpenVINO = state.plugins.list.AUTO_ANNOTATION;
         const RCNN = state.plugins.list.TF_ANNOTATION;
         const MaskRCNN = state.plugins.list.TF_SEGMENTATION;
 
-        dispatch(getModels());
+        dispatch(modelsActions.getModels());
         const models: Model[] = [];
 
         try {
@@ -170,108 +208,31 @@ ThunkAction<Promise<void>, {}, {}, AnyAction> {
                 });
             }
         } catch (error) {
-            dispatch(getModelsFailed(error));
+            dispatch(modelsActions.getModelsFailed(error));
             return;
         }
 
-        dispatch(getModelsSuccess(models));
+        dispatch(modelsActions.getModelsSuccess(models));
     };
 }
 
-function deleteModel(id: number): AnyAction {
-    const action = {
-        type: ModelsActionTypes.DELETE_MODEL,
-        payload: {
-            id,
-        },
-    };
-
-    return action;
-}
-
-function deleteModelSuccess(id: number): AnyAction {
-    const action = {
-        type: ModelsActionTypes.DELETE_MODEL_SUCCESS,
-        payload: {
-            id,
-        },
-    };
-
-    return action;
-}
-
-function deleteModelFailed(id: number, error: any): AnyAction {
-    const action = {
-        type: ModelsActionTypes.DELETE_MODEL_FAILED,
-        payload: {
-            error,
-            id,
-        },
-    };
-
-    return action;
-}
-
-export function deleteModelAsync(id: number): ThunkAction<Promise<void>, {}, {}, AnyAction> {
-    return async (dispatch: ActionCreator<Dispatch>): Promise<void> => {
-        dispatch(deleteModel(id));
-
+export function deleteModelAsync(id: number): ThunkAction {
+    return async (dispatch): Promise<void> => {
         try {
             await core.server.request(`${baseURL}/auto_annotation/delete/${id}`, {
                 method: 'DELETE',
             });
         } catch (error) {
-            dispatch(deleteModelFailed(id, error));
+            dispatch(modelsActions.deleteModelFailed(id, error));
             return;
         }
 
-        dispatch(deleteModelSuccess(id));
+        dispatch(modelsActions.deleteModelSuccess(id));
     };
 }
 
-function createModel(): AnyAction {
-    const action = {
-        type: ModelsActionTypes.CREATE_MODEL,
-        payload: {},
-    };
-
-    return action;
-}
-
-function createModelSuccess(): AnyAction {
-    const action = {
-        type: ModelsActionTypes.CREATE_MODEL_SUCCESS,
-        payload: {},
-    };
-
-    return action;
-}
-
-function createModelFailed(error: any): AnyAction {
-    const action = {
-        type: ModelsActionTypes.CREATE_MODEL_FAILED,
-        payload: {
-            error,
-        },
-    };
-
-    return action;
-}
-
-function createModelUpdateStatus(status: string): AnyAction {
-    const action = {
-        type: ModelsActionTypes.CREATE_MODEL_STATUS_UPDATED,
-        payload: {
-            status,
-        },
-    };
-
-    return action;
-}
-
-export function createModelAsync(name: string, files: ModelFiles, global: boolean):
-ThunkAction<Promise<void>, {}, {}, AnyAction> {
-    return async (dispatch: ActionCreator<Dispatch>): Promise<void> => {
+export function createModelAsync(name: string, files: ModelFiles, global: boolean): ThunkAction {
+    return async (dispatch): Promise<void> => {
         async function checkCallback(id: string): Promise<void> {
             try {
                 const data = await core.server.request(
@@ -282,30 +243,30 @@ ThunkAction<Promise<void>, {}, {}, AnyAction> {
                 switch (data.status) {
                     case 'failed':
-                        dispatch(createModelFailed(
+                        dispatch(modelsActions.createModelFailed(
                             `Checking request has returned the "${data.status}" status. Message: ${data.error}`,
                         ));
                         break;
                     case 'unknown':
-                        dispatch(createModelFailed(
+                        dispatch(modelsActions.createModelFailed(
                             `Checking request has returned the "${data.status}" status.`,
                         ));
                         break;
                     case 'finished':
-                        dispatch(createModelSuccess());
+                        dispatch(modelsActions.createModelSuccess());
                         break;
                     default:
                         if ('progress' in data) {
-                            createModelUpdateStatus(data.progress);
+                            modelsActions.createModelUpdateStatus(data.progress);
                         }
                         setTimeout(checkCallback.bind(null, id), 1000);
                 }
             } catch (error) {
-                dispatch(createModelFailed(error));
+                dispatch(modelsActions.createModelFailed(error));
            }
        }
 
-        dispatch(createModel());
+        dispatch(modelsActions.createModel());
         const data = new FormData();
         data.append('name', name);
         data.append('storage', typeof files.bin === 'string' ? 'shared' : 'local');
@@ -316,7 +277,7 @@ ThunkAction<Promise<void>, {}, {}, AnyAction> {
         }, data);
 
         try {
-            dispatch(createModelUpdateStatus('Request is beign sent..'));
+            dispatch(modelsActions.createModelUpdateStatus('Request is beign sent..'));
             const response = await core.server.request(
                 `${baseURL}/auto_annotation/create`, {
                     method: 'POST',
@@ -326,56 +287,19 @@ ThunkAction<Promise<void>, {}, {}, AnyAction> {
                 },
             );
 
-            dispatch(createModelUpdateStatus('Request is being processed..'));
+            dispatch(modelsActions.createModelUpdateStatus('Request is being processed..'));
             setTimeout(checkCallback.bind(null, response.id), 1000);
         } catch (error) {
-            dispatch(createModelFailed(error));
+            dispatch(modelsActions.createModelFailed(error));
         }
     };
 }
 
-function fetchMetaFailed(error: any): AnyAction {
-    const action = {
-        type: ModelsActionTypes.FETCH_META_FAILED,
-        payload: {
-            error,
-        },
-    };
-
-    return action;
-}
-
-function getInferenceStatusSuccess(
-    taskID: number,
-    activeInference: ActiveInference,
-): AnyAction {
-    const action = {
-        type: ModelsActionTypes.GET_INFERENCE_STATUS_SUCCESS,
-        payload: {
-            taskID,
-            activeInference,
-        },
-    };
-
-    return action;
-}
-
-function getInferenceStatusFailed(taskID: number, error: any): AnyAction {
-    const action = {
-        type: ModelsActionTypes.GET_INFERENCE_STATUS_FAILED,
-        payload: {
-            taskID,
-            error,
-        },
-    };
-
-    return action;
-}
-
 interface InferenceMeta {
     active: boolean;
     taskID: number;
     requestID: string;
+    modelType: ModelType;
 }
 
 const timers: any = {};
@@ -383,7 +307,8 @@ const timers: any = {};
 async function timeoutCallback(
     url: string,
     taskID: number,
-    dispatch: ActionCreator<Dispatch>,
+    modelType: ModelType,
+    dispatch: (action: ModelsActions) => void,
 ): Promise<void> {
     try {
         delete timers[taskID];
@@ -396,11 +321,12 @@ async function timeoutCallback(
             status: response.status,
             progress: +response.progress || 0,
             error: response.error || response.stderr || '',
+            modelType,
         };
 
         if (activeInference.status === 'unknown') {
-            dispatch(getInferenceStatusFailed(
+            dispatch(modelsActions.getInferenceStatusFailed(
                 taskID,
                 new Error(
                     `Inference status for the task ${taskID} is unknown.`,
@@ -411,7 +337,7 @@ async function timeoutCallback(
         }
 
         if (activeInference.status === 'failed') {
-            dispatch(getInferenceStatusFailed(
+            dispatch(modelsActions.getInferenceStatusFailed(
                 taskID,
                 new Error(
                     `Inference status for the task ${taskID} is failed. ${activeInference.error}`,
@@ -427,55 +353,67 @@ async function timeoutCallback(
                     null,
                     url,
                     taskID,
+                    modelType,
                     dispatch,
                 ), 3000,
             );
         }
 
-        dispatch(getInferenceStatusSuccess(taskID, activeInference));
+        dispatch(modelsActions.getInferenceStatusSuccess(taskID, activeInference));
     } catch (error) {
-        dispatch(getInferenceStatusFailed(taskID, new Error(
+        dispatch(modelsActions.getInferenceStatusFailed(taskID, new Error(
             `Server request for the task ${taskID} was failed`,
         )));
     }
 }
 
 function subscribe(
-    urlPath: string,
     inferenceMeta: InferenceMeta,
-    dispatch: ActionCreator<Dispatch>,
+    dispatch: (action: ModelsActions) => void,
 ): void {
     if (!(inferenceMeta.taskID in timers)) {
-        const requestURL = `${baseURL}/${urlPath}/${inferenceMeta.requestID}`;
+        let requestURL = `${baseURL}`;
+        if (inferenceMeta.modelType === ModelType.OPENVINO) {
+            requestURL = `${requestURL}/auto_annotation/check`;
+        } else if (inferenceMeta.modelType === ModelType.RCNN) {
+            requestURL = `${requestURL}/tensorflow/annotation/check/task`;
+        } else if (inferenceMeta.modelType === ModelType.MASK_RCNN) {
+            requestURL = `${requestURL}/tensorflow/segmentation/check/task`;
+        }
+        requestURL = `${requestURL}/${inferenceMeta.requestID}`;
+
         timers[inferenceMeta.taskID] = setTimeout(
             timeoutCallback.bind(
                 null,
                 requestURL,
                 inferenceMeta.taskID,
+                inferenceMeta.modelType,
                 dispatch,
             ),
         );
     }
 }
 
-export function getInferenceStatusAsync(tasks: number[]):
-ThunkAction<Promise<void>, {}, {}, AnyAction> {
-    return async (dispatch: ActionCreator<Dispatch>): Promise<void> => {
-        function parse(response: any): InferenceMeta[] {
+export function getInferenceStatusAsync(tasks: number[]): ThunkAction {
+    return async (dispatch, getState): Promise<void> => {
+        function parse(response: any, modelType: ModelType): InferenceMeta[] {
             return Object.keys(response).map((key: string): InferenceMeta => ({
                 taskID: +key,
                 requestID: response[key].rq_id || key,
                 active: typeof (response[key].active) === 'undefined' ? ['queued', 'started']
                     .includes(response[key].status.toLowerCase()) : response[key].active,
+                modelType,
             }));
         }
 
-        const store = getCVATStore();
-        const state: CombinedState = store.getState();
+        const state: CombinedState = getState();
         const OpenVINO = state.plugins.list.AUTO_ANNOTATION;
         const RCNN = state.plugins.list.TF_ANNOTATION;
         const MaskRCNN = state.plugins.list.TF_SEGMENTATION;
+        const dispatchCallback = (action: ModelsActions): void => {
+            dispatch(action);
+        };
 
         try {
             if (OpenVINO) {
                 const response = await core.server.request(
@@ -488,10 +426,10 @@ ThunkAction<Promise<void>, {}, {}, AnyAction> {
                     },
                 );
 
-                parse(response.run)
+                parse(response.run, ModelType.OPENVINO)
                     .filter((inferenceMeta: InferenceMeta): boolean => inferenceMeta.active)
                     .forEach((inferenceMeta: InferenceMeta): void => {
-                        subscribe('auto_annotation/check', inferenceMeta, dispatch);
+                        subscribe(inferenceMeta, dispatchCallback);
                     });
             }
@@ -506,10 +444,10 @@ ThunkAction<Promise<void>, {}, {}, AnyAction> {
                     },
                 );
 
-                parse(response)
+                parse(response, ModelType.RCNN)
                     .filter((inferenceMeta: InferenceMeta): boolean => inferenceMeta.active)
                     .forEach((inferenceMeta: InferenceMeta): void => {
-                        subscribe('tensorflow/annotation/check/task', inferenceMeta, dispatch);
+                        subscribe(inferenceMeta, dispatchCallback);
                     });
             }
@@ -524,60 +462,27 @@ ThunkAction<Promise<void>, {}, {}, AnyAction> {
                     },
                 );
 
-                parse(response)
+                parse(response, ModelType.MASK_RCNN)
                     .filter((inferenceMeta: InferenceMeta): boolean => inferenceMeta.active)
                     .forEach((inferenceMeta: InferenceMeta): void => {
-                        subscribe('tensorflow/segmentation/check/task', inferenceMeta, dispatch);
+                        subscribe(inferenceMeta, dispatchCallback);
                     });
             }
         } catch (error) {
-            dispatch(fetchMetaFailed(error));
+            dispatch(modelsActions.fetchMetaFailed(error));
         }
     };
 }
 
-function inferModel(): AnyAction {
-    const action = {
-        type: ModelsActionTypes.INFER_MODEL,
-        payload: {},
-    };
-
-    return action;
-}
-
-function inferModelSuccess(): AnyAction {
-    const action = {
-        type: ModelsActionTypes.INFER_MODEL_SUCCESS,
-        payload: {},
-    };
-
-    return action;
-}
-
-function inferModelFailed(error: any, taskID: number): AnyAction {
-    const action = {
-        type: ModelsActionTypes.INFER_MODEL_FAILED,
-        payload: {
-            taskID,
-            error,
-        },
-    };
-
-    return action;
-}
-
-export function inferModelAsync(
+export function startInferenceAsync(
     taskInstance: any,
     model: Model,
     mapping: {
         [index: string]: string;
     },
     cleanOut: boolean,
-): ThunkAction<Promise<void>, {}, {}, AnyAction> {
-    return async (dispatch: ActionCreator<Dispatch>): Promise<void> => {
-        dispatch(inferModel());
-
+): ThunkAction {
+    return async (dispatch): Promise<void> => {
         try {
             if (model.name === PreinstalledModels.RCNN) {
                 await core.server.request(
@@ -604,30 +509,39 @@ export function inferModelAsync(
             dispatch(getInferenceStatusAsync([taskInstance.id]));
         } catch (error) {
-            dispatch(inferModelFailed(error, taskInstance.id));
-            return;
+            dispatch(modelsActions.startInferenceFailed(taskInstance.id, error));
         }
-
-        dispatch(inferModelSuccess());
     };
 }
 
-export function closeRunModelDialog(): AnyAction {
-    const action = {
-        type: ModelsActionTypes.CLOSE_RUN_MODEL_DIALOG,
-        payload: {},
-    };
-
-    return action;
-}
-
-export function showRunModelDialog(taskInstance: any): AnyAction {
-    const action = {
-        type: ModelsActionTypes.SHOW_RUN_MODEL_DIALOG,
-        payload: {
-            taskInstance,
-        },
-    };
-
-    return action;
-}
+export function cancelInferenceAsync(taskID: number): ThunkAction {
+    return async (dispatch, getState): Promise<void> => {
+        try {
+            const inference = getState().models.inferences[taskID];
+            if (inference) {
+                if (inference.modelType === ModelType.OPENVINO) {
+                    await core.server.request(
+                        `${baseURL}/auto_annotation/cancel/${taskID}`,
+                    );
+                } else if (inference.modelType === ModelType.RCNN) {
+                    await core.server.request(
+                        `${baseURL}/tensorflow/annotation/cancel/task/${taskID}`,
+                    );
+                } else if (inference.modelType === ModelType.MASK_RCNN) {
+                    await core.server.request(
+                        `${baseURL}/tensorflow/segmentation/cancel/task/${taskID}`,
+                    );
+                }
+
+                if (timers[taskID]) {
+                    clearTimeout(timers[taskID]);
+                    delete timers[taskID];
+                }
+            }
+
+            dispatch(modelsActions.cancelInferenceSuccess(taskID));
+        } catch (error) {
+            dispatch(modelsActions.cancelInferenceFaild(taskID, error));
+        }
+    };
+}

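The refactored actions file above relies on `createAction`, `ActionUnion`, and a project-level `ThunkAction` imported from 'utils/redux', none of which appear in this diff. The sketch below shows one plausible shape for those helpers; the names come from the imports, but the bodies and type defaults are assumptions, not CVAT's actual implementation.

// A plausible sketch of the 'utils/redux' helpers assumed above (not CVAT's real code).
import { AnyAction } from 'redux';
import { ThunkAction as ReduxThunkAction } from 'redux-thunk';

export interface ActionWithPayload<T extends string, P> {
    type: T;
    payload: P;
}

// Returns { type } for payload-less actions and { type, payload } otherwise.
export function createAction<T extends string>(type: T): { type: T };
export function createAction<T extends string, P>(type: T, payload: P): ActionWithPayload<T, P>;
export function createAction<T extends string, P>(type: T, payload?: P) {
    return typeof payload === 'undefined' ? { type } : { type, payload };
}

// Union of everything the action-creator object can return,
// which lets reducers discriminate on action.type.
export type ActionUnion<A extends Record<string, (...args: any[]) => AnyAction>> =
    ReturnType<A[keyof A]>;

// Promise-returning thunk; binding the state parameter to CombinedState is an assumption here.
export type ThunkAction<R = Promise<void>, S = any> = ReduxThunkAction<R, S, {}, AnyAction>;

With helpers of this shape, `modelsActions.getModels()` evaluates to `{ type: ModelsActionTypes.GET_MODELS }`, and `ModelsActions` becomes the discriminated union that the reducers later in this commit switch over.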
@@ -87,37 +87,11 @@
         padding-top: 20px;
         background: $background-color-1;
 
-        /* description */
-        > div:nth-child(2) {
-            word-break: break-all;
-            max-height: 100%;
-            overflow: hidden;
-        }
-
-        /* open, actions */
-        div:nth-child(4) {
-            > div {
-                margin-right: 20px;
-            }
-
-            /* actions */
-            > div:nth-child(2) {
-                margin-right: 5px;
-                margin-top: 10px;
-
-                > div {
-                    display: flex;
-                    align-items: center;
-                }
-            }
-        }
-
         &:hover {
             border: 1px solid $border-color-hover;
         }
     }
 
     .cvat-task-item-preview-wrapper {
         display: flex;
         justify-content: center;
@@ -131,6 +105,12 @@
         }
     }
 
+    .cvat-task-item-description {
+        word-break: break-all;
+        max-height: 100%;
+        overflow: hidden;
+    }
+
     .cvat-task-progress {
         width: 100%;
     }
@@ -159,6 +139,26 @@
         margin-right: 5px;
     }
 
+    .close-auto-annotation-icon {
+        color: $danger-icon-color;
+        opacity: 0.7;
+
+        &:hover {
+            opacity: 1;
+        }
+    }
+
+    .cvat-item-open-task-actions {
+        margin-right: 5px;
+        margin-top: 10px;
+        display: flex;
+        align-items: center;
+    }
+
+    .cvat-item-open-task-button {
+        margin-right: 20px;
+    }
+
     #cvat-create-task-button {
         padding: 0 30px;
     }

@@ -14,6 +14,8 @@ import {
     Icon,
     Progress,
     Dropdown,
+    Tooltip,
+    Modal,
 } from 'antd';
 
 import moment from 'moment';
@@ -28,6 +30,7 @@ export interface TaskItemProps {
     deleted: boolean;
     hidden: boolean;
     activeInference: ActiveInference | null;
+    cancelAutoAnnotation(): void;
 }
 
 class TaskItemComponent extends React.PureComponent<TaskItemProps & RouteComponentProps> {
@@ -54,7 +57,7 @@ class TaskItemComponent extends React.PureComponent<TaskItemProps & RouteComponentProps> {
         const name = `${taskInstance.name.substring(0, 70)}${taskInstance.name.length > 70 ? '...' : ''}`;
 
         return (
-            <Col span={10}>
+            <Col span={10} className='cvat-task-item-description'>
                 <Text strong type='secondary'>{`#${id}: `}</Text>
                 <Text strong className='cvat-text-color'>{name}</Text>
                 <br />
@@ -76,6 +79,7 @@ class TaskItemComponent extends React.PureComponent<TaskItemProps & RouteComponentProps> {
         const {
             taskInstance,
             activeInference,
+            cancelAutoAnnotation,
         } = this.props;
         // Count number of jobs and performed jobs
         const numOfJobs = taskInstance.jobs.length;
@@ -132,8 +136,8 @@ class TaskItemComponent extends React.PureComponent<TaskItemProps & RouteComponentProps> {
                             <Text strong>Automatic annotation</Text>
                         </Col>
                     </Row>
-                    <Row>
-                        <Col>
+                    <Row type='flex' justify='space-between'>
+                        <Col span={22}>
                             <Progress
                                 percent={Math.floor(activeInference.progress)}
                                 strokeColor={{
@@ -145,6 +149,23 @@ class TaskItemComponent extends React.PureComponent<TaskItemProps & RouteComponentProps> {
                                 size='small'
                             />
                         </Col>
+                        <Col span={1} className='close-auto-annotation-icon'>
+                            <Tooltip title='Cancel automatic annotation'>
+                                <Icon
+                                    type='close'
+                                    onClick={() => {
+                                        Modal.confirm({
+                                            title: 'You are going to cancel automatic annotation?',
+                                            content: 'Reached progress will be lost. Continue?',
+                                            okType: 'danger',
+                                            onOk() {
+                                                cancelAutoAnnotation();
+                                            },
+                                        });
+                                    }}
+                                />
+                            </Tooltip>
+                        </Col>
                     </Row>
                 </>
             )}
@@ -164,6 +185,7 @@ class TaskItemComponent extends React.PureComponent<TaskItemProps & RouteComponentProps> {
                 <Row type='flex' justify='end'>
                     <Col>
                         <Button
+                            className='cvat-item-open-task-button'
                             type='primary'
                             size='large'
                             ghost
@@ -174,7 +196,7 @@ class TaskItemComponent extends React.PureComponent<TaskItemProps & RouteComponentProps> {
                     </Col>
                 </Row>
                 <Row type='flex' justify='end'>
-                    <Col>
+                    <Col className='cvat-item-open-task-actions'>
                         <Text className='cvat-text-color'>Actions</Text>
                         <Dropdown overlay={<ActionsMenuContainer taskInstance={taskInstance} />}>
                             <Icon className='cvat-menu-icon' component={MenuIcon} />

@@ -10,7 +10,7 @@ import {
     CombinedState,
 } from 'reducers/interfaces';
 
-import { showRunModelDialog } from 'actions/models-actions';
+import { modelsActions } from 'actions/models-actions';
 import {
     dumpAnnotationsAsync,
     loadAnnotationsAsync,
@@ -99,7 +99,7 @@ function mapDispatchToProps(dispatch: any): DispatchToProps {
             dispatch(deleteTaskAsync(taskInstance));
         },
         openRunModelWindow: (taskInstance: any): void => {
-            dispatch(showRunModelDialog(taskInstance));
+            dispatch(modelsActions.showRunModelDialog(taskInstance));
         },
     };
 }

@@ -12,8 +12,8 @@ import {
 } from 'reducers/interfaces';
 import {
     getModelsAsync,
-    inferModelAsync,
-    closeRunModelDialog,
+    startInferenceAsync,
+    modelsActions,
 } from 'actions/models-actions';
@@ -64,13 +64,13 @@ function mapDispatchToProps(dispatch: any): DispatchToProps {
            },
            cleanOut: boolean,
        ): void {
-            dispatch(inferModelAsync(taskInstance, model, mapping, cleanOut));
+            dispatch(startInferenceAsync(taskInstance, model, mapping, cleanOut));
        },
        getModels(): void {
            dispatch(getModelsAsync());
        },
        closeDialog(): void {
-            dispatch(closeRunModelDialog());
+            dispatch(modelsActions.closeRunModelDialog());
        },
    });
 }

@@ -13,9 +13,8 @@ import {
 import TaskItemComponent from 'components/tasks-page/task-item';
 
-import {
-    getTasksAsync,
-} from 'actions/tasks-actions';
+import { getTasksAsync } from 'actions/tasks-actions';
+import { cancelInferenceAsync } from 'actions/models-actions';
 
 interface StateToProps {
     deleted: boolean;
@@ -26,7 +25,8 @@ interface StateToProps {
 }
 
 interface DispatchToProps {
-    getTasks: (query: TasksQuery) => void;
+    getTasks(query: TasksQuery): void;
+    cancelAutoAnnotation(): void;
 }
 
 interface OwnProps {
@@ -48,23 +48,18 @@ function mapStateToProps(state: CombinedState, own: OwnProps): StateToProps {
     };
 }
 
-function mapDispatchToProps(dispatch: any): DispatchToProps {
+function mapDispatchToProps(dispatch: any, own: OwnProps): DispatchToProps {
     return {
-        getTasks: (query: TasksQuery): void => {
+        getTasks(query: TasksQuery): void {
             dispatch(getTasksAsync(query));
         },
+        cancelAutoAnnotation(): void {
+            dispatch(cancelInferenceAsync(own.taskID));
+        },
     };
 }
 
-type TasksItemContainerProps = StateToProps & DispatchToProps & OwnProps;
-
-function TaskItemContainer(props: TasksItemContainerProps): JSX.Element {
-    return (
-        <TaskItemComponent {...props} />
-    );
-}
-
 export default connect(
     mapStateToProps,
     mapDispatchToProps,
-)(TaskItemContainer);
+)(TaskItemComponent);

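The container above now connects `TaskItemComponent` directly, and because `mapDispatchToProps` receives `OwnProps` as its second argument, the `cancelAutoAnnotation()` prop reaches the component already bound to the right task. A hypothetical usage sketch follows; the parent component, its props, and the container import path are assumptions, and own props other than `taskID` are omitted.

// Hypothetical parent rendering the connected task items (path and props are assumed).
import React from 'react';

import TaskItemContainer from 'containers/tasks-page/task-item';

export default function TaskListSketch(props: { taskIDs: number[] }): JSX.Element {
    const { taskIDs } = props;
    return (
        <>
            {taskIDs.map((taskID: number): JSX.Element => (
                // taskID flows into mapDispatchToProps, so the dispatched
                // cancelInferenceAsync(own.taskID) targets exactly this task.
                <TaskItemContainer key={taskID} taskID={taskID} />
            ))}
        </>
    );
}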
@@ -134,10 +134,17 @@ export enum RQStatus {
     failed = 'failed',
 }
 
+export enum ModelType {
+    OPENVINO = 'openvino',
+    RCNN = 'rcnn',
+    MASK_RCNN = 'mask_rcnn',
+}
+
 export interface ActiveInference {
     status: RQStatus;
     progress: number;
     error: string;
+    modelType: ModelType;
 }
 
 export interface ModelsState {
@@ -199,6 +206,7 @@ export interface NotificationsState {
             starting: null | ErrorState;
             deleting: null | ErrorState;
             fetching: null | ErrorState;
+            canceling: null | ErrorState;
             metaFetching: null | ErrorState;
             inferenceStatusFetching: null | ErrorState;
         };

@@ -2,10 +2,8 @@
 //
 // SPDX-License-Identifier: MIT
 
-import { AnyAction } from 'redux';
-
-import { ModelsActionTypes } from 'actions/models-actions';
-import { AuthActionTypes } from 'actions/auth-actions';
+import { ModelsActionTypes, ModelsActions } from 'actions/models-actions';
+import { AuthActionTypes, AuthActions } from 'actions/auth-actions';
 import { ModelsState } from './interfaces';
 
 const defaultState: ModelsState = {
@@ -18,7 +16,7 @@ const defaultState: ModelsState = {
     inferences: {},
 };
 
-export default function (state = defaultState, action: AnyAction): ModelsState {
+export default function (state = defaultState, action: ModelsActions | AuthActions): ModelsState {
     switch (action.type) {
         case ModelsActionTypes.GET_MODELS: {
             return {
@@ -90,7 +88,7 @@ export default function (state = defaultState, action: AnyAction): ModelsState {
             };
         }
         case ModelsActionTypes.GET_INFERENCE_STATUS_SUCCESS: {
-            const inferences = { ...state.inferences };
+            const { inferences } = state;
             if (action.payload.activeInference.status === 'finished') {
                 delete inferences[action.payload.taskID];
             } else {
@@ -99,16 +97,25 @@ export default function (state = defaultState, action: AnyAction): ModelsState {
             return {
                 ...state,
-                inferences,
+                inferences: { ...inferences },
             };
         }
         case ModelsActionTypes.GET_INFERENCE_STATUS_FAILED: {
-            const inferences = { ...state.inferences };
+            const { inferences } = state;
+            delete inferences[action.payload.taskID];
+
+            return {
+                ...state,
+                inferences: { ...inferences },
+            };
+        }
+        case ModelsActionTypes.CANCEL_INFERENCE_SUCCESS: {
+            const { inferences } = state;
             delete inferences[action.payload.taskID];
 
             return {
                 ...state,
-                inferences,
+                inferences: { ...inferences },
             };
         }
         case AuthActionTypes.LOGOUT_SUCCESS: {

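In the reducer above, `GET_INFERENCE_STATUS_FAILED` and `CANCEL_INFERENCE_SUCCESS` destructure `state.inferences`, `delete` the entry in place, and then spread the map into the returned state. For comparison, here is a sketch of a fully non-mutating removal, assuming the `inferences` map is keyed by task id; this is an alternative pattern, not what the commit does.

// Sketch: remove one task's entry without mutating the original map.
import { ActiveInference } from 'reducers/interfaces';

function removeInference(
    inferences: Record<number, ActiveInference>,
    taskID: number,
): Record<number, ActiveInference> {
    // The computed key is destructured away; the rest becomes a brand-new object.
    const { [taskID]: _removed, ...rest } = inferences;
    return rest;
}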
@@ -50,6 +50,7 @@ const defaultState: NotificationsState = {
             starting: null,
             deleting: null,
             fetching: null,
+            canceling: null,
             metaFetching: null,
             inferenceStatusFetching: null,
         },
@@ -432,7 +433,7 @@ export default function (state = defaultState, action: AnyAction): NotificationsState {
                 },
             };
         }
-        case ModelsActionTypes.INFER_MODEL_FAILED: {
+        case ModelsActionTypes.START_INFERENCE_FAILED: {
             const { taskID } = action.payload;
             return {
                 ...state,
@@ -449,6 +450,23 @@ export default function (state = defaultState, action: AnyAction): NotificationsState {
                 },
             };
         }
+        case ModelsActionTypes.CANCEL_INFERENCE_FAILED: {
+            const { taskID } = action.payload;
+            return {
+                ...state,
+                errors: {
+                    ...state.errors,
+                    models: {
+                        ...state.errors.models,
+                        canceling: {
+                            message: 'Could not cancel model inference for the '
+                                + `<a href="/tasks/${taskID}" target="_blank">task ${taskID}</a>`,
+                            reason: action.payload.error.toString(),
+                        },
+                    },
+                },
+            };
+        }
         case AnnotationActionTypes.GET_JOB_FAILED: {
             return {
                 ...state,
