Merge branch 'develop' into bs/fixed_issue_2516

Boris Sekachev 5 years ago committed by GitHub
commit 7ebc8d02df

@@ -11,6 +11,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Manual review pipeline: issues/comments/workspace (<https://github.com/openvinotoolkit/cvat/pull/2357>)
 - Added basic projects implementation (<https://github.com/openvinotoolkit/cvat/pull/2255>)
+- Added documentation on how to mount cloud storage (AWS S3 bucket, Azure container, Google Drive) as FUSE (<https://github.com/openvinotoolkit/cvat/pull/2377>)
+- Added ability to work with share files without copying them inside (<https://github.com/openvinotoolkit/cvat/pull/2377>)

 ### Changed
@@ -37,6 +39,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Projects view layout fix (<https://github.com/openvinotoolkit/cvat/pull/2503>)
 - Fixed the tasks view (infinite loading) when it is impossible to get a preview of the task (<https://github.com/openvinotoolkit/cvat/pull/2504>)
 - Empty frames navigation (<https://github.com/openvinotoolkit/cvat/pull/2505>)
+- Disabled position editing in AAM (<https://github.com/openvinotoolkit/cvat/pull/2506>)
 - TypeError: Cannot read property 'toString' of undefined (<https://github.com/openvinotoolkit/cvat/pull/2517>)

 ### Security

@@ -188,6 +188,7 @@ Standard JS events are used.
 - canvas.roiselected => {points: number[]}
 - canvas.resizeshape => {id: number}
 - canvas.contextmenu => { mouseEvent: MouseEvent, objectState: ObjectState, pointID: number }
+- canvas.error => { exception: Error }
 ```

 ### WEB
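A minimal sketch (not part of the diff) of subscribing to the new `canvas.error` event; `canvasInstance` is assumed to be a constructed `Canvas` object, with `html()` returning its root element as elsewhere in this API:

```typescript
// Hedged example: react to the new canvas.error event.
// `canvasInstance` is an assumed cvat-canvas Canvas instance.
canvasInstance.html().addEventListener('canvas.error', (event: any): void => {
    // The model stores the exception and the view republishes it in event.detail.
    const { exception } = event.detail;
    console.error('Canvas reported a data error:', exception);
});
```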

@@ -1,6 +1,6 @@
 {
   "name": "cvat-canvas",
-  "version": "2.2.0",
+  "version": "2.2.1",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

@@ -1,6 +1,6 @@
 {
   "name": "cvat-canvas",
-  "version": "2.2.0",
+  "version": "2.2.1",
   "description": "Part of Computer Vision Annotation Tool which presents its canvas library",
   "main": "src/canvas.ts",
   "scripts": {

@@ -133,6 +133,7 @@ export enum UpdateReasons {
     DRAG_CANVAS = 'drag_canvas',
     ZOOM_CANVAS = 'zoom_canvas',
     CONFIG_UPDATED = 'config_updated',
+    DATA_FAILED = 'data_failed',
 }

 export enum Mode {
@@ -168,6 +169,7 @@ export interface CanvasModel {
     readonly selected: any;
     geometry: Geometry;
     mode: Mode;
+    exception: Error | null;

     zoom(x: number, y: number, direction: number): void;
     move(topOffset: number, leftOffset: number): void;
@@ -224,6 +226,7 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel {
         splitData: SplitData;
         selected: any;
         mode: Mode;
+        exception: Error | null;
     };

     public constructor() {
@@ -284,6 +287,7 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel {
             },
             selected: null,
             mode: Mode.IDLE,
+            exception: null,
         };
     }
@@ -411,6 +415,8 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel {
                 this.notify(UpdateReasons.OBJECTS_UPDATED);
             })
             .catch((exception: any): void => {
+                this.data.exception = exception;
+                this.notify(UpdateReasons.DATA_FAILED);
                 throw exception;
             });
     }
@@ -743,4 +749,7 @@ export class CanvasModelImpl extends MasterImpl implements CanvasModel {
     public get mode(): Mode {
         return this.data.mode;
     }
+    public get exception(): Error {
+        return this.data.exception;
+    }
 }

@@ -1342,6 +1342,13 @@ export class CanvasViewImpl implements CanvasView, Listener {
             }
             this.mode = Mode.IDLE;
             this.canvas.style.cursor = '';
+        } else if (reason === UpdateReasons.DATA_FAILED) {
+            const event: CustomEvent = new CustomEvent('canvas.error', {
+                detail: {
+                    exception: model.exception,
+                },
+            });
+            this.canvas.dispatchEvent(event);
         }

         if (model.imageBitmap && [UpdateReasons.IMAGE_CHANGED, UpdateReasons.OBJECTS_UPDATED].includes(reason)) {

@@ -20,7 +20,9 @@ onmessage = (e) => {
         .catch((error) => {
             postMessage({
                 id: e.data.id,
-                error,
+                error: error,
+                status: error.response.status,
+                responseData: error.response.data,
                 isSuccess: false,
             });
         });

@@ -31,7 +31,13 @@
             if (e.data.isSuccess) {
                 requests[e.data.id].resolve(e.data.responseData);
             } else {
-                requests[e.data.id].reject(e.data.error);
+                requests[e.data.id].reject({
+                    error: e.data.error,
+                    response: {
+                        status: e.data.status,
+                        data: e.data.responseData,
+                    },
+                });
             }

             delete requests[e.data.id];
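Together with the worker-side change above, an error now crosses the worker boundary as plain serializable fields and is reassembled into a response-like object before the promise is rejected. A hedged sketch of that round trip (the interface and helper names below are illustrative, not part of the diff):

```typescript
// Illustrative shape of a failure message posted by the download worker.
interface WorkerFailureMessage {
    id: number;
    isSuccess: false;
    error: Error;
    status: number;    // error.response.status, flattened for postMessage
    responseData: any; // error.response.data, flattened for postMessage
}

// Rebuild an axios-like rejection value on the receiving side,
// mirroring the reject() call in the diff above.
function rebuildRejection(msg: WorkerFailureMessage): object {
    return {
        error: msg.error,
        response: { status: msg.status, data: msg.responseData },
    };
}
```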
@@ -725,7 +731,14 @@
                 },
             );
         } catch (errorData) {
-            throw generateError(errorData);
+            throw generateError({
+                ...errorData,
+                message: '',
+                response: {
+                    ...errorData.response,
+                    data: String.fromCharCode.apply(null, new Uint8Array(errorData.response.data)),
+                },
+            });
         }

         return response;
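The surrounding request presumably uses a binary response type, so a failed chunk download delivers its error body as an `ArrayBuffer`; the catch block above decodes it to text so that `generateError` can show a readable message. A small self-contained illustration of that decoding step:

```typescript
// Decoding an ArrayBuffer error body to a string, as in the catch block above.
const body: ArrayBuffer = new Uint8Array([52, 48, 52]).buffer; // the bytes of "404"
const decoded: string = String.fromCharCode.apply(null, Array.from(new Uint8Array(body)));
console.assert(decoded === '404');
```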

@@ -973,6 +973,7 @@
             data_original_chunk_type: undefined,
             use_zip_chunks: undefined,
             use_cache: undefined,
+            copy_data: undefined,
         };

         let updatedFields = {
@@ -1239,6 +1240,22 @@
                     data.use_cache = useCache;
                 },
             },
+            /**
+             * @name copyData
+             * @type {boolean}
+             * @memberof module:API.cvat.classes.Task
+             * @instance
+             * @throws {module:API.cvat.exceptions.ArgumentError}
+             */
+            copyData: {
+                get: () => data.copy_data,
+                set: (copyData) => {
+                    if (typeof copyData !== 'boolean') {
+                        throw new ArgumentError('Value must be a boolean');
+                    }
+                    data.copy_data = copyData;
+                },
+            },
             /**
              * After task has been created value can be appended only.
              * @name labels
@@ -1908,6 +1925,9 @@
             if (typeof this.dataChunkSize !== 'undefined') {
                 taskDataSpec.chunk_size = this.dataChunkSize;
             }
+            if (typeof this.copyData !== 'undefined') {
+                taskDataSpec.copy_data = this.copyData;
+            }
             const task = await serverProxy.tasks.createTask(taskSpec, taskDataSpec, onUpdate);
             return new Task(task);
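A hedged usage sketch of the new flag when creating a task from share files; `cvat` is the assumed cvat-core entry point (as in `new cvat.classes.Task(description)` above), and `serverFiles` and `save()` are assumed by analogy with the `clientFiles` usage shown in this diff:

```typescript
// Hypothetical example: create a task from share files without copying them.
const task: any = new cvat.classes.Task({
    name: 'task from a share',
    labels: [{ name: 'person', attributes: [] }],
});
task.serverFiles = ['path/on/share/'];
task.copyData = false; // leave files on the share; true copies them into CVAT
await task.save();
```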

@@ -185,6 +185,7 @@ export enum AnnotationActionTypes {
     SAVE_LOGS_FAILED = 'SAVE_LOGS_FAILED',
     INTERACT_WITH_CANVAS = 'INTERACT_WITH_CANVAS',
     SET_AI_TOOLS_REF = 'SET_AI_TOOLS_REF',
+    GET_DATA_FAILED = 'GET_DATA_FAILED',
     SWITCH_REQUEST_REVIEW_DIALOG = 'SWITCH_REQUEST_REVIEW_DIALOG',
     SWITCH_SUBMIT_REVIEW_DIALOG = 'SWITCH_SUBMIT_REVIEW_DIALOG',
     SET_FORCE_EXIT_ANNOTATION_PAGE_FLAG = 'SET_FORCE_EXIT_ANNOTATION_PAGE_FLAG',
@@ -218,6 +219,15 @@ export function changeWorkspace(workspace: Workspace): AnyAction {
     };
 }

+export function getDataFailed(error: any): AnyAction {
+    return {
+        type: AnnotationActionTypes.GET_DATA_FAILED,
+        payload: {
+            error,
+        },
+    };
+}
+
 export function addZLayer(): AnyAction {
     return {
         type: AnnotationActionTypes.ADD_Z_LAYER,
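For orientation, a hedged sketch of the chain this action completes: the canvas dispatches `canvas.error`, the canvas wrapper (further below) forwards the exception here, and the reducers clear the frame-fetching flag and raise a notification. Dispatching it by hand against an assumed configured store would look like:

```typescript
// Illustrative only; `store` is an assumed Redux store using the annotation
// and notifications reducers changed later in this diff.
store.dispatch(getDataFailed(new Error('Could not fetch frame data')));
```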
@@ -913,7 +923,16 @@ export function getJobAsync(tid: number, jid: number, initialFrame: number, init
             const frameData = await job.frames.get(frameNumber);
             // call first getting of frame data before rendering interface
             // to load and decode first chunk
-            await frameData.data();
+            try {
+                await frameData.data();
+            } catch (error) {
+                dispatch({
+                    type: AnnotationActionTypes.GET_DATA_FAILED,
+                    payload: {
+                        error,
+                    },
+                });
+            }
             const states = await job.annotations.get(frameNumber, showAllInterpolationTracks, filters);
             const issues = await job.issues();
             const reviews = await job.reviews();

@@ -394,6 +394,9 @@ export function createTaskAsync(data: any): ThunkAction<Promise<void>, {}, {}, A
         if (data.advanced.dataChunkSize) {
             description.data_chunk_size = data.advanced.dataChunkSize;
         }
+        if (data.advanced.copyData) {
+            description.copy_data = data.advanced.copyData;
+        }
         const taskInstance = new cvat.classes.Task(description);
         taskInstance.clientFiles = data.files.local;

@@ -90,6 +90,7 @@ interface Props {
     onSwitchGrid(enabled: boolean): void;
     onSwitchAutomaticBordering(enabled: boolean): void;
     onFetchAnnotation(): void;
+    onGetDataFailed(error: any): void;
     onStartIssue(position: number[]): void;
 }
@@ -108,7 +109,7 @@ export default class CanvasWrapperComponent extends React.PureComponent<Props> {
             autoborders: automaticBordering,
             undefinedAttrValue: consts.UNDEFINED_ATTRIBUTE_VALUE,
             displayAllText: showObjectsTextAlways,
-            forceDisableEditing: workspace === Workspace.REVIEW_WORKSPACE,
+            forceDisableEditing: [Workspace.ATTRIBUTE_ANNOTATION, Workspace.REVIEW_WORKSPACE].includes(workspace),
         });

         this.initialSetup();
@@ -259,11 +260,11 @@ export default class CanvasWrapperComponent extends React.PureComponent<Props> {
         }

         if (prevProps.workspace !== workspace) {
-            if (workspace === Workspace.REVIEW_WORKSPACE) {
+            if ([Workspace.ATTRIBUTE_ANNOTATION, Workspace.REVIEW_WORKSPACE].includes(workspace)) {
                 canvasInstance.configure({
                     forceDisableEditing: true,
                 });
-            } else if (prevProps.workspace === Workspace.REVIEW_WORKSPACE) {
+            } else if ([Workspace.ATTRIBUTE_ANNOTATION, Workspace.REVIEW_WORKSPACE].includes(prevProps.workspace)) {
                 canvasInstance.configure({
                     forceDisableEditing: false,
                 });
@@ -322,10 +323,17 @@ export default class CanvasWrapperComponent extends React.PureComponent<Props> {
         canvasInstance.html().removeEventListener('canvas.splitted', this.onCanvasTrackSplitted);
         canvasInstance.html().removeEventListener('canvas.contextmenu', this.onCanvasPointContextMenu);
+        canvasInstance.html().removeEventListener('canvas.error', this.onCanvasErrorOccurrence);

         window.removeEventListener('resize', this.fitCanvas);
     }

+    private onCanvasErrorOccurrence = (event: any): void => {
+        const { exception } = event.detail;
+        const { onGetDataFailed } = this.props;
+        onGetDataFailed(exception);
+    };
+
     private onCanvasShapeDrawn = (event: any): void => {
         const {
             jobInstance, activeLabelID, activeObjectType, frame, onShapeDrawn, onCreateAnnotations,
@@ -745,6 +753,7 @@ export default class CanvasWrapperComponent extends React.PureComponent<Props> {
         canvasInstance.html().addEventListener('canvas.splitted', this.onCanvasTrackSplitted);
         canvasInstance.html().addEventListener('canvas.contextmenu', this.onCanvasPointContextMenu);
+        canvasInstance.html().addEventListener('canvas.error', this.onCanvasErrorOccurrence);
     }

     public render(): JSX.Element {

@@ -26,11 +26,13 @@ export interface AdvancedConfiguration {
     useZipChunks: boolean;
     dataChunkSize?: number;
     useCache: boolean;
+    copyData?: boolean;
 }

 type Props = FormComponentProps & {
     onSubmit(values: AdvancedConfiguration): void;
     installedGit: boolean;
+    activeFileManagerTab: string;
 };

 function isPositiveInteger(_: any, value: any, callback: any): void {
@@ -114,6 +116,26 @@ class AdvancedConfigurationForm extends React.PureComponent<Props> {
         form.resetFields();
     }

+    renderCopyDataChechbox(): JSX.Element {
+        const { form } = this.props;
+        return (
+            <Row>
+                <Col>
+                    <Form.Item help='If you have a low data transfer rate over the network you can copy data into CVAT to speed up work'>
+                        {form.getFieldDecorator('copyData', {
+                            initialValue: false,
+                            valuePropName: 'checked',
+                        })(
+                            <Checkbox>
+                                <Text className='cvat-text-color'>Copy data into CVAT</Text>
+                            </Checkbox>,
+                        )}
+                    </Form.Item>
+                </Col>
+            </Row>
+        );
+    }
+
     private renderImageQuality(): JSX.Element {
         const { form } = this.props;
@@ -386,10 +408,12 @@ class AdvancedConfigurationForm extends React.PureComponent<Props> {
     }

     public render(): JSX.Element {
-        const { installedGit } = this.props;
+        const { installedGit, activeFileManagerTab } = this.props;

         return (
             <Form>
+                {activeFileManagerTab === 'share' ? this.renderCopyDataChechbox() : null}
                 <Row>
                     <Col>{this.renderUzeZipChunks()}</Col>
                 </Row>

@@ -25,6 +25,7 @@ export interface CreateTaskData {
     advanced: AdvancedConfiguration;
     labels: any[];
     files: Files;
+    activeFileManagerTab: string;
 }

 interface Props {
@@ -53,6 +54,7 @@ const defaultState = {
         share: [],
         remote: [],
     },
+    activeFileManagerTab: 'local',
 };

 class CreateTaskContent extends React.PureComponent<Props & RouteComponentProps, State> {
@@ -132,6 +134,14 @@ class CreateTaskContent extends React.PureComponent<Props & RouteComponentProps,
         });
     };

+    private changeFileManagerTab = (key: string): void => {
+        const values = this.state;
+        this.setState({
+            ...values,
+            activeFileManagerTab: key,
+        });
+    };
+
     private handleSubmitClick = (): void => {
         if (!this.validateLabelsOrProject()) {
             notification.error({
@@ -238,6 +248,7 @@ class CreateTaskContent extends React.PureComponent<Props & RouteComponentProps,
                 <Text type='danger'>* </Text>
                 <Text className='cvat-text-color'>Select files:</Text>
                 <ConnectedFileManager
+                    onChangeActiveKey={this.changeFileManagerTab}
                     ref={(container: any): void => {
                         this.fileManagerContainer = container;
                     }}
@@ -255,6 +266,7 @@ class CreateTaskContent extends React.PureComponent<Props & RouteComponentProps,
             <Collapse.Panel key='1' header={<Text className='cvat-title'>Advanced configuration</Text>}>
                 <AdvancedConfigurationForm
                     installedGit={installedGit}
+                    activeFileManagerTab={this.state.activeFileManagerTab}
                     wrappedComponentRef={(component: any): void => {
                         this.advancedConfigurationComponent = component;
                     }}

@@ -31,6 +31,7 @@ interface Props {
     withRemote: boolean;
     treeData: TreeNodeNormal[];
     onLoadData: (key: string, success: () => void, failure: () => void) => void;
+    onChangeActiveKey(key: string): void;
 }

 export default class FileManager extends React.PureComponent<Props, State> {
@@ -215,7 +216,7 @@ export default class FileManager extends React.PureComponent<Props, State> {
     }

     public render(): JSX.Element {
-        const { withRemote } = this.props;
+        const { withRemote, onChangeActiveKey } = this.props;
         const { active } = this.state;

         return (
@@ -224,11 +225,12 @@ export default class FileManager extends React.PureComponent<Props, State> {
                 type='card'
                 activeKey={active}
                 tabBarGutter={5}
-                onChange={(activeKey: string): void =>
+                onChange={(activeKey: string): void => {
+                    onChangeActiveKey(activeKey);
                     this.setState({
                         active: activeKey as any,
-                    })
-                }
+                    });
+                }}
             >
                 {this.renderLocalSelector()}
                 {this.renderShareSelector()}

@@ -27,6 +27,7 @@ import {
     addZLayer,
     switchZLayer,
     fetchAnnotationsAsync,
+    getDataFailed,
 } from 'actions/annotation-actions';
 import {
     switchGrid,
@@ -121,6 +122,7 @@ interface DispatchToProps {
     onSwitchGrid(enabled: boolean): void;
     onSwitchAutomaticBordering(enabled: boolean): void;
     onFetchAnnotation(): void;
+    onGetDataFailed(error: any): void;
     onStartIssue(position: number[]): void;
 }
@@ -308,6 +310,9 @@ function mapDispatchToProps(dispatch: any): DispatchToProps {
         onFetchAnnotation(): void {
             dispatch(fetchAnnotationsAsync());
         },
+        onGetDataFailed(error: any): void {
+            dispatch(getDataFailed(error));
+        },
         onStartIssue(position: number[]): void {
             dispatch(reviewActions.startIssue(position));
         },

@@ -14,6 +14,7 @@ import { ShareItem, CombinedState } from 'reducers/interfaces';
 interface OwnProps {
     ref: any;
     withRemote: boolean;
+    onChangeActiveKey(key: string): void;
 }

 interface StateToProps {
@@ -68,12 +69,13 @@ export class FileManagerContainer extends React.PureComponent<Props> {
     }

     public render(): JSX.Element {
-        const { treeData, getTreeData, withRemote } = this.props;
+        const { treeData, getTreeData, withRemote, onChangeActiveKey } = this.props;

         return (
             <FileManagerComponent
                 treeData={treeData}
                 onLoadData={getTreeData}
+                onChangeActiveKey={onChangeActiveKey}
                 withRemote={withRemote}
                 ref={(component): void => {
                     this.managerComponentRef = component;

@@ -192,6 +192,18 @@ export default (state = defaultState, action: AnyAction): AnnotationState => {
                 },
             };
         }
+        case AnnotationActionTypes.GET_DATA_FAILED: {
+            return {
+                ...state,
+                player: {
+                    ...state.player,
+                    frame: {
+                        ...state.player.frame,
+                        fetching: false,
+                    },
+                },
+            };
+        }
         case AnnotationActionTypes.CHANGE_FRAME: {
             return {
                 ...state,

@@ -1076,6 +1076,21 @@ export default function (state = defaultState, action: AnyAction): Notifications
                 },
             };
         }
+        case AnnotationActionTypes.GET_DATA_FAILED: {
+            return {
+                ...state,
+                errors: {
+                    ...state.errors,
+                    annotation: {
+                        ...state.errors.annotation,
+                        jobFetching: {
+                            message: 'Could not fetch frame data from the server',
+                            reason: action.payload.error,
+                        },
+                    },
+                },
+            };
+        }
         case BoundariesActionTypes.RESET_AFTER_ERROR:
         case AuthActionTypes.LOGOUT_SUCCESS: {
             return { ...defaultState };

@@ -373,6 +373,9 @@ You can change the share device path to your actual share. For user convenience
 we have defined the environment variable \$CVAT_SHARE_URL. This variable
 contains a text (url for example) which is shown in the client-share browser.

+You can [mount](/cvat/apps/documentation/mounting_cloud_storages.md)
+your cloud storage as a FUSE and use it later as a share.
+
 ### Email verification

 You can enable email verification for newly registered users.

@@ -0,0 +1,385 @@
- [Mounting cloud storage](#mounting-cloud-storage)
- [AWS S3 bucket](#aws-s3-bucket-as-filesystem)
- [Ubuntu 20.04](#aws_s3_ubuntu_2004)
- [Mount](#aws_s3_mount)
- [Automatically mount](#aws_s3_automatically_mount)
- [Using /etc/fstab](#aws_s3_using_fstab)
- [Using systemd](#aws_s3_using_systemd)
- [Check](#aws_s3_check)
- [Unmount](#aws_s3_unmount_filesystem)
- [Azure container](#microsoft-azure-container-as-filesystem)
- [Ubuntu 20.04](#azure_ubuntu_2004)
- [Mount](#azure_mount)
- [Automatically mount](#azure_automatically_mount)
- [Using /etc/fstab](#azure_using_fstab)
- [Using systemd](#azure_using_systemd)
- [Check](#azure_check)
- [Unmount](#azure_unmount_filesystem)
- [Google Drive](#google-drive-as-filesystem)
- [Ubuntu 20.04](#google_drive_ubuntu_2004)
- [Mount](#google_drive_mount)
- [Automatically mount](#google_drive_automatically_mount)
- [Using /etc/fstab](#google_drive_using_fstab)
- [Using systemd](#google_drive_using_systemd)
- [Check](#google_drive_check)
- [Unmount](#google_drive_unmount_filesystem)
# Mounting cloud storage
## AWS S3 bucket as filesystem
### <a name="aws_s3_ubuntu_2004">Ubuntu 20.04</a>
#### <a name="aws_s3_mount">Mount</a>
1. Install s3fs:
```bash
sudo apt install s3fs
```
1. Enter your credentials in a file `${HOME}/.passwd-s3fs` and set owner-only permissions:
```bash
echo ACCESS_KEY_ID:SECRET_ACCESS_KEY > ${HOME}/.passwd-s3fs
chmod 600 ${HOME}/.passwd-s3fs
```
1. Uncomment `user_allow_other` in the `/etc/fuse.conf` file: `sudo nano /etc/fuse.conf`
1. Run s3fs (replace `bucket_name` and `mount_point`):
```bash
s3fs <bucket_name> <mount_point> -o allow_other
```
For more details see [here](https://github.com/s3fs-fuse/s3fs-fuse).
#### <a name="aws_s3_automatically_mount">Automatically mount</a>
Follow the first 3 mounting steps above.
##### <a name="aws_s3_using_fstab">Using fstab</a>
1. Create a bash script named `aws_s3_fuse` (e.g. in /usr/bin, as root) with this content
(replace `user_name` with the user on whose behalf the disk will be mounted, plus `bucket_name`, `mount_point`, `/path/to/.passwd-s3fs`):
```bash
#!/bin/bash
sudo -u <user_name> s3fs <bucket_name> <mount_point> -o passwd_file=/path/to/.passwd-s3fs -o allow_other
exit 0
```
1. Give it the execution permission:
```bash
sudo chmod +x /usr/bin/aws_s3_fuse
```
1. Edit `/etc/fstab`, adding a line like this (replace `mount_point`):
```bash
/absolute/path/to/aws_s3_fuse <mount_point> fuse allow_other,user,_netdev 0 0
```
##### <a name="aws_s3_using_systemd">Using systemd</a>
1. Create unit file `sudo nano /etc/systemd/system/s3fs.service`
(replace `user_name`, `bucket_name`, `mount_point`, `/path/to/.passwd-s3fs`):
```bash
[Unit]
Description=FUSE filesystem over AWS S3 bucket
After=network.target
[Service]
Environment="MOUNT_POINT=<mount_point>"
User=<user_name>
Group=<user_name>
ExecStart=s3fs <bucket_name> ${MOUNT_POINT} -o passwd_file=/path/to/.passwd-s3fs -o allow_other
ExecStop=fusermount -u ${MOUNT_POINT}
Restart=always
Type=forking
[Install]
WantedBy=multi-user.target
```
1. Reload the systemd configuration, enable the unit to start at boot, and mount the bucket:
```bash
sudo systemctl daemon-reload
sudo systemctl enable s3fs.service
sudo systemctl start s3fs.service
```
#### <a name="aws_s3_check">Check</a>
The file `/etc/mtab` contains records of the currently mounted filesystems.
```bash
cat /etc/mtab | grep 's3fs'
```
#### <a name="aws_s3_unmount_filesystem">Unmount filesystem</a>
```bash
fusermount -u <mount_point>
```
If you used [systemd](#aws_s3_using_systemd) to mount a bucket:
```bash
sudo systemctl stop s3fs.service
sudo systemctl disable s3fs.service
```
## Microsoft Azure container as filesystem
### <a name="azure_ubuntu_2004">Ubuntu 20.04</a>
#### <a name="azure_mount">Mount</a>
1. Set up the Microsoft package repository (more details [here](https://docs.microsoft.com/en-us/windows-server/administration/Linux-Package-Repository-for-Microsoft-Software#configuring-the-repositories)):
```bash
wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb
sudo dpkg -i packages-microsoft-prod.deb
sudo apt-get update
```
1. Install `blobfuse` and `fuse`:
```bash
sudo apt-get install blobfuse fuse
```
For more details see [here](https://github.com/Azure/azure-storage-fuse/wiki/1.-Installation).
1. Set the environment variables (replace `account_name`, `account_key`, `mount_point`):
```bash
export AZURE_STORAGE_ACCOUNT=<account_name>
export AZURE_STORAGE_ACCESS_KEY=<account_key>
MOUNT_POINT=<mount_point>
```
1. Create a folder for cache:
```bash
sudo mkdir -p /mnt/blobfusetmp
```
1. Make sure the cache folder is owned by the user who will mount the container:
```bash
sudo chown <user> /mnt/blobfusetmp
```
1. Create the mount point if it doesn't exist:
```bash
mkdir -p ${MOUNT_POINT}
```
1. Uncomment `user_allow_other` in the `/etc/fuse.conf` file: `sudo nano /etc/fuse.conf`
1. Mount the container (replace `your_container`):
```bash
blobfuse ${MOUNT_POINT} --container-name=<your_container> --tmp-path=/mnt/blobfusetmp -o allow_other
```
#### <a name="azure_automatically_mount">Automatically mount</a>
Follow the first 7 mounting steps above.
##### <a name="azure_using_fstab">Using fstab</a>
1. Create a configuration file `connection.cfg` with the content below; change `accountName`,
keep either `accountKey` or `sasToken` (deleting the other line), and replace the placeholders with your values:
```bash
accountName <account-name-here>
# Please provide either an account key or a SAS token, and delete the other line.
accountKey <account-key-here-delete-next-line>
#change authType to specify only 1
sasToken <shared-access-token-here-delete-previous-line>
authType <MSI/SAS/SPN/Key/empty>
containerName <insert-container-name-here>
```
1. Create a bash script named `azure_fuse` (e.g. in /usr/bin, as root) with the content below
(replace `user_name` with the user on whose behalf the disk will be mounted, plus `mount_point`, `/path/to/blobfusetmp`, `/path/to/connection.cfg`):
```bash
#!/bin/bash
sudo -u <user_name> blobfuse <mount_point> --tmp-path=/path/to/blobfusetmp --config-file=/path/to/connection.cfg -o allow_other
exit 0
```
1. Give it the execution permission:
```bash
sudo chmod +x /usr/bin/azure_fuse
```
1. Edit `/etc/fstab` with the blobfuse script. Add the following line (replace paths):
```bash
/absolute/path/to/azure_fuse </path/to/desired/mountpoint> fuse allow_other,user,_netdev
```
##### <a name="azure_using_systemd">Using systemd</a>
1. Create a unit file `sudo nano /etc/systemd/system/blobfuse.service`
(replace `user_name`, `mount_point`, `container_name`, `/path/to/connection.cfg`):
```bash
[Unit]
Description=FUSE filesystem over Azure container
After=network.target
[Service]
Environment="MOUNT_POINT=<mount_point>"
User=<user_name>
Group=<user_name>
ExecStart=blobfuse ${MOUNT_POINT} --container-name=<container_name> --tmp-path=/mnt/blobfusetmp --config-file=/path/to/connection.cfg -o allow_other
ExecStop=fusermount -u ${MOUNT_POINT}
Restart=always
Type=forking
[Install]
WantedBy=multi-user.target
```
1. Reload the systemd configuration, enable the unit to start at boot, and mount the container:
```bash
sudo systemctl daemon-reload
sudo systemctl enable blobfuse.service
sudo systemctl start blobfuse.service
```
For more details [see here](https://github.com/Azure/azure-storage-fuse/tree/master/systemd).
#### <a name="azure_check">Check</a>
The file `/etc/mtab` contains records of the currently mounted filesystems.
```bash
cat /etc/mtab | grep 'blobfuse'
```
#### <a name="azure_unmount_filesystem">Unmount filesystem</a>
```bash
fusermount -u <mount_point>
```
If you used [systemd](#azure_using_systemd) to mount a container:
```bash
sudo systemctl stop blobfuse.service
sudo systemctl disable blobfuse.service
```
If you have any mounting problems, check out the [answers](https://github.com/Azure/azure-storage-fuse/wiki/3.-Troubleshoot-FAQ)
to common problems.
## Google Drive as filesystem
### <a name="google_drive_ubuntu_2004">Ubuntu 20.04</a>
#### <a name="google_drive_mount">Mount</a>
To mount a Google Drive as a filesystem in user space (FUSE),
you can use [google-drive-ocamlfuse](https://github.com/astrada/google-drive-ocamlfuse).
To do this, follow the instructions below:
1. Install google-drive-ocamlfuse:
```bash
sudo add-apt-repository ppa:alessandro-strada/ppa
sudo apt-get update
sudo apt-get install google-drive-ocamlfuse
```
1. Run `google-drive-ocamlfuse` without parameters:
```bash
google-drive-ocamlfuse
```
This command will create the default application directory (`~/.gdfuse/default`),
containing the configuration file `config` (see the [wiki](https://github.com/astrada/google-drive-ocamlfuse/wiki)
page for more details about configuration), and it will start a web browser
to obtain authorization to access your Google Drive. This lets you modify
the default configuration before mounting the filesystem. Then you can choose
a local directory to mount your Google Drive (e.g. `~/GoogleDrive`).
1. Create the mount point if it doesn't exist (replace `mount_point`):
```bash
mountpoint="<mount_point>"
mkdir -p $mountpoint
```
1. Uncomment `user_allow_other` in the `/etc/fuse.conf` file: `sudo nano /etc/fuse.conf`
1. Mount the filesystem:
```bash
google-drive-ocamlfuse -o allow_other $mountpoint
```
#### <a name="google_drive_automatically_mount">Automatically mount</a>
Follow the first 4 mounting steps above.
##### <a name="google_drive_using_fstab">Using fstab</a>
1. Create a bash script named `gdfuse` (e.g. in /usr/bin, as root) with this content
(replace `user_name` with the user on whose behalf the disk will be mounted, plus `label`, `mount_point`):
```bash
#!/bin/bash
sudo -u <user_name> google-drive-ocamlfuse -o allow_other -label <label> <mount_point>
exit 0
```
1. Give it the execution permission:
```bash
sudo chmod +x /usr/bin/gdfuse
```
1. Edit `/etc/fstab`, adding a line like this (replace `mount_point`):
```bash
/absolute/path/to/gdfuse <mount_point> fuse allow_other,user,_netdev 0 0
```
For more details see [here](https://github.com/astrada/google-drive-ocamlfuse/wiki/Automounting)
##### <a name="google_drive_using_systemd">Using systemd</a>
1. Create a unit file `sudo nano /etc/systemd/system/google-drive-ocamlfuse.service`
(replace `user_name`, `label` (default `label=default`), `mount_point`):
```bash
[Unit]
Description=FUSE filesystem over Google Drive
After=network.target
[Service]
Environment="MOUNT_POINT=<mount_point>"
User=<user_name>
Group=<user_name>
ExecStart=google-drive-ocamlfuse -label <label> ${MOUNT_POINT}
ExecStop=fusermount -u ${MOUNT_POINT}
Restart=always
Type=forking
[Install]
WantedBy=multi-user.target
```
1. Reload the systemd configuration, enable the unit to start at boot, and mount the drive:
```bash
sudo systemctl daemon-reload
sudo systemctl enable google-drive-ocamlfuse.service
sudo systemctl start google-drive-ocamlfuse.service
```
For more details see [here](https://github.com/astrada/google-drive-ocamlfuse/wiki/Automounting)
#### <a name="google_drive_check">Check</a>
The file `/etc/mtab` contains records of the currently mounted filesystems.
```bash
cat /etc/mtab | grep 'google-drive-ocamlfuse'
```
#### <a name="google_drive_unmount_filesystem">Unmount filesystem</a>
```bash
fusermount -u <mount_point>
```
If you used [systemd](#google_drive_using_systemd) to mount a drive:
```bash
sudo systemctl stop google-drive-ocamlfuse.service
sudo systemctl disable google-drive-ocamlfuse.service
```

@@ -10,10 +10,9 @@ from django.conf import settings
 from cvat.apps.engine.media_extractors import (Mpeg4ChunkWriter,
     Mpeg4CompressedChunkWriter, ZipChunkWriter, ZipCompressedChunkWriter)
-from cvat.apps.engine.models import DataChoice
+from cvat.apps.engine.models import DataChoice, StorageChoice
 from cvat.apps.engine.prepare import PrepareInfo

 class CacheInteraction:
     def __init__(self):
         self._cache = Cache(settings.CACHE_ROOT)
@@ -31,28 +30,33 @@ class CacheInteraction:
     def prepare_chunk_buff(self, db_data, quality, chunk_number):
         from cvat.apps.engine.frame_provider import FrameProvider # TODO: remove circular dependency
-        extractor_classes = {
+        writer_classes = {
             FrameProvider.Quality.COMPRESSED : Mpeg4CompressedChunkWriter if db_data.compressed_chunk_type == DataChoice.VIDEO else ZipCompressedChunkWriter,
             FrameProvider.Quality.ORIGINAL : Mpeg4ChunkWriter if db_data.original_chunk_type == DataChoice.VIDEO else ZipChunkWriter,
         }

-        image_quality = 100 if extractor_classes[quality] in [Mpeg4ChunkWriter, ZipChunkWriter] else db_data.image_quality
-        mime_type = 'video/mp4' if extractor_classes[quality] in [Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter] else 'application/zip'
-        extractor = extractor_classes[quality](image_quality)
+        image_quality = 100 if writer_classes[quality] in [Mpeg4ChunkWriter, ZipChunkWriter] else db_data.image_quality
+        mime_type = 'video/mp4' if writer_classes[quality] in [Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter] else 'application/zip'
+        writer = writer_classes[quality](image_quality)

         images = []
         buff = BytesIO()
+        upload_dir = {
+            StorageChoice.LOCAL: db_data.get_upload_dirname(),
+            StorageChoice.SHARE: settings.SHARE_ROOT
+        }[db_data.storage]
         if os.path.exists(db_data.get_meta_path()):
-            source_path = os.path.join(db_data.get_upload_dirname(), db_data.video.path)
+            source_path = os.path.join(upload_dir, db_data.video.path)
             meta = PrepareInfo(source_path=source_path, meta_path=db_data.get_meta_path())
             for frame in meta.decode_needed_frames(chunk_number, db_data):
                 images.append(frame)
-            extractor.save_as_chunk([(image, source_path, None) for image in images], buff)
+            writer.save_as_chunk([(image, source_path, None) for image in images], buff)
         else:
             with open(db_data.get_dummy_chunk_path(chunk_number), 'r') as dummy_file:
-                images = [os.path.join(db_data.get_upload_dirname(), line.strip()) for line in dummy_file]
-            extractor.save_as_chunk([(image, image, None) for image in images], buff)
+                images = [os.path.join(upload_dir, line.strip()) for line in dummy_file]
+            writer.save_as_chunk([(image, image, None) for image in images], buff)

         buff.seek(0)
         return buff, mime_type

@@ -135,10 +135,12 @@ class DirectoryReader(ImageListReader):
 class ArchiveReader(DirectoryReader):
     def __init__(self, source_path, step=1, start=0, stop=None):
         self._archive_source = source_path[0]
-        Archive(self._archive_source).extractall(os.path.dirname(source_path[0]))
-        os.remove(self._archive_source)
+        extract_dir = source_path[1] if len(source_path) > 1 else os.path.dirname(source_path[0])
+        Archive(self._archive_source).extractall(extract_dir)
+        if extract_dir == os.path.dirname(source_path[0]):
+            os.remove(self._archive_source)
         super().__init__(
-            source_path=[os.path.dirname(source_path[0])],
+            source_path=[extract_dir],
             step=step,
             start=start,
             stop=stop,
@@ -178,6 +180,7 @@ class PdfReader(ImageListReader):
 class ZipReader(ImageListReader):
     def __init__(self, source_path, step=1, start=0, stop=None):
         self._zip_source = zipfile.ZipFile(source_path[0], mode='r')
+        self.extract_dir = source_path[1] if len(source_path) > 1 else None
         file_list = [f for f in self._zip_source.namelist() if get_mime(f) == 'image']
         super().__init__(file_list, step, start, stop)
@@ -197,13 +200,15 @@ class ZipReader(ImageListReader):
     def get_path(self, i):
         if self._zip_source.filename:
-            return os.path.join(os.path.dirname(self._zip_source.filename), self._source_path[i])
+            return os.path.join(os.path.dirname(self._zip_source.filename), self._source_path[i]) \
+                if not self.extract_dir else os.path.join(self.extract_dir, self._source_path[i])
         else: # necessary for mime_type definition
             return self._source_path[i]

     def extract(self):
-        self._zip_source.extractall(os.path.dirname(self._zip_source.filename))
-        os.remove(self._zip_source.filename)
+        self._zip_source.extractall(self.extract_dir if self.extract_dir else os.path.dirname(self._zip_source.filename))
+        if not self.extract_dir:
+            os.remove(self._zip_source.filename)

 class VideoReader(IMediaReader):
     def __init__(self, source_path, step=1, start=0, stop=None):

@@ -0,0 +1,19 @@
# Generated by Django 3.1.1 on 2020-12-02 06:47

import cvat.apps.engine.models
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('engine', '0034_auto_20201125_1426'),
    ]

    operations = [
        migrations.AddField(
            model_name='data',
            name='storage',
            field=models.CharField(choices=[('local', 'LOCAL'), ('share', 'SHARE')], default=cvat.apps.engine.models.StorageChoice['LOCAL'], max_length=15),
        ),
    ]

@@ -54,6 +54,18 @@ class StorageMethodChoice(str, Enum):
     def __str__(self):
         return self.value

+class StorageChoice(str, Enum):
+    #AWS_S3 = 'aws_s3_bucket'
+    LOCAL = 'local'
+    SHARE = 'share'
+
+    @classmethod
+    def choices(cls):
+        return tuple((x.value, x.name) for x in cls)
+
+    def __str__(self):
+        return self.value
+
 class Data(models.Model):
     chunk_size = models.PositiveIntegerField(null=True)
     size = models.PositiveIntegerField(default=0)
@@ -66,6 +78,7 @@ class Data(models.Model):
     original_chunk_type = models.CharField(max_length=32, choices=DataChoice.choices(),
         default=DataChoice.IMAGESET)
     storage_method = models.CharField(max_length=15, choices=StorageMethodChoice.choices(), default=StorageMethodChoice.FILE_SYSTEM)
+    storage = models.CharField(max_length=15, choices=StorageChoice.choices(), default=StorageChoice.LOCAL)

     class Meta:
         default_permissions = ()

@@ -180,8 +180,10 @@ class PrepareInfo(WorkWithVideo):
 class UploadedMeta(PrepareInfo):
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
+        uploaded_meta = kwargs.get('uploaded_meta')
+        assert uploaded_meta is not None, 'No uploaded meta path'

-        with open(self.meta_path, 'r') as meta_file:
+        with open(uploaded_meta, 'r') as meta_file:
             lines = meta_file.read().strip().split('\n')

         self.frames = int(lines.pop())

@@ -254,12 +254,13 @@ class DataSerializer(serializers.ModelSerializer):
     server_files = ServerFileSerializer(many=True, default=[])
     remote_files = RemoteFileSerializer(many=True, default=[])
     use_cache = serializers.BooleanField(default=False)
+    copy_data = serializers.BooleanField(default=False)

     class Meta:
         model = models.Data
         fields = ('chunk_size', 'size', 'image_quality', 'start_frame', 'stop_frame', 'frame_filter',
             'compressed_chunk_type', 'original_chunk_type', 'client_files', 'server_files', 'remote_files', 'use_zip_chunks',
-            'use_cache')
+            'use_cache', 'copy_data')

     # pylint: disable=no-self-use
     def validate_frame_filter(self, value):
@@ -288,6 +289,7 @@ class DataSerializer(serializers.ModelSerializer):
         remote_files = validated_data.pop('remote_files')
         validated_data.pop('use_zip_chunks')
         validated_data.pop('use_cache')
+        validated_data.pop('copy_data')

         db_data = models.Data.objects.create(**validated_data)
         data_path = db_data.get_data_dirname()
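For API clients, the serializer change means the task data endpoint now accepts a `copy_data` flag alongside `use_cache`. A hedged sketch of the resulting request (the payload shape is assumed from CVAT's v1 REST conventions; authentication is omitted):

```typescript
// Hypothetical REST call; /api/v1/tasks/{id}/data is CVAT's task data endpoint.
await fetch(`/api/v1/tasks/${taskId}/data`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        server_files: ['images/'],
        image_quality: 70,
        copy_data: false, // keep files on the share instead of copying into raw/
    }),
});
```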

@@ -15,7 +15,7 @@ from urllib import parse as urlparse
 from urllib import request as urlrequest

 from cvat.apps.engine.media_extractors import get_mime, MEDIA_TYPES, Mpeg4ChunkWriter, ZipChunkWriter, Mpeg4CompressedChunkWriter, ZipCompressedChunkWriter
-from cvat.apps.engine.models import DataChoice, StorageMethodChoice
+from cvat.apps.engine.models import DataChoice, StorageMethodChoice, StorageChoice
 from cvat.apps.engine.utils import av_scan_paths
 from cvat.apps.engine.prepare import prepare_meta
@@ -232,7 +232,10 @@ def _create_thread(tid, data):
         "File with meta information can be uploaded if 'Use cache' option is also selected"

     if data['server_files']:
-        _copy_data_from_share(data['server_files'], upload_dir)
+        if db_data.storage == StorageChoice.LOCAL:
+            _copy_data_from_share(data['server_files'], upload_dir)
+        else:
+            upload_dir = settings.SHARE_ROOT

     av_scan_paths(upload_dir)
@@ -247,8 +250,11 @@ def _create_thread(tid, data):
     if media_files:
         if extractor is not None:
             raise Exception('Combined data types are not supported')
+        source_paths=[os.path.join(upload_dir, f) for f in media_files]
+        if media_type in ('archive', 'zip') and db_data.storage == StorageChoice.SHARE:
+            source_paths.append(db_data.get_upload_dirname())
         extractor = MEDIA_TYPES[media_type]['extractor'](
-            source_path=[os.path.join(upload_dir, f) for f in media_files],
+            source_path=source_paths,
             step=db_data.get_frame_step(),
             start=db_data.start_frame,
             stop=data['stop_frame'],
@@ -303,13 +309,9 @@ def _create_thread(tid, data):
         if meta_info_file:
             try:
                 from cvat.apps.engine.prepare import UploadedMeta
-                if os.path.split(meta_info_file[0])[0]:
-                    os.replace(
-                        os.path.join(upload_dir, meta_info_file[0]),
-                        db_data.get_meta_path()
-                    )
                 meta_info = UploadedMeta(source_path=os.path.join(upload_dir, media_files[0]),
-                    meta_path=db_data.get_meta_path())
+                    meta_path=db_data.get_meta_path(),
+                    uploaded_meta=os.path.join(upload_dir, meta_info_file[0]))
                 meta_info.check_seek_key_frames()
                 meta_info.check_frames_numbers()
                 meta_info.save_meta_info()
@@ -322,6 +324,7 @@ def _create_thread(tid, data):
                 meta_info, smooth_decoding = prepare_meta(
                     media_file=media_files[0],
                     upload_dir=upload_dir,
+                    meta_dir=os.path.dirname(db_data.get_meta_path()),
                     chunk_size=db_data.chunk_size
                 )
                 assert smooth_decoding == True, 'Too few keyframes for smooth video decoding.'
@@ -329,6 +332,7 @@ def _create_thread(tid, data):
             meta_info, smooth_decoding = prepare_meta(
                 media_file=media_files[0],
                 upload_dir=upload_dir,
+                meta_dir=os.path.dirname(db_data.get_meta_path()),
                 chunk_size=db_data.chunk_size
             )
             assert smooth_decoding == True, 'Too few keyframes for smooth video decoding.'

@@ -29,7 +29,7 @@ from rest_framework import status
 from rest_framework.test import APIClient, APITestCase

 from cvat.apps.engine.models import (AttributeType, Data, Job, Project,
-    Segment, StatusChoice, Task, Label, StorageMethodChoice)
+    Segment, StatusChoice, Task, Label, StorageMethodChoice, StorageChoice)
 from cvat.apps.engine.prepare import prepare_meta, prepare_meta_for_upload

 def create_db_users(cls):
@@ -1916,7 +1916,8 @@ class TaskDataAPITestCase(APITestCase):
         return [f.to_image() for f in container.decode(stream)]

     def _test_api_v1_tasks_id_data_spec(self, user, spec, data, expected_compressed_type, expected_original_type, image_sizes,
-                                        expected_storage_method=StorageMethodChoice.FILE_SYSTEM):
+                                        expected_storage_method=StorageMethodChoice.FILE_SYSTEM,
+                                        expected_uploaded_data_location=StorageChoice.LOCAL):
         # create task
         response = self._create_task(user, spec)
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
@@ -1940,7 +1941,13 @@ class TaskDataAPITestCase(APITestCase):
         self.assertEqual(expected_compressed_type, task["data_compressed_chunk_type"])
         self.assertEqual(expected_original_type, task["data_original_chunk_type"])
         self.assertEqual(len(image_sizes), task["size"])
-        self.assertEqual(expected_storage_method, Task.objects.get(pk=task_id).data.storage_method)
+        db_data = Task.objects.get(pk=task_id).data
+        self.assertEqual(expected_storage_method, db_data.storage_method)
+        self.assertEqual(expected_uploaded_data_location, db_data.storage)
+        # check that the share was used without copying and that the files don't exist in ../raw/
+        if expected_uploaded_data_location is StorageChoice.SHARE:
+            self.assertEqual(False,
+                os.path.exists(os.path.join(db_data.get_upload_dirname(), next(iter(data.values())))))

         # check preview
         response = self._get_preview(task_id, user)
@@ -2034,7 +2041,7 @@
         self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

         task_spec = {
-            "name": "my task #2",
+            "name": "my task without copying #2",
             "overlap": 0,
             "segment_size": 0,
             "labels": [
@@ -2057,10 +2064,16 @@
             self._image_sizes[task_data["server_files[2]"]],
         ]

-        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes,
+            expected_uploaded_data_location=StorageChoice.SHARE)
+
+        task_spec.update([('name', 'my task #3')])
+        task_data.update([('copy_data', True)])
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET,
+            image_sizes, expected_uploaded_data_location=StorageChoice.LOCAL)

         task_spec = {
-            "name": "my video task #1",
+            "name": "my video task #4",
             "overlap": 0,
             "segment_size": 100,
             "labels": [
@@ -2077,7 +2090,7 @@
         self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)

         task_spec = {
-            "name": "my video task #2",
+            "name": "my video task without copying #5",
             "overlap": 0,
             "segment_size": 5,
             "labels": [
@@ -2092,10 +2105,16 @@
         }
         image_sizes = self._image_sizes[task_data["server_files[0]"]]

-        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes,
+            expected_uploaded_data_location=StorageChoice.SHARE)
+
+        task_spec.update([('name', 'my video task #6')])
+        task_data.update([('copy_data', True)])
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO,
+            image_sizes, expected_uploaded_data_location=StorageChoice.LOCAL)

         task_spec = {
-            "name": "my video task #3",
+            "name": "my video task without copying #7",
             "overlap": 0,
             "segment_size": 0,
             "labels": [
@@ -2109,10 +2128,16 @@
         }
         image_sizes = self._image_sizes[task_data["server_files[0]"]]

-        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes,
+            expected_uploaded_data_location=StorageChoice.SHARE)
+
+        task_spec.update([("name", "my video task #8")])
+        task_data.update([("copy_data", True)])
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO,
+            image_sizes, expected_uploaded_data_location=StorageChoice.LOCAL)

         task_spec = {
-            "name": "my video task #4",
+            "name": "my video task without copying #9",
             "overlap": 0,
             "segment_size": 5,
             "labels": [
@@ -2128,10 +2153,16 @@
         }
         image_sizes = self._image_sizes[task_data["server_files[0]"]]

-        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.VIDEO, image_sizes)
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.VIDEO, image_sizes,
+            expected_uploaded_data_location=StorageChoice.SHARE)
+
+        task_spec.update([('name', 'my video task #10')])
+        task_data.update([('copy_data', True)])
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.VIDEO,
+            image_sizes, expected_uploaded_data_location=StorageChoice.LOCAL)

         task_spec = {
-            "name": "my archive task #6",
+            "name": "my archive task without copying #11",
             "overlap": 0,
             "segment_size": 0,
             "labels": [
@@ -2145,10 +2176,16 @@
         }
         image_sizes = self._image_sizes[task_data["server_files[0]"]]

-        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes,
+            expected_uploaded_data_location=StorageChoice.SHARE)
+
+        task_spec.update([('name', 'my archive task #12')])
+        task_data.update([('copy_data', True)])
+        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET,
+            image_sizes, expected_uploaded_data_location=StorageChoice.LOCAL)

         task_spec = {
-            "name": "my archive task #7",
+            "name": "my archive task #13",
             "overlap": 0,
             "segment_size": 0,
             "labels": [
@@ -2165,7 +2202,7 @@
         self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

         task_spec = {
-            "name": "cached video task #8",
+            "name": "cached video task without copying #14",
             "overlap": 0,
             "segment_size": 0,
             "labels": [
@ -2183,10 +2220,15 @@ class TaskDataAPITestCase(APITestCase):
image_sizes = self._image_sizes[task_data["server_files[0]"]] image_sizes = self._image_sizes[task_data["server_files[0]"]]
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO,
self.ChunkType.VIDEO, image_sizes, StorageMethodChoice.CACHE) self.ChunkType.VIDEO, image_sizes, StorageMethodChoice.CACHE, StorageChoice.SHARE)
task_spec.update([('name', 'cached video task #15')])
task_data.update([('copy_data', True)])
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO,
image_sizes, StorageMethodChoice.CACHE, StorageChoice.LOCAL)
task_spec = { task_spec = {
"name": "cached images task #9", "name": "cached images task without copying #16",
"overlap": 0, "overlap": 0,
"segment_size": 0, "segment_size": 0,
"labels": [ "labels": [
@ -2209,10 +2251,15 @@ class TaskDataAPITestCase(APITestCase):
] ]
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET,
self.ChunkType.IMAGESET, image_sizes, StorageMethodChoice.CACHE) self.ChunkType.IMAGESET, image_sizes, StorageMethodChoice.CACHE, StorageChoice.SHARE)
task_spec.update([('name', 'cached images task #17')])
task_data.update([('copy_data', True)])
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET,
image_sizes, StorageMethodChoice.CACHE, StorageChoice.LOCAL)
task_spec = { task_spec = {
"name": "my cached zip archive task #10", "name": "my cached zip archive task without copying #18",
"overlap": 0, "overlap": 0,
"segment_size": 0, "segment_size": 0,
"labels": [ "labels": [
@ -2230,10 +2277,15 @@ class TaskDataAPITestCase(APITestCase):
image_sizes = self._image_sizes[task_data["server_files[0]"]] image_sizes = self._image_sizes[task_data["server_files[0]"]]
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET,
self.ChunkType.IMAGESET, image_sizes, StorageMethodChoice.CACHE) self.ChunkType.IMAGESET, image_sizes, StorageMethodChoice.CACHE, StorageChoice.SHARE)
task_spec.update([('name', 'my cached zip archive task #19')])
task_data.update([('copy_data', True)])
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET,
image_sizes, StorageMethodChoice.CACHE, StorageChoice.LOCAL)
task_spec = { task_spec = {
"name": "my cached pdf task #11", "name": "my cached pdf task #20",
"overlap": 0, "overlap": 0,
"segment_size": 0, "segment_size": 0,
"labels": [ "labels": [
@ -2255,7 +2307,7 @@ class TaskDataAPITestCase(APITestCase):
image_sizes, StorageMethodChoice.CACHE) image_sizes, StorageMethodChoice.CACHE)
task_spec = { task_spec = {
"name": "my pdf task #12", "name": "my pdf task #21",
"overlap": 0, "overlap": 0,
"segment_size": 0, "segment_size": 0,
"labels": [ "labels": [
@ -2280,7 +2332,7 @@ class TaskDataAPITestCase(APITestCase):
os.path.join(settings.SHARE_ROOT, "videos") os.path.join(settings.SHARE_ROOT, "videos")
) )
task_spec = { task_spec = {
"name": "my video with meta info task #13", "name": "my video with meta info task without copying #22",
"overlap": 0, "overlap": 0,
"segment_size": 0, "segment_size": 0,
"labels": [ "labels": [
@ -2297,7 +2349,13 @@ class TaskDataAPITestCase(APITestCase):
image_sizes = self._image_sizes[task_data['server_files[0]']] image_sizes = self._image_sizes[task_data['server_files[0]']]
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO,
self.ChunkType.VIDEO, image_sizes, StorageMethodChoice.CACHE) self.ChunkType.VIDEO, image_sizes, StorageMethodChoice.CACHE,
StorageChoice.SHARE)
task_spec.update([('name', 'my video with meta info task #23')])
task_data.update([('copy_data', True)])
self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO,
image_sizes, StorageMethodChoice.CACHE, StorageChoice.LOCAL)
task_spec = { task_spec = {
"name": "my cached video task #14", "name": "my cached video task #14",

@@ -39,7 +39,7 @@ from cvat.apps.dataset_manager.serializers import DatasetFormatsSerializer
 from cvat.apps.engine.frame_provider import FrameProvider
 from cvat.apps.engine.models import (
     Job, StatusChoice, Task, Project, Review, Issue,
-    Comment, StorageMethodChoice, ReviewStatus
+    Comment, StorageMethodChoice, ReviewStatus, StorageChoice
 )
 from cvat.apps.engine.serializers import (
     AboutSerializer, AnnotationFileSerializer, BasicUserSerializer,
@@ -412,10 +412,13 @@ class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
             data = {k:v for k, v in serializer.data.items()}
             data['use_zip_chunks'] = serializer.validated_data['use_zip_chunks']
             data['use_cache'] = serializer.validated_data['use_cache']
+            data['copy_data'] = serializer.validated_data['copy_data']
             if data['use_cache']:
                 db_task.data.storage_method = StorageMethodChoice.CACHE
                 db_task.data.save(update_fields=['storage_method'])
+            if data['server_files'] and data.get('copy_data') == False:
+                db_task.data.storage = StorageChoice.SHARE
+                db_task.data.save(update_fields=['storage'])
             # if the value of stop_frame is 0, then inside the function we cannot know
             # the value specified by the user or it's default value from the database
             if 'stop_frame' not in serializer.validated_data:
@@ -476,6 +479,10 @@ class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
                 return Response(data='unknown data type {}.'.format(data_type), status=status.HTTP_400_BAD_REQUEST)
         except APIException as e:
             return Response(data=e.get_full_details(), status=e.status_code)
+        except FileNotFoundError as ex:
+            msg = f"{ex.strerror} {ex.filename}"
+            slogger.task[pk].error(msg, exc_info=True)
+            return Response(data=msg, status=status.HTTP_404_NOT_FOUND)
         except Exception as e:
             msg = 'cannot get requested data type: {}, number: {}, quality: {}'.format(data_type, data_id, data_quality)
             slogger.task[pk].error(msg, exc_info=True)
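On the server side, the new `copy_data` flag is read from the serializer and, when it is false for `server_files`, the task's data storage is switched to `StorageChoice.SHARE`; a missing share file during frame or chunk requests now surfaces as HTTP 404 instead of a generic failure. A hedged client-side sketch of exercising this flag (host, credentials, task id, and file path are placeholders, not from the source):

```
# Minimal sketch against the v1 REST API; every concrete value below
# (URL, credentials, ids, paths) is a placeholder.
import requests

session = requests.Session()
session.auth = ('user', 'password')  # placeholder credentials

task_id = 42  # hypothetical existing task
resp = session.post(
    f'http://localhost:8080/api/v1/tasks/{task_id}/data',
    data={
        'image_quality': 70,
        'server_files[0]': 'videos/video.avi',  # path on the mounted share
        'copy_data': False,  # keep data on the share instead of copying it
    },
)
resp.raise_for_status()
# If the underlying share file disappears later, frame/chunk requests
# respond with 404 and "<strerror> <filename>" rather than a 500.
```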

@@ -14,28 +14,28 @@ context('Reset zoom in tag annotation', () => {
     function scaleFrame() {
         cy.get('.cvat-canvas-container').trigger('wheel', { deltaY: 5 });
-    };
+    }
     function changeCheckboxResetZoom(value) {
         cy.openSettings();
         cy.get('.ant-modal-content').within(() => {
             cy.contains('Player').click();
             cy.get('.cvat-player-settings-reset-zoom-checkbox').within(() => {
-                if (value == "check") {
+                if (value == 'check') {
                     cy.get('[type="checkbox"]').check();
-                } else if (value == "uncheck") {
+                } else if (value == 'uncheck') {
                     cy.get('[type="checkbox"]').uncheck();
-                };
+                }
             });
         });
         cy.closeSettings();
-    };
+    }
     function checkFrameNum(frameNum) {
         cy.get('.cvat-player-frame-selector').within(() => {
             cy.get('input[role="spinbutton"]').should('have.value', frameNum);
         });
-    };
+    }
     before(() => {
         cy.openTaskJob(taskName);
@@ -43,7 +43,7 @@ context('Reset zoom in tag annotation', () => {
     describe(`Testing issue "${issueId}"`, () => {
         it('Set "reset zoom" to true', () => {
-            changeCheckboxResetZoom("check");
+            changeCheckboxResetZoom('check');
         });
         it('Go to tag annotation', () => {
@@ -71,7 +71,7 @@ context('Reset zoom in tag annotation', () => {
         });
         it('Set "reset zoom" to false', () => {
-            changeCheckboxResetZoom("uncheck");
+            changeCheckboxResetZoom('uncheck');
         });
         it('Scale frame', () => {

@@ -1,4 +1,4 @@
-version: "3.3"
+version: '3.3'
 services:
   cvat:
