Refactor CI (#26)

main
Kirill Sizov 4 years ago committed by GitHub
parent 4dc01b0441
commit 2e161b0e9f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -24,7 +24,7 @@ jobs:
${{ runner.os }}-build-ui- ${{ runner.os }}-build-ui-
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1.1.2 uses: docker/setup-buildx-action@v2
- name: Caching CVAT server - name: Caching CVAT server
uses: docker/build-push-action@v2 uses: docker/build-push-action@v2

@ -7,62 +7,51 @@ on:
pull_request: pull_request:
types: [edited, ready_for_review, opened, synchronize, reopened] types: [edited, ready_for_review, opened, synchronize, reopened]
env:
API_ABOUT_PAGE: "localhost:8080/api/server/about"
jobs: jobs:
Unit_testing: cache:
if: | if: |
github.event.pull_request.draft == false && github.event.pull_request.draft == false &&
!startsWith(github.event.pull_request.title, '[WIP]') && !startsWith(github.event.pull_request.title, '[WIP]') &&
!startsWith(github.event.pull_request.title, '[Dependent]') !startsWith(github.event.pull_request.title, '[Dependent]')
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs:
sha: ${{ steps.get-sha.outputs.sha}}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPO: ${{ github.repository }}
steps: steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Getting SHA from the default branch - name: Getting SHA from the default branch
id: get-sha id: get-sha
run: | run: |
DEFAULT_BRANCH=$(curl -s \ DEFAULT_BRANCH=$(gh api /repos/$REPO | jq -r '.default_branch')
--request GET \ SHA=$(gh api /repos/$REPO/git/ref/heads/$DEFAULT_BRANCH | jq -r '.object.sha')
--url https://api.github.com/repos/${{ github.repository }} \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | \
jq -r '.default_branch')
SHA=$(curl -s \
--request GET \
--url https://api.github.com/repos/${{ github.repository }}/git/ref/heads/${DEFAULT_BRANCH} \
--header 'authorization: token ${{ secrets.GITHUB_TOKEN }}' | \
jq -r '.object.sha')
echo ::set-output name=default_branch::${DEFAULT_BRANCH} echo ::set-output name=default_branch::${DEFAULT_BRANCH}
echo ::set-output name=sha::${SHA} echo ::set-output name=sha::${SHA}
- name: Waiting a cache creation in the default branch - name: Waiting a cache creation in the default branch
if: ${{ github.ref_name != 'develop' }} env:
DEFAULT_BRANCH: ${{ steps.get-sha.outputs.default_branch }}
SHA: ${{ steps.get-sha.outputs.sha }}
run: | run: |
SLEEP=45 SLEEP=45
NUMBER_ATTEMPTS=10 NUMBER_ATTEMPTS=10
while [[ ${NUMBER_ATTEMPTS} -gt 0 ]]; do while [[ ${NUMBER_ATTEMPTS} -gt 0 ]]; do
RUN_status=$(curl -s \
--request GET \ RUN_status=$(gh api /repos/${REPO}/actions/workflows/cache.yml/runs | \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \ jq -r ".workflow_runs[]? |
--url https://api.github.com/repos/${{ github.repository }}/actions/workflows/cache.yml/runs | \ select((.head_sha == \"${SHA}\")) | .status")
jq -r '.workflow_runs[]? |
select(
(.head_sha == "${{ steps.get-sha.outputs.sha }}")
and (.event == "push")
and (.name == "Cache")
and (.head_branch == "${{ steps.get-sha.outputs.default_branch }}")
) | .status')
if [[ ${RUN_status} == "completed" ]]; then if [[ ${RUN_status} == "completed" ]]; then
echo "The cache creation on the '${{ steps.get-sha.outputs.default_branch }}' branch has finished. Status: ${RUN_status}" echo "The cache creation on the ${DEFAULT_BRANCH} branch has finished. Status: ${RUN_status}"
break break
else else
echo "The creation of the cache is not yet complete." echo "The creation of the cache is not yet complete."
echo "There are still attempts to check the cache: ${NUMBER_ATTEMPTS}" echo "There are still attempts to check the cache: ${NUMBER_ATTEMPTS}"
echo "Status of caching in the '${{ steps.get-sha.outputs.default_branch }}' branch: ${RUN_status}" echo "Status of caching in the ${DEFAULT_BRANCH} branch: ${RUN_status}"
echo "sleep ${SLEEP}" echo "sleep ${SLEEP}"
sleep ${SLEEP} sleep ${SLEEP}
((NUMBER_ATTEMPTS--)) ((NUMBER_ATTEMPTS--))
@ -73,20 +62,30 @@ jobs:
echo "Probably the creation of the cache is not yet complete. Will continue working without the cache." echo "Probably the creation of the cache is not yet complete. Will continue working without the cache."
fi fi
Unit_testing:
needs: cache
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Getting CVAT server cache from the default branch - name: Getting CVAT server cache from the default branch
uses: actions/cache@v2 uses: actions/cache@v2
with: with:
path: /tmp/cvat_cache_server path: /tmp/cvat_cache_server
key: ${{ runner.os }}-build-server-${{ steps.get-sha.outputs.sha }} key: ${{ runner.os }}-build-server-${{ needs.cache.outputs.sha }}
- name: Getting CVAT UI cache from the default branch - name: Getting CVAT UI cache from the default branch
uses: actions/cache@v2 uses: actions/cache@v2
with: with:
path: /tmp/cvat_cache_ui path: /tmp/cvat_cache_ui
key: ${{ runner.os }}-build-ui-${{ steps.get-sha.outputs.sha }} key: ${{ runner.os }}-build-ui-${{ needs.cache.outputs.sha }}
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1.1.2 uses: docker/setup-buildx-action@master
- name: Building CVAT server image - name: Building CVAT server image
uses: docker/build-push-action@v2 uses: docker/build-push-action@v2
@ -113,19 +112,10 @@ jobs:
./opa test cvat/apps/iam/rules ./opa test cvat/apps/iam/rules
- name: Running REST API tests - name: Running REST API tests
env:
API_ABOUT_PAGE: "localhost:8080/api/server/about"
# Access key length should be at least 3, and secret key length at least 8 characters
MINIO_ACCESS_KEY: "minio_access_key"
MINIO_SECRET_KEY: "minio_secret_key"
run: | run: |
docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/serverless/docker-compose.serverless.yml -f components/analytics/docker-compose.analytics.yml -f tests/rest_api/docker-compose.minio.yml up -d
/bin/bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' ${API_ABOUT_PAGE})" != "401" ]]; do sleep 5; done'
pip3 install --user -r tests/rest_api/requirements.txt pip3 install --user -r tests/rest_api/requirements.txt
pytest tests/rest_api/ -k 'GET' pytest tests/rest_api/ -k 'GET' -s
pytest tests/rest_api/ --stop-services
docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/serverless/docker-compose.serverless.yml -f components/analytics/docker-compose.analytics.yml -f tests/rest_api/docker-compose.minio.yml down -v
- name: Running unit tests - name: Running unit tests
env: env:
@ -137,6 +127,7 @@ jobs:
docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f docker-compose.ci.yml run cvat_ci /bin/bash \ docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f docker-compose.ci.yml run cvat_ci /bin/bash \
-c 'cd cvat-data && npm ci --ignore-scripts && cd ../cvat-core && npm ci --ignore-scripts && npm run test && mv ./reports/coverage/lcov.info ${CONTAINER_COVERAGE_DATA_DIR} && chmod a+rwx ${CONTAINER_COVERAGE_DATA_DIR}/lcov.info' -c 'cd cvat-data && npm ci --ignore-scripts && cd ../cvat-core && npm ci --ignore-scripts && npm run test && mv ./reports/coverage/lcov.info ${CONTAINER_COVERAGE_DATA_DIR} && chmod a+rwx ${CONTAINER_COVERAGE_DATA_DIR}/lcov.info'
- name: Uploading code coverage results as an artifact - name: Uploading code coverage results as an artifact
if: github.ref == 'refs/heads/develop' if: github.ref == 'refs/heads/develop'
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v2
@ -147,10 +138,7 @@ jobs:
${{ github.workspace }}/lcov.info ${{ github.workspace }}/lcov.info
E2E_testing: E2E_testing:
if: | needs: cache
github.event.pull_request.draft == false &&
!startsWith(github.event.pull_request.title, '[WIP]') &&
!startsWith(github.event.pull_request.title, '[Dependent]')
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
fail-fast: false fail-fast: false
@ -159,76 +147,24 @@ jobs:
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Getting SHA from the default branch
id: get-sha
run: |
DEFAULT_BRANCH=$(curl -s \
--request GET \
--url https://api.github.com/repos/${{ github.repository }} \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | \
jq -r '.default_branch')
SHA=$(curl -s \
--request GET \
--url https://api.github.com/repos/${{ github.repository }}/git/ref/heads/${DEFAULT_BRANCH} \
--header 'authorization: token ${{ secrets.GITHUB_TOKEN }}' | \
jq -r '.object.sha')
echo ::set-output name=default_branch::${DEFAULT_BRANCH}
echo ::set-output name=sha::${SHA}
- name: Waiting a cache creation in the default branch
run: |
URL_runs="https://api.github.com/repos/${{ github.repository }}/actions/workflows/cache.yml/runs"
SLEEP=45
NUMBER_ATTEMPTS=10
while [[ ${NUMBER_ATTEMPTS} -gt 0 ]]; do
RUN_status=$(curl -s \
--request GET \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \
--url https://api.github.com/repos/${{ github.repository }}/actions/workflows/cache.yml/runs | \
jq -r '.workflow_runs[]? |
select(
(.head_sha == "${{ steps.get-sha.outputs.sha }}")
and (.event == "push")
and (.name == "Cache")
and (.head_branch == "${{ steps.get-sha.outputs.default_branch }}")
) | .status')
if [[ ${RUN_status} == "completed" ]]; then
echo "The cache creation on the '${{ steps.get-sha.outputs.default_branch }}' branch has finished. Status: ${RUN_status}"
break
else
echo "The creation of the cache is not yet complete."
echo "There are still attempts to check the cache: ${NUMBER_ATTEMPTS}"
echo "Status of caching in the '${{ steps.get-sha.outputs.default_branch }}' branch: ${RUN_status}"
echo "sleep ${SLEEP}"
sleep ${SLEEP}
((NUMBER_ATTEMPTS--))
fi
done
if [[ ${NUMBER_ATTEMPTS} -eq 0 ]]; then
echo "Number of attempts expired!"
echo "Probably the creation of the cache is not yet complete. Will continue working without the cache."
fi
- name: Getting CVAT server cache from the default branch - name: Getting CVAT server cache from the default branch
uses: actions/cache@v2 uses: actions/cache@v2
with: with:
path: /tmp/cvat_cache_server path: /tmp/cvat_cache_server
key: ${{ runner.os }}-build-server-${{ steps.get-sha.outputs.sha }} key: ${{ runner.os }}-build-server-${{ needs.cache.outputs.sha }}
- name: Getting CVAT UI cache from the default branch - name: Getting CVAT UI cache from the default branch
uses: actions/cache@v2 uses: actions/cache@v2
with: with:
path: /tmp/cvat_cache_ui path: /tmp/cvat_cache_ui
key: ${{ runner.os }}-build-ui-${{ steps.get-sha.outputs.sha }} key: ${{ runner.os }}-build-ui-${{ needs.cache.outputs.sha }}
- uses: actions/setup-node@v2 - uses: actions/setup-node@v2
with: with:
node-version: '16.x' node-version: '16.x'
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1.1.2 uses: docker/setup-buildx-action@master
- name: Building CVAT server image - name: Building CVAT server image
uses: docker/build-push-action@v2 uses: docker/build-push-action@v2
@ -260,35 +196,27 @@ jobs:
DJANGO_SU_NAME: 'admin' DJANGO_SU_NAME: 'admin'
DJANGO_SU_EMAIL: 'admin@localhost.company' DJANGO_SU_EMAIL: 'admin@localhost.company'
DJANGO_SU_PASSWORD: '12qwaszx' DJANGO_SU_PASSWORD: '12qwaszx'
API_ABOUT_PAGE: "localhost:8080/api/server/about"
run: | run: |
docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/serverless/docker-compose.serverless.yml -f tests/docker-compose.file_share.yml up -d docker-compose \
/bin/bash -c 'while [[ $(curl -s -o /dev/null -w "%{http_code}" ${API_ABOUT_PAGE}) != "401" ]]; do sleep 5; done' -f docker-compose.yml \
docker exec -i cvat /bin/bash -c "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('${DJANGO_SU_NAME}', '${DJANGO_SU_EMAIL}', '${DJANGO_SU_PASSWORD}')\" | python3 ~/manage.py shell" -f docker-compose.dev.yml \
-f components/serverless/docker-compose.serverless.yml \
-f tests/docker-compose.file_share.yml up -d
/bin/bash -c \
'while [[ $(curl -s -o /dev/null -w "%{http_code}" ${{ env.API_ABOUT_PAGE }}) != "401" ]]; do sleep 5; done'
docker exec -i cvat \
/bin/bash -c \
"echo \"from django.contrib.auth.models import User; User.objects.create_superuser('${DJANGO_SU_NAME}', '${DJANGO_SU_EMAIL}', '${DJANGO_SU_PASSWORD}')\" | python3 ~/manage.py shell"
cd ./tests cd ./tests
npm ci npm ci
if [ ${{ matrix.specs }} == 'canvas3d_functionality' ]; then if [ ${{ matrix.specs }} == 'canvas3d_functionality' ]; then
# Choosing 5 test files npx cypress run --headed --browser chrome --config-file pr_cypress_canvas3d.json
selected_files=$(find ./cypress/integration | grep -e 'case.*\|issue.*' | grep js | grep 3d | sort | head -5 | tr '\n' ',')
npx cypress run \
--headed \
--browser chrome \
--env coverage=false \
--config-file cypress_canvas3d.json \
--spec "${selected_files} cypress/integration/remove_users_tasks_projects_organizations.js"
else else
# Choosing 20 test files npx cypress run --browser chrome --config-file pr_cypress.json
find ./cypress/integration | grep -e 'case.*\|issue.*' | grep js | sed '/.*3d.*/d' | sort > test_files
selected_files=$({ head -10; tail -10;} < test_files | tr '\n' ',')
rm test_files
npx cypress run \
--browser chrome \
--env coverage=false \
--spec "${selected_files} cypress/integration/remove_users_tasks_projects_organizations.js"
fi fi
- name: Creating a log file from "cvat" container logs - name: Creating a log file from "cvat" container logs
@ -323,30 +251,28 @@ jobs:
needs: [Unit_testing, E2E_testing] needs: [Unit_testing, E2E_testing]
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- name: Getting SHA from the default branch - name: Getting SHA from the default branch
id: get-sha id: get-sha
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPO: ${{ github.repository }}
run: | run: |
DEFAULT_BRANCH=$(curl -s \ DEFAULT_BRANCH=$(gh api /repos/$REPO | jq -r '.default_branch')
--request GET \ SHA=$(gh api /repos/$REPO/git/ref/heads/$DEFAULT_BRANCH | jq -r '.object.sha')
--url https://api.github.com/repos/${{ github.repository }} \
--header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | \
jq -r '.default_branch')
SHA=$(curl -s \
--request GET \
--url https://api.github.com/repos/${{ github.repository }}/git/ref/heads/${DEFAULT_BRANCH} \
--header 'authorization: token ${{ secrets.GITHUB_TOKEN }}' | \
jq -r '.object.sha')
echo ::set-output name=default_branch::${DEFAULT_BRANCH} echo ::set-output name=default_branch::${DEFAULT_BRANCH}
echo ::set-output name=sha::${SHA} echo ::set-output name=sha::${SHA}
- name: Getting CVAT server cache from the default branch - name: Getting CVAT server cache from the default branch
uses: actions/cache@v2 uses: actions/cache@v2
with: with:
path: /tmp/cvat_cache_server path: /tmp/cvat_cache_server
key: ${{ runner.os }}-build-server-${{ steps.get-sha.outputs.sha }} key: ${{ runner.os }}-build-server-${{ steps.get-sha.outputs.sha }}
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1.1.2 uses: docker/setup-buildx-action@v1.1.2
- name: Building CVAT server image - name: Building CVAT server image
uses: docker/build-push-action@v2 uses: docker/build-push-action@v2
with: with:
@ -355,10 +281,12 @@ jobs:
cache-from: type=local,src=/tmp/cvat_cache_server cache-from: type=local,src=/tmp/cvat_cache_server
tags: openvino/cvat_server:latest tags: openvino/cvat_server:latest
load: true load: true
- name: Downloading coverage results - name: Downloading coverage results
uses: actions/download-artifact@v2 uses: actions/download-artifact@v2
with: with:
name: coverage_results name: coverage_results
- name: Combining coverage results - name: Combining coverage results
run: | run: |
mkdir -p ./nyc_output_tmp mkdir -p ./nyc_output_tmp
@ -366,6 +294,7 @@ jobs:
mkdir -p ./.nyc_output mkdir -p ./.nyc_output
npm ci npm ci
npx nyc merge ./nyc_output_tmp ./.nyc_output/out.json npx nyc merge ./nyc_output_tmp ./.nyc_output/out.json
- name: Sending results to Coveralls - name: Sending results to Coveralls
env: env:
HOST_COVERAGE_DATA_DIR: ${{ github.workspace }} HOST_COVERAGE_DATA_DIR: ${{ github.workspace }}

@ -4,13 +4,132 @@ on:
- cron: '0 22 * * *' - cron: '0 22 * * *'
workflow_dispatch: workflow_dispatch:
jobs: jobs:
build: check_updates:
runs-on: ubuntu-latest
outputs:
last_commit_time: ${{ steps.check_updates.outputs.last_commit_time }}
last_night_time: ${{ steps.check_updates.outputs.last_night_time }}
steps:
- id: check_updates
env:
REPO: ${{ github.repository }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
default_branch=$(gh api /repos/$REPO | jq -r '.default_branch')
last_commit_time=$(date +%s \
-d $(gh api /repos/${REPO}/branches/${default_branch} | jq -r '.commit.commit.author.date'))
last_night_time=$(date +%s \
-d $(gh api /repos/${REPO}/actions/workflows/caching.yml/runs | jq -r '.workflow_runs[].updated_at' | sort | tail -1))
echo ::set-output name=last_commit_time::${last_commit_time}
echo ::set-output name=last_night_time::${last_night_time}
cache:
needs: check_updates
if:
needs.check_updates.outputs.last_commit_time > needs.check_updates.outputs.last_night_time
runs-on: ubuntu-latest
outputs:
sha: ${{ steps.get-sha.outputs.sha}}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPO: ${{ github.repository }}
steps:
- name: Getting SHA from the default branch
id: get-sha
run: |
DEFAULT_BRANCH=$(gh api /repos/$REPO | jq -r '.default_branch')
SHA=$(gh api /repos/$REPO/git/ref/heads/$DEFAULT_BRANCH | jq -r '.object.sha')
echo ::set-output name=default_branch::${DEFAULT_BRANCH}
echo ::set-output name=sha::${SHA}
- name: Waiting a cache creation in the default branch
if: ${{ github.ref_name != 'develop' }}
env:
DEFAULT_BRANCH: ${{ steps.get-sha.outputs.default_branch }}
SHA: ${{ steps.get-sha.outputs.sha }}
run: |
SLEEP=45
NUMBER_ATTEMPTS=10
while [[ ${NUMBER_ATTEMPTS} -gt 0 ]]; do
RUN_status=$(gh api /repos/${REPO}/actions/workflows/cache.yml/runs | \
jq -r ".workflow_runs[]? |
select((.head_sha == \"${SHA}\")) | .status")
if [[ ${RUN_status} == "completed" ]]; then
echo "The cache creation on the ${DEFAULT_BRANCH} branch has finished. Status: ${RUN_status}"
break
else
echo "The creation of the cache is not yet complete."
echo "There are still attempts to check the cache: ${NUMBER_ATTEMPTS}"
echo "Status of caching in the ${DEFAULT_BRANCH} branch: ${RUN_status}"
echo "sleep ${SLEEP}"
sleep ${SLEEP}
((NUMBER_ATTEMPTS--))
fi
done
if [[ ${NUMBER_ATTEMPTS} -eq 0 ]]; then
echo "Number of attempts expired!"
echo "Probably the creation of the cache is not yet complete. Will continue working without the cache."
fi
run_tests:
needs: cache
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions/setup-node@v2 - uses: actions/setup-node@v2
with: with:
node-version: '16.x' node-version: '16.x'
- uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Getting CVAT server cache from the default branch
uses: actions/cache@v2
with:
path: /tmp/cvat_cache_server
key: ${{ runner.os }}-build-server-${{ needs.cache.outputs.sha }}
- name: Getting CVAT UI cache from the default branch
uses: actions/cache@v2
with:
path: /tmp/cvat_cache_ui
key: ${{ runner.os }}-build-ui-${{ needs.cache.outputs.sha }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Building CVAT server image
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
cache-from: type=local,src=/tmp/cvat_cache_server
tags: openvino/cvat_server:latest
load: true
- name: Building CVAT UI image
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile.ui
cache-from: type=local,src=/tmp/cvat_cache_ui
tags: openvino/cvat_ui:latest
load: true
- name: Running REST API tests
run: |
pip3 install --user -r tests/rest_api/requirements.txt
pytest tests/rest_api/
pytest tests/rest_api/ --stop-services
- name: Build CVAT - name: Build CVAT
env: env:
DJANGO_SU_NAME: "admin" DJANGO_SU_NAME: "admin"
@ -21,11 +140,21 @@ jobs:
docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f ./tests/docker-compose.email.yml -f tests/docker-compose.file_share.yml -f components/serverless/docker-compose.serverless.yml up -d --build docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f ./tests/docker-compose.email.yml -f tests/docker-compose.file_share.yml -f components/serverless/docker-compose.serverless.yml up -d --build
/bin/bash -c 'while [[ $(curl -s -o /dev/null -w "%{http_code}" ${API_ABOUT_PAGE}) != "401" ]]; do sleep 5; done' /bin/bash -c 'while [[ $(curl -s -o /dev/null -w "%{http_code}" ${API_ABOUT_PAGE}) != "401" ]]; do sleep 5; done'
docker exec -i cvat /bin/bash -c "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('${DJANGO_SU_NAME}', '${DJANGO_SU_EMAIL}', '${DJANGO_SU_PASSWORD}')\" | python3 ~/manage.py shell" docker exec -i cvat /bin/bash -c "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('${DJANGO_SU_NAME}', '${DJANGO_SU_EMAIL}', '${DJANGO_SU_PASSWORD}')\" | python3 ~/manage.py shell"
- name: End-to-end testing - name: End-to-end testing
run: | run: |
cd ./tests cd ./tests
npm ci npm ci
npm run cypress:run:firefox npm run cypress:run:firefox
- name: Unit tests
run: |
python manage.py test cvat/apps utils/cli
npm ci
cd cvat-core
npm run test
- name: Uploading cypress screenshots as an artifact - name: Uploading cypress screenshots as an artifact
if: failure() if: failure()
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v2

@ -15,6 +15,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed ### Changed
- Bumped nuclio version to 1.8.14 (<https://github.com/cvat-ai/cvat/pull/29>) - Bumped nuclio version to 1.8.14 (<https://github.com/cvat-ai/cvat/pull/29>)
- Simplified running REST API tests. Extended CI-nightly workflow (<https://github.com/cvat-ai/cvat/pull/26>)
### Deprecated ### Deprecated
- TDB - TDB

@ -134,6 +134,7 @@ description: 'Installing a development environment for different operating syste
You have done! Now it is possible to insert breakpoints and debug server and client of the tool. You have done! Now it is possible to insert breakpoints and debug server and client of the tool.
Instructions for running tests locally are available [here](/site/content/en/docs/contributing/running-tests.md).
## Note for Windows users ## Note for Windows users

@ -0,0 +1,94 @@
---
title: 'Running tests'
linkTitle: 'Running tests'
weight: 11
description: 'Instructions on how to run all existence tests.'
---
# E2E tests
**Initial steps**:
1. Run CVAT instance:
```
docker-compose \
-f docker-compose.yml \
-f docker-compose.dev.yml \
-f components/serverless/docker-compose.serverless.yml \
-f tests/docker-compose.file_share.yml up -d
```
1. Add test user in CVAT:
```
docker exec -i cvat \
/bin/bash -c \
"echo \"from django.contrib.auth.models import User; User.objects.create_superuser('admin', 'admin@localhost.company', '12qwaszx')\" | python3 ~/manage.py shell"
```
1. Install npm dependencies:
```
cd tests
npm ci
```
**Running tests**
```
npm run cypress:run:chrome
npm run cypress:run:chrome:canvas3d
```
# REST API tests
**Initial steps**
1. Install all necessary requirements before running REST API tests:
```
pip install -r ./tests/rest_api/requirements.txt
```
**Running tests**
Run all REST API tests:
```
pytest ./tests/rest_api
```
This command will automatically start all necessary docker containers.
If you want to start/stop these containers without running tests
use special options for it:
```
pytest ./tests/rest_api --start-services
pytest ./tests/rest_api --stop-services
```
If you need to rebuild your CVAT images add `--rebuild` option:
```
pytest ./tests/rest_api --rebuild
```
# Unit tests
**Initial steps**
1. Install necessary Python dependencies:
```
pip install -r cvat/requirements/testing.txt
```
1. Install npm dependencies:
```
npm ci
```
1. Run CVAT instance
```
docker-compose -f docker-compose.yml -f docker-compose.dev.yml up -d
```
**Running tests**
1. Python tests
```
python manage.py test --settings cvat.settings.testing cvat/apps utils/cli
```
1. JS tests
```
cd cvat-core
npm run test
```

@ -0,0 +1,37 @@
{
"video": false,
"baseUrl": "http://localhost:8080",
"viewportWidth": 1300,
"viewportHeight": 960,
"defaultCommandTimeout": 25000,
"downloadsFolder": "cypress/fixtures",
"env": {
"user": "admin",
"email": "admin@localhost.company",
"password": "12qwaszx",
"coverage": false
},
"testFiles": [
"actions_objects2/case_108_rotated_bounding_boxes.js",
"actions_objects2/case_10_polygon_shape_track_label_points.js",
"actions_objects2/case_115_ellipse_shape_track_label.js",
"actions_objects2/case_11_polylines_shape_track_label_points.js",
"actions_objects2/case_12_points_shape_track_label.js",
"actions_objects2/case_13_merge_split_features.js",
"actions_objects2/case_14_appearance_features.js",
"actions_objects2/case_15_group_features.js",
"actions_objects2/case_16_z_order_features.js",
"actions_objects2/case_17_lock_hide_features.js",
"issues_prs/issue_2418_object_tag_same_labels.js",
"issues_prs/issue_2485_navigation_empty_frames.js",
"issues_prs/issue_2486_not_edit_object_aam.js",
"issues_prs/issue_2487_extra_instances_canvas_grouping.js",
"issues_prs/issue_2661_displaying_attached_files_when_creating_task.js",
"issues_prs/issue_2753_call_HOC_component_each_render.js",
"issues_prs/issue_2807_polyline_editing.js",
"issues_prs/issue_2992_crop_polygon_properly.js",
"issues_prs/pr_1370_check_UI_fail_with_object_dragging_and_go_next_frame.js",
"issues_prs/pr_2203_error_cannot_read_property_at_saving_job.js",
"remove_users_tasks_projects_organizations.js"
]
}

@ -0,0 +1,22 @@
{
"video": false,
"baseUrl": "http://localhost:8080",
"viewportWidth": 1300,
"viewportHeight": 960,
"defaultCommandTimeout": 25000,
"downloadsFolder": "cypress/fixtures",
"env": {
"user": "admin",
"email": "admin@localhost.company",
"password": "12qwaszx",
"coverage": false
},
"testFiles": [
"actions_projects_models/case_104_project_export_3d.js",
"canvas3d_functionality_2/case_56_canvas3d_functionality_basic_actions.js",
"canvas3d_functionality_2/case_62_canvas3d_functionality_views_resize.js",
"canvas3d_functionality_2/case_63_canvas3d_functionality_control_button_mouse_interaction.js",
"canvas3d_functionality_2/case_64_canvas3d_functionality_cuboid.js",
"remove_users_tasks_projects_organizations.js"
]
}

@ -19,12 +19,6 @@ the server calling REST API directly (as it done by users).
## How to run? ## How to run?
1. Execute commands below to run docker containers:
```console
export MINIO_ACCESS_KEY="minio_access_key"
export MINIO_SECRET_KEY="minio_secret_key"
docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/analytics/docker-compose.analytics.yml -f tests/rest_api/docker-compose.minio.yml up -d --build
```
1. After that please look at documentation for [pytest](https://docs.pytest.org/en/6.2.x/). 1. After that please look at documentation for [pytest](https://docs.pytest.org/en/6.2.x/).
Generally, you have to install requirements and run the following command from Generally, you have to install requirements and run the following command from
the root directory of the cloned CVAT repository: the root directory of the cloned CVAT repository:
@ -34,6 +28,9 @@ the server calling REST API directly (as it done by users).
pytest tests/rest_api/ pytest tests/rest_api/
``` ```
See the [contributing guide](../../site/content/en/docs/contributing/running-tests.md)
to get more information about tests running.
## How to upgrade testing assets? ## How to upgrade testing assets?
When you have a new use case which cannot be expressed using objects already When you have a new use case which cannot be expressed using objects already
@ -69,8 +66,8 @@ for i, color in enumerate(colormap):
To backup DB and data volume, please use commands below. To backup DB and data volume, please use commands below.
```console ```console
docker exec cvat python manage.py dumpdata --indent 2 > assets/cvat_db/data.json docker exec test_cvat_1 python manage.py dumpdata --indent 2 > assets/cvat_db/data.json
docker exec cvat tar -cjv /home/django/data > assets/cvat_db/cvat_data.tar.bz2 docker exec test_cvat_1 tar -cjv /home/django/data > assets/cvat_db/cvat_data.tar.bz2
``` ```
> Note: if you won't be use --indent options or will be use with other value > Note: if you won't be use --indent options or will be use with other value
@ -90,8 +87,8 @@ python utils/dump_objects.py
To restore DB and data volume, please use commands below. To restore DB and data volume, please use commands below.
```console ```console
cat assets/cvat_db/data.json | docker exec -i cvat python manage.py loaddata --format=json - cat assets/cvat_db/data.json | docker exec -i test_cvat_1 python manage.py loaddata --format=json -
cat assets/cvat_db/cvat_data.tar.bz2 | docker exec -i cvat tar --strip 3 -C /home/django/data/ -xj cat assets/cvat_db/cvat_data.tar.bz2 | docker exec -i test_cvat_1 tar --strip 3 -C /home/django/data/ -xj
``` ```
## Assets directory structure ## Assets directory structure
@ -173,9 +170,9 @@ Assets directory has two parts:
1. If your test infrastructure has been corrupted and you have errors during db restoring. 1. If your test infrastructure has been corrupted and you have errors during db restoring.
You should to create (or recreate) `cvat` database: You should to create (or recreate) `cvat` database:
``` ```
docker exec cvat_db dropdb --if-exists cvat docker exec test_cvat_db_1 dropdb --if-exists cvat
docker exec cvat_db createdb cvat docker exec test_cvat_db_1 createdb cvat
docker exec cvat python manage.py migrate docker exec test_cvat_1 python manage.py migrate
``` ```
1. Perform migrate when some relation does not exists. Example of error message: 1. Perform migrate when some relation does not exists. Example of error message:
@ -184,7 +181,7 @@ Assets directory has two parts:
``` ```
Solution: Solution:
``` ```
docker exec cvat python manage.py migrate docker exec test_cvat_1 python manage.py migrate
``` ```
1. If for some reason you need to recreate cvat database, but using `dropdb` 1. If for some reason you need to recreate cvat database, but using `dropdb`
@ -196,6 +193,6 @@ Assets directory has two parts:
In this case you should terminate all existent connections for cvat database, In this case you should terminate all existent connections for cvat database,
you can perform it with command: you can perform it with command:
``` ```
docker exec cvat_db psql -U root -d postgres -v from=cvat -v to=test_db -f restore.sql docker exec test_cvat_db_1 psql -U root -d postgres -v from=cvat -v to=test_db -f restore.sql
``` ```

@ -1,306 +1,2 @@
# Copyright (C) 2021 Intel Corporation from .fixtures.init import *
# from .fixtures.data import *
# SPDX-License-Identifier: MIT
from subprocess import run, CalledProcessError
import pytest
import json
import os.path as osp
from .utils.config import ASSETS_DIR
CVAT_DB_DIR = osp.join(ASSETS_DIR, 'cvat_db')
def _run(command):
try:
run(command.split(), check=True) #nosec
except CalledProcessError:
pytest.exit(f'Command failed: {command}. Add `-s` option to see more details')
def restore_data_volume():
_run(f"docker container cp {osp.join(ASSETS_DIR, 'cvat_db', 'cvat_data.tar.bz2')} cvat:cvat_data.tar.bz2")
_run(f"docker exec -i cvat tar --strip 3 -xjf /cvat_data.tar.bz2 -C /home/django/data/")
def create_test_db():
_run(f"docker container cp {osp.join(CVAT_DB_DIR, 'restore.sql')} cvat_db:restore.sql")
_run(f"docker container cp {osp.join(CVAT_DB_DIR, 'data.json')} cvat:data.json")
_run('docker exec cvat python manage.py loaddata /data.json')
_run('docker exec cvat_db psql -U root -d postgres -v from=cvat -v to=test_db -f restore.sql')
@pytest.fixture(scope='session', autouse=True)
def init_test_db():
restore_data_volume()
create_test_db()
yield
_run('docker exec cvat_db psql -U root -d postgres -v from=test_db -v to=cvat -f restore.sql')
_run('docker exec cvat_db dropdb test_db')
@pytest.fixture(scope='function')
def restore():
_run('docker exec cvat_db psql -U root -d postgres -v from=test_db -v to=cvat -f restore.sql')
@pytest.fixture(scope='function')
def restore_cvat_data():
restore_data_volume()
class Container:
def __init__(self, data, key='id'):
self.raw_data = data
self.map_data = { obj[key]: obj for obj in data }
@property
def raw(self):
return self.raw_data
@property
def map(self):
return self.map_data
def __iter__(self):
return iter(self.raw_data)
def __len__(self):
return len(self.raw_data)
def __getitem__(self, key):
if isinstance(key, slice):
return self.raw_data[key]
return self.map_data[key]
@pytest.fixture(scope='module')
def users():
with open(osp.join(ASSETS_DIR, 'users.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='module')
def organizations():
with open(osp.join(ASSETS_DIR, 'organizations.json')) as f:
return Container(json.load(f))
@pytest.fixture(scope='module')
def memberships():
with open(osp.join(ASSETS_DIR, 'memberships.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='module')
def tasks():
with open(osp.join(ASSETS_DIR, 'tasks.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='module')
def projects():
with open(osp.join(ASSETS_DIR, 'projects.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='module')
def jobs():
with open(osp.join(ASSETS_DIR, 'jobs.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='module')
def invitations():
with open(osp.join(ASSETS_DIR, 'invitations.json')) as f:
return Container(json.load(f)['results'], key='key')
@pytest.fixture(scope='module')
def annotations():
with open(osp.join(ASSETS_DIR, 'annotations.json')) as f:
return json.load(f)
@pytest.fixture(scope='module')
def cloud_storages():
with open(osp.join(ASSETS_DIR, 'cloudstorages.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='module')
def issues():
with open(osp.join(ASSETS_DIR, 'issues.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='module')
def users_by_name(users):
return {user['username']: user for user in users}
@pytest.fixture(scope='module')
def jobs_by_org(tasks, jobs):
data = {}
for job in jobs:
data.setdefault(tasks[job['task_id']]['organization'], []).append(job)
data[''] = data.pop(None, [])
return data
@pytest.fixture(scope='module')
def tasks_by_org(tasks):
data = {}
for task in tasks:
data.setdefault(task['organization'], []).append(task)
data[''] = data.pop(None, [])
return data
@pytest.fixture(scope='module')
def issues_by_org(tasks, jobs, issues):
data = {}
for issue in issues:
data.setdefault(tasks[jobs[issue['job']]['task_id']]['organization'], []).append(issue)
data[''] = data.pop(None, [])
return data
@pytest.fixture(scope='module')
def assignee_id():
def get_id(data):
if data.get('assignee') is not None:
return data['assignee']['id']
return get_id
def ownership(func):
def wrap(user_id, resource_id):
if resource_id is None:
return False
return func(user_id, resource_id)
return wrap
@pytest.fixture(scope='module')
def is_project_staff(projects, assignee_id):
@ownership
def check(user_id, pid):
return user_id == projects[pid]['owner']['id'] or \
user_id == assignee_id(projects[pid])
return check
@pytest.fixture(scope='module')
def is_task_staff(tasks, is_project_staff, assignee_id):
@ownership
def check(user_id, tid):
return user_id == tasks[tid]['owner']['id'] or \
user_id == assignee_id(tasks[tid]) or \
is_project_staff(user_id, tasks[tid]['project_id'])
return check
@pytest.fixture(scope='module')
def is_job_staff(jobs, is_task_staff, assignee_id):
@ownership
def check(user_id, jid):
return user_id == assignee_id(jobs[jid]) or \
is_task_staff(user_id, jobs[jid]['task_id'])
return check
@pytest.fixture(scope='module')
def is_issue_staff(issues, jobs, assignee_id):
@ownership
def check(user_id, issue_id):
return user_id == issues[issue_id]['owner']['id'] or \
user_id == assignee_id(issues[issue_id]) or \
user_id == assignee_id(jobs[issues[issue_id]['job']])
return check
@pytest.fixture(scope='module')
def is_issue_admin(issues, jobs, is_task_staff):
@ownership
def check(user_id, issue_id):
return is_task_staff(user_id, jobs[issues[issue_id]['job']]['task_id'])
return check
@pytest.fixture(scope='module')
def find_users(test_db):
def find(**kwargs):
assert len(kwargs) > 0
assert any(kwargs.values())
data = test_db
kwargs = dict(filter(lambda a: a[1] is not None, kwargs.items()))
for field, value in kwargs.items():
if field.startswith('exclude_'):
field = field.split('_', maxsplit=1)[1]
exclude_rows = set(v['id'] for v in
filter(lambda a: a[field] == value, test_db))
data = list(filter(lambda a: a['id'] not in exclude_rows, data))
else:
data = list(filter(lambda a: a[field] == value, data))
return data
return find
@pytest.fixture(scope='module')
def test_db(users, users_by_name, memberships):
data = []
fields = ['username', 'id', 'privilege', 'role', 'org', 'membership_id']
def add_row(**kwargs):
data.append({field: kwargs.get(field) for field in fields})
for user in users:
for group in user['groups']:
add_row(username=user['username'], id=user['id'], privilege=group)
for membership in memberships:
username = membership['user']['username']
for group in users_by_name[username]['groups']:
add_row(username=username, role=membership['role'], privilege=group,
id=membership['user']['id'], org=membership['organization'],
membership_id=membership['id'])
return data
@pytest.fixture(scope='module')
def org_staff(memberships):
def find(org_id):
if org_id in ['', None]:
return set()
else:
return set(m['user']['id'] for m in memberships
if m['role'] in ['maintainer', 'owner'] and m['user'] is not None
and m['organization'] == org_id)
return find
@pytest.fixture(scope='module')
def is_org_member(memberships):
def check(user_id, org_id):
if org_id in ['', None]:
return True
else:
return user_id in set(m['user']['id'] for m in memberships
if m['user'] is not None and m['organization'] == org_id)
return check
@pytest.fixture(scope='module')
def find_job_staff_user(is_job_staff):
def find(jobs, users, is_staff):
for job in jobs:
for user in users:
if is_staff == is_job_staff(user['id'], job['id']):
return user['username'], job['id']
return None, None
return find
@pytest.fixture(scope='module')
def find_task_staff_user(is_task_staff):
def find(tasks, users, is_staff):
for task in tasks:
for user in users:
if is_staff == is_task_staff(user['id'], task['id']):
return user['username'], task['id']
return None, None
return find
@pytest.fixture(scope='module')
def find_issue_staff_user(is_issue_staff, is_issue_admin):
def find(issues, users, is_staff, is_admin):
for issue in issues:
for user in users:
i_admin, i_staff = is_issue_admin(user['id'], issue['id']), is_issue_staff(user['id'], issue['id'])
if (is_admin is None and (i_staff or i_admin) == is_staff) \
or (is_admin == i_admin and is_staff == i_staff):
return user['username'], issue['id']
return None, None
return find
@pytest.fixture(scope='module')
def filter_jobs_with_shapes(annotations):
def find(jobs):
return list(filter(lambda j: annotations['job'][str(j['id'])]['shapes'], jobs))
return find
@pytest.fixture(scope='module')
def filter_tasks_with_shapes(annotations):
def find(tasks):
return list(filter(lambda t: annotations['task'][str(t['id'])]['shapes'], tasks))
return find

@ -13,8 +13,8 @@ services:
- 9000:9000 - 9000:9000
- 9001:9001 - 9001:9001
environment: environment:
MINIO_ROOT_USER: ${MINIO_ACCESS_KEY} MINIO_ROOT_USER: "minio_access_key"
MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY} MINIO_ROOT_PASSWORD: "minio_secret_key"
healthcheck: healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 30s interval: 30s
@ -31,8 +31,8 @@ services:
environment: environment:
MC_PATH: "/usr/bin/mc" MC_PATH: "/usr/bin/mc"
MINIO_HOST: "http://minio:9000" MINIO_HOST: "http://minio:9000"
MINIO_ACCESS_KEY: MINIO_ACCESS_KEY: "minio_access_key"
MINIO_SECRET_KEY: MINIO_SECRET_KEY: "minio_secret_key"
MINIO_ALIAS: "local_minio" MINIO_ALIAS: "local_minio"
PRIVATE_BUCKET: "private" PRIVATE_BUCKET: "private"
PUBLIC_BUCKET: "public" PUBLIC_BUCKET: "public"

@ -0,0 +1,3 @@
# Copyright (C) 2021 Intel Corporation
#
# SPDX-License-Identifier: MIT

@ -0,0 +1,276 @@
# Copyright (C) 2021 Intel Corporation
#
# SPDX-License-Identifier: MIT
import pytest
import json
import os.path as osp
from rest_api.utils.config import ASSETS_DIR
CVAT_DB_DIR = osp.join(ASSETS_DIR, 'cvat_db')
class Container:
    """Read-only wrapper over a list of JSON records that also offers
    lookup by a key field (``id`` unless another key name is given)."""

    def __init__(self, data, key='id'):
        # Keep the original sequence and index it by the chosen key field.
        self.raw_data = data
        self.map_data = {}
        for obj in data:
            self.map_data[obj[key]] = obj

    @property
    def raw(self):
        """The underlying list, in its original order."""
        return self.raw_data

    @property
    def map(self):
        """Mapping of key-field value -> record."""
        return self.map_data

    def __iter__(self):
        yield from self.raw_data

    def __len__(self):
        return len(self.raw_data)

    def __getitem__(self, key):
        # Slices address the ordered list; any other key is a map lookup.
        if not isinstance(key, slice):
            return self.map_data[key]
        return self.raw_data[key]
@pytest.fixture(scope='session')
def users():
with open(osp.join(ASSETS_DIR, 'users.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='session')
def organizations():
with open(osp.join(ASSETS_DIR, 'organizations.json')) as f:
return Container(json.load(f))
@pytest.fixture(scope='session')
def memberships():
with open(osp.join(ASSETS_DIR, 'memberships.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='session')
def tasks():
with open(osp.join(ASSETS_DIR, 'tasks.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='session')
def projects():
with open(osp.join(ASSETS_DIR, 'projects.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='session')
def jobs():
with open(osp.join(ASSETS_DIR, 'jobs.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='session')
def invitations():
with open(osp.join(ASSETS_DIR, 'invitations.json')) as f:
return Container(json.load(f)['results'], key='key')
@pytest.fixture(scope='session')
def annotations():
with open(osp.join(ASSETS_DIR, 'annotations.json')) as f:
return json.load(f)
@pytest.fixture(scope='session')
def cloud_storages():
with open(osp.join(ASSETS_DIR, 'cloudstorages.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='session')
def issues():
with open(osp.join(ASSETS_DIR, 'issues.json')) as f:
return Container(json.load(f)['results'])
@pytest.fixture(scope='session')
def users_by_name(users):
return {user['username']: user for user in users}
@pytest.fixture(scope='session')
def jobs_by_org(tasks, jobs):
data = {}
for job in jobs:
data.setdefault(tasks[job['task_id']]['organization'], []).append(job)
data[''] = data.pop(None, [])
return data
@pytest.fixture(scope='session')
def tasks_by_org(tasks):
data = {}
for task in tasks:
data.setdefault(task['organization'], []).append(task)
data[''] = data.pop(None, [])
return data
@pytest.fixture(scope='session')
def issues_by_org(tasks, jobs, issues):
data = {}
for issue in issues:
data.setdefault(tasks[jobs[issue['job']]['task_id']]['organization'], []).append(issue)
data[''] = data.pop(None, [])
return data
@pytest.fixture(scope='session')
def assignee_id():
def get_id(data):
if data.get('assignee') is not None:
return data['assignee']['id']
return get_id
def ownership(func):
    """Decorator for staff-check predicates: a missing resource id means
    "not staff" — the wrapped check is only consulted for real ids."""
    def wrap(user_id, resource_id):
        return False if resource_id is None else func(user_id, resource_id)
    return wrap
@pytest.fixture(scope='session')
def is_project_staff(projects, assignee_id):
@ownership
def check(user_id, pid):
return user_id == projects[pid]['owner']['id'] or \
user_id == assignee_id(projects[pid])
return check
@pytest.fixture(scope='session')
def is_task_staff(tasks, is_project_staff, assignee_id):
@ownership
def check(user_id, tid):
return user_id == tasks[tid]['owner']['id'] or \
user_id == assignee_id(tasks[tid]) or \
is_project_staff(user_id, tasks[tid]['project_id'])
return check
@pytest.fixture(scope='session')
def is_job_staff(jobs, is_task_staff, assignee_id):
@ownership
def check(user_id, jid):
return user_id == assignee_id(jobs[jid]) or \
is_task_staff(user_id, jobs[jid]['task_id'])
return check
@pytest.fixture(scope='session')
def is_issue_staff(issues, jobs, assignee_id):
@ownership
def check(user_id, issue_id):
return user_id == issues[issue_id]['owner']['id'] or \
user_id == assignee_id(issues[issue_id]) or \
user_id == assignee_id(jobs[issues[issue_id]['job']])
return check
@pytest.fixture(scope='session')
def is_issue_admin(issues, jobs, is_task_staff):
@ownership
def check(user_id, issue_id):
return is_task_staff(user_id, jobs[issues[issue_id]['job']]['task_id'])
return check
@pytest.fixture(scope='session')
def find_users(test_db):
    """Fixture returning a query function over the flattened user table.

    find(**kwargs) keeps the rows that match every given criterion by
    equality; a criterion named ``exclude_<field>`` instead removes every
    user who has at least one row with <field> == value. Criteria whose
    value is None are ignored.
    """
    def find(**kwargs):
        assert len(kwargs) > 0
        assert any(kwargs.values())

        data = test_db
        kwargs = dict(filter(lambda a: a[1] is not None, kwargs.items()))
        for field, value in kwargs.items():
            if field.startswith('exclude_'):
                field = field.split('_', maxsplit=1)[1]
                # ids of users that have at least one row matching the
                # excluded criterion — drop all rows of those users
                exclude_rows = set(v['id'] for v in
                    filter(lambda a: a[field] == value, test_db))
                data = list(filter(lambda a: a['id'] not in exclude_rows, data))
            else:
                data = list(filter(lambda a: a[field] == value, data))
        return data
    return find
@pytest.fixture(scope='session')
def test_db(users, users_by_name, memberships):
    """Fixture building a flat table of user rows with the fields
    (username, id, privilege, role, org, membership_id): one row per
    (user, group) pair, plus one row per (membership, group) pair.
    Missing fields default to None."""
    data = []
    fields = ['username', 'id', 'privilege', 'role', 'org', 'membership_id']
    def add_row(**kwargs):
        data.append({field: kwargs.get(field) for field in fields})

    # sandbox rows: no role/org/membership attached
    for user in users:
        for group in user['groups']:
            add_row(username=user['username'], id=user['id'], privilege=group)

    # organization rows: one per (membership, group) combination
    for membership in memberships:
        username = membership['user']['username']
        for group in users_by_name[username]['groups']:
            add_row(username=username, role=membership['role'], privilege=group,
                id=membership['user']['id'], org=membership['organization'],
                membership_id=membership['id'])

    return data
@pytest.fixture(scope='session')
def org_staff(memberships):
    """Fixture returning a function that yields the set of user ids who
    are maintainers or owners of the given organization; the sandbox
    ('' or None org id) has no staff, so an empty set is returned."""
    def find(org_id):
        if org_id in ['', None]:
            return set()
        else:
            # PEP 8: compare against None with `is not`, not `!=`
            return set(m['user']['id'] for m in memberships
                if m['role'] in ['maintainer', 'owner'] and m['user'] is not None
                and m['organization'] == org_id)
    return find
@pytest.fixture(scope='session')
def is_org_member(memberships):
    """Fixture returning a predicate check(user_id, org_id): is the user
    a member of the organization?  The sandbox ('' or None org id) is
    considered open to everyone."""
    def check(user_id, org_id):
        if org_id in ['', None]:
            return True
        else:
            # PEP 8: compare against None with `is not`, not `!=`
            return user_id in set(m['user']['id'] for m in memberships
                if m['user'] is not None and m['organization'] == org_id)
    return check
@pytest.fixture(scope='session')
def find_job_staff_user(is_job_staff):
def find(jobs, users, is_staff):
for job in jobs:
for user in users:
if is_staff == is_job_staff(user['id'], job['id']):
return user['username'], job['id']
return None, None
return find
@pytest.fixture(scope='session')
def find_task_staff_user(is_task_staff):
def find(tasks, users, is_staff):
for task in tasks:
for user in users:
if is_staff == is_task_staff(user['id'], task['id']):
return user['username'], task['id']
return None, None
return find
@pytest.fixture(scope='session')
def find_issue_staff_user(is_issue_staff, is_issue_admin):
    """Fixture returning a search function for a (user, issue) pair with
    the requested staff/admin relationship.

    With is_admin=None, any user whose combined "staff or admin" status
    equals is_staff matches; otherwise both flags must match exactly.
    Returns (username, issue_id), or (None, None) when nothing matches.
    """
    def find(issues, users, is_staff, is_admin):
        for issue in issues:
            for user in users:
                i_admin, i_staff = is_issue_admin(user['id'], issue['id']), is_issue_staff(user['id'], issue['id'])
                if (is_admin is None and (i_staff or i_admin) == is_staff) \
                    or (is_admin == i_admin and is_staff == i_staff):
                    return user['username'], issue['id']
        return None, None
    return find
@pytest.fixture(scope='session')
def filter_jobs_with_shapes(annotations):
def find(jobs):
return list(filter(lambda j: annotations['job'][str(j['id'])]['shapes'], jobs))
return find
@pytest.fixture(scope='session')
def filter_tasks_with_shapes(annotations):
def find(tasks):
return list(filter(lambda t: annotations['task'][str(t['id'])]['shapes'], tasks))
return find
@pytest.fixture(scope='session')
def tasks_with_shapes(tasks, filter_tasks_with_shapes):
return filter_tasks_with_shapes(tasks)

@ -0,0 +1,177 @@
import os.path as osp
import re
from http import HTTPStatus
from subprocess import PIPE, CalledProcessError, run
import pytest
import os
import requests
from rest_api.utils.config import ASSETS_DIR, get_api_url
CVAT_ROOT_DIR = __file__[: __file__.rfind(osp.join("tests", ""))]
CVAT_DB_DIR = osp.join(ASSETS_DIR, "cvat_db")
PREFIX = "test"
CONTAINER_NAME_FILES = [
osp.join(CVAT_ROOT_DIR, dc_file)
for dc_file in (
"components/analytics/docker-compose.analytics.tests.yml",
"docker-compose.tests.yml",
)
]
DC_FILES = [
osp.join(CVAT_ROOT_DIR, dc_file)
for dc_file in ("docker-compose.dev.yml", "tests/rest_api/docker-compose.minio.yml")
] + CONTAINER_NAME_FILES
def pytest_addoption(parser):
    """Register the custom command-line options of the REST API test suite.

    Each option is a pseudo-command handled by the `services` fixture:
    it performs its action and then exits instead of running tests.
    """
    group = parser.getgroup("CVAT REST API testing options")
    group._addoption(
        "--start-services",
        action="store_true",
        help="Start all necessary CVAT containers without running tests. (default: %(default)s)",
    )
    group._addoption(
        "--stop-services",
        action="store_true",
        help="Stop all testing containers without running tests. (default: %(default)s)",
    )
    group._addoption(
        "--rebuild",
        action="store_true",
        help="Rebuild CVAT images and then start containers. (default: %(default)s)",
    )
    group._addoption(
        "--cleanup",
        action="store_true",
        # fixed grammar of the user-facing help text
        help="Delete files that were created by tests, without running tests. (default: %(default)s)",
    )
def _run(command):
    """Run a shell command, returning (stdout, stderr) as decoded strings.

    Aborts the whole pytest session via pytest.exit() when the command
    exits with a non-zero status.
    """
    try:
        # naive whitespace split — the command must not need quoted arguments
        proc = run(command.split(), check=True, stdout=PIPE, stderr=PIPE) # nosec
        return proc.stdout.decode(), proc.stderr.decode()
    except CalledProcessError as exc:
        pytest.exit(
            f"Command failed: {command}.\n"
            f"Error message: {exc.stderr.decode()}.\n"
            f"Add `-s` option to see more details"
        )
def docker_cp(source, target):
    # Copy a file between the host and a container (either direction).
    _run(f"docker container cp {source} {target}")

def exec_cvat(command):
    # Run a command inside the CVAT server container.
    _run(f"docker exec {PREFIX}_cvat_1 {command}")

def exec_cvat_db(command):
    # Run a command inside the CVAT database container.
    _run(f"docker exec {PREFIX}_cvat_db_1 {command}")

def restore_db():
    # Recreate the `cvat` database from the pristine `test_db` snapshot.
    exec_cvat_db("psql -U root -d postgres -v from=test_db -v to=cvat -f /tmp/restore.sql")
def create_compose_files():
    # Generate the *.tests.yml copies of the compose files with every
    # line containing `container_name` stripped, so docker-compose can
    # prefix container names with the test project name instead.
    for filename in CONTAINER_NAME_FILES:
        with open(filename.replace(".tests.yml", ".yml"), "r") as dcf, open(filename, "w") as ndcf:
            ndcf.writelines(
                [line for line in dcf.readlines() if not re.match("^.+container_name.+$", line)]
            )

def delete_compose_files():
    # Remove the generated *.tests.yml files (used by --cleanup).
    for filename in CONTAINER_NAME_FILES:
        if osp.exists(filename):
            os.remove(filename)
def wait_for_server(timeout=300, interval=5):
    """Poll the CVAT API until the server answers, or abort the session.

    The server is considered up as soon as an unauthenticated request to
    `users/self` returns 401.  Connection errors while the container is
    still booting are retried instead of crashing, with `interval`
    seconds between attempts; after `timeout` seconds the whole pytest
    session is aborted.  Defaults keep the original call signature
    (no-argument calls) working.
    """
    from time import monotonic, sleep

    deadline = monotonic() + timeout
    while monotonic() < deadline:
        try:
            response = requests.get(get_api_url("users/self"))
            if response.status_code == HTTPStatus.UNAUTHORIZED:
                return
        except requests.ConnectionError:
            # server not accepting connections yet — keep waiting
            pass
        sleep(interval)
    pytest.exit(f"CVAT server did not start within {timeout} seconds")
def restore_data_volumes():
    # Unpack the pre-built CVAT data archive into the server's data dir.
    docker_cp(osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"), f"{PREFIX}_cvat_1:/tmp/cvat_data.tar.bz2")
    exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
def start_services(rebuild=False):
    """Start the whole test stack with docker-compose and seed it.

    Refuses to run when non-test cvat containers are already up (they
    would clash with the test stack).  After the containers start, the
    data volume is restored and the DB dump plus fixture data are copied
    into the containers.

    Returns docker-compose's stderr output (its progress log).
    """
    running_containers = [cn for cn in _run("docker ps --format {{.Names}}")[0].split("\n") if cn]
    # generator is enough for any(); no need to materialize a list
    if any(cn in ("cvat", "cvat_db") for cn in running_containers):
        pytest.exit(
            # fixed typo in the user-facing message ("It's looks like")
            "It looks like you already have running cvat containers. Stop them and try again. "
            f"List of running containers: {', '.join(running_containers)}"
        )

    build_flag = "--build" if rebuild else ""
    # docker-compose writes its progress log to stderr (index 1)
    out = _run(f"docker-compose -p {PREFIX} -f {' -f '.join(DC_FILES)} up -d {build_flag}")[1]

    restore_data_volumes()
    docker_cp(osp.join(CVAT_DB_DIR, "restore.sql"), f"{PREFIX}_cvat_db_1:/tmp/restore.sql")
    docker_cp(osp.join(CVAT_DB_DIR, "data.json"), f"{PREFIX}_cvat_1:/tmp/data.json")

    return out
@pytest.fixture(autouse=True, scope="session")
def services(request):
    """Session-wide fixture managing the dockerized CVAT test stack.

    Also implements the pseudo-commands exposed as pytest options
    (--start-services / --stop-services / --rebuild / --cleanup), each
    of which exits the session after doing its job instead of running
    tests.  On normal runs: starts the stack, waits for the server,
    loads fixture data, snapshots the DB, and tears everything back
    down after the session.
    """
    stop = request.config.getoption("--stop-services")
    start = request.config.getoption("--start-services")
    rebuild = request.config.getoption("--rebuild")
    cleanup = request.config.getoption("--cleanup")

    if start and stop:
        raise Exception("--start-services and --stop-services are incompatible")

    if cleanup:
        # remove only the generated *.tests.yml copies, then bail out
        delete_compose_files()
        pytest.exit(f"All generated test files have been deleted", returncode=0)

    if not all([osp.exists(f) for f in CONTAINER_NAME_FILES]):
        create_compose_files()

    if stop:
        out = _run(f"docker-compose -p {PREFIX} -f {' -f '.join(DC_FILES)} down -v")[1]
        # collect the names docker-compose reported as "done"
        out = set(l.split()[1] for l in out.split("\n") if "done" in l.split())
        pytest.exit(f"All testing containers are stopped: {', '.join(out)}", returncode=0)

    started_services = start_services(rebuild)
    wait_for_server()

    # load fixture data and snapshot the database as `test_db`
    exec_cvat("python manage.py loaddata /tmp/data.json")
    exec_cvat_db("psql -U root -d postgres -v from=cvat -v to=test_db -f /tmp/restore.sql")

    if start:
        pytest.exit(
            f"All necessary containers have been created and started: {started_services}",
            returncode=0,
        )

    yield

    # teardown: restore the original `cvat` database and drop the snapshot
    restore_db()
    exec_cvat_db("dropdb test_db")
@pytest.fixture(scope="function")
def changedb():
    # For tests that mutate the DB: restore a pristine DB before each test.
    restore_db()

@pytest.fixture(scope="class")
def dontchangedb():
    # For read-only test classes: restore the DB once per class.
    restore_db()

@pytest.fixture(scope="function")
def restore_cvat_data():
    # Re-seed the data volume before tests that modify stored files.
    restore_data_volumes()

@ -4,8 +4,9 @@
import pytest import pytest
from http import HTTPStatus from http import HTTPStatus
from .utils.config import server_get from rest_api.utils.config import server_get
@pytest.mark.usefixtures('dontchangedb')
class TestGetAnalytics: class TestGetAnalytics:
endpoint = 'analytics/app/kibana' endpoint = 'analytics/app/kibana'
def _test_can_see(self, user): def _test_can_see(self, user):

@ -4,7 +4,7 @@
from http import HTTPStatus from http import HTTPStatus
import re import re
from .utils.config import server_get from rest_api.utils.config import server_get
class TestCachePolicy: class TestCachePolicy:

@ -6,11 +6,14 @@ import os.path as osp
import glob import glob
import json import json
from deepdiff import DeepDiff from deepdiff import DeepDiff
from .utils import config from rest_api.utils import config
import pytest import pytest
@pytest.mark.usefixtures('dontchangedb')
class TestGetResources:
@pytest.mark.parametrize('path', glob.glob(osp.join(config.ASSETS_DIR, '*.json'))) @pytest.mark.parametrize('path', glob.glob(osp.join(config.ASSETS_DIR, '*.json')))
def test_check_objects_integrity(path): def test_check_objects_integrity(self, path):
with open(path) as f: with open(path) as f:
endpoint = osp.basename(path).rsplit('.')[0] endpoint = osp.basename(path).rsplit('.')[0]
if endpoint == 'annotations': if endpoint == 'annotations':

@ -6,8 +6,9 @@ import pytest
from http import HTTPStatus from http import HTTPStatus
from deepdiff import DeepDiff from deepdiff import DeepDiff
from .utils.config import get_method, patch_method, post_method from rest_api.utils.config import get_method, patch_method, post_method
@pytest.mark.usefixtures('dontchangedb')
class TestGetCloudStorage: class TestGetCloudStorage:
def _test_can_see(self, user, storage_id, data, **kwargs): def _test_can_see(self, user, storage_id, data, **kwargs):
@ -59,8 +60,8 @@ class TestGetCloudStorage:
self._test_cannot_see(username, storage_id, org_id=org_id) self._test_cannot_see(username, storage_id, org_id=org_id)
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPostCloudStorage: class TestPostCloudStorage():
_SPEC = { _SPEC = {
'provider_type': 'AWS_S3_BUCKET', 'provider_type': 'AWS_S3_BUCKET',
'resource': 'test', 'resource': 'test',
@ -121,7 +122,7 @@ class TestPostCloudStorage:
else: else:
self._test_cannot_create(username, self._SPEC, org_id=org_id) self._test_cannot_create(username, self._SPEC, org_id=org_id)
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPatchCloudStorage: class TestPatchCloudStorage:
_SPEC = { _SPEC = {
'display_name': 'New display name', 'display_name': 'New display name',

@ -4,9 +4,9 @@
from http import HTTPStatus from http import HTTPStatus
import pytest import pytest
from .utils.config import post_method from rest_api.utils.config import post_method
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestCreateInvitations: class TestCreateInvitations:
def _test_post_invitation_201(self, user, data, invitee, **kwargs): def _test_post_invitation_201(self, user, data, invitee, **kwargs):
response = post_method(user, 'invitations', data, **kwargs) response = post_method(user, 'invitations', data, **kwargs)

@ -7,9 +7,9 @@ from http import HTTPStatus
from deepdiff import DeepDiff from deepdiff import DeepDiff
from copy import deepcopy from copy import deepcopy
from .utils.config import post_method, patch_method from rest_api.utils.config import post_method, patch_method
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPostIssues: class TestPostIssues:
def _test_check_response(self, user, data, is_allow, **kwargs): def _test_check_response(self, user, data, is_allow, **kwargs):
response = post_method(user, 'issues', data, **kwargs) response = post_method(user, 'issues', data, **kwargs)
@ -78,9 +78,7 @@ class TestPostIssues:
} }
self._test_check_response(username, data, is_allow, org_id=org) self._test_check_response(username, data, is_allow, org_id=org)
@pytest.mark.usefixtures('changedb')
@pytest.mark.usefixtures("restore")
class TestPatchIssues: class TestPatchIssues:
def _test_check_response(self, user, issue_id, data, is_allow, **kwargs): def _test_check_response(self, user, issue_id, data, is_allow, **kwargs):
response = patch_method(user, f'issues/{issue_id}', data, response = patch_method(user, f'issues/{issue_id}', data,

@ -5,7 +5,8 @@
from http import HTTPStatus from http import HTTPStatus
from deepdiff import DeepDiff from deepdiff import DeepDiff
import pytest import pytest
from .utils.config import get_method, patch_method from copy import deepcopy
from rest_api.utils.config import get_method, patch_method
def get_job_staff(job, tasks, projects): def get_job_staff(job, tasks, projects):
job_staff = [] job_staff = []
@ -37,6 +38,7 @@ def filter_jobs(jobs, tasks, org):
return jobs, kwargs return jobs, kwargs
@pytest.mark.usefixtures('dontchangedb')
class TestGetJobs: class TestGetJobs:
def _test_get_job_200(self, user, jid, data, **kwargs): def _test_get_job_200(self, user, jid, data, **kwargs):
response = get_method(user, f'jobs/{jid}', **kwargs) response = get_method(user, f'jobs/{jid}', **kwargs)
@ -75,6 +77,7 @@ class TestGetJobs:
else: else:
self._test_get_job_403(user['username'], job['id'], **kwargs) self._test_get_job_403(user['username'], job['id'], **kwargs)
@pytest.mark.usefixtures('dontchangedb')
class TestListJobs: class TestListJobs:
def _test_list_jobs_200(self, user, data, **kwargs): def _test_list_jobs_200(self, user, data, **kwargs):
response = get_method(user, 'jobs', **kwargs, page_size='all') response = get_method(user, 'jobs', **kwargs, page_size='all')
@ -110,6 +113,7 @@ class TestListJobs:
else: else:
self._test_list_jobs_403(user['username'], **kwargs) self._test_list_jobs_403(user['username'], **kwargs)
@pytest.mark.usefixtures('dontchangedb')
class TestGetAnnotations: class TestGetAnnotations:
def _test_get_job_annotations_200(self, user, jid, data, **kwargs): def _test_get_job_annotations_200(self, user, jid, data, **kwargs):
response = get_method(user, f'jobs/{jid}/annotations', **kwargs) response = get_method(user, f'jobs/{jid}/annotations', **kwargs)
@ -180,7 +184,8 @@ class TestGetAnnotations:
job_id, annotations['job'][str(job_id)], **kwargs) job_id, annotations['job'][str(job_id)], **kwargs)
else: else:
self._test_get_job_annotations_403(username, job_id, **kwargs) self._test_get_job_annotations_403(username, job_id, **kwargs)
@pytest.mark.usefixtures("restore")
@pytest.mark.usefixtures('changedb')
class TestPatchJobAnnotations: class TestPatchJobAnnotations:
_ORG = 2 _ORG = 2
@ -195,7 +200,7 @@ class TestPatchJobAnnotations:
@pytest.fixture(scope='class') @pytest.fixture(scope='class')
def request_data(self, annotations): def request_data(self, annotations):
def get_data(jid): def get_data(jid):
data = annotations['job'][str(jid)].copy() data = deepcopy(annotations['job'][str(jid)])
data['shapes'][0].update({'points': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0]}) data['shapes'][0].update({'points': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0]})
data['version'] += 1 data['version'] += 1
return data return data
@ -259,7 +264,7 @@ class TestPatchJobAnnotations:
self._test_check_respone(is_allow, response, data) self._test_check_respone(is_allow, response, data)
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPatchJob: class TestPatchJob:
_ORG = 2 _ORG = 2
@ -277,7 +282,7 @@ class TestPatchJob:
def expected_data(self, jobs, users): def expected_data(self, jobs, users):
keys = ['url', 'id', 'username', 'first_name', 'last_name'] keys = ['url', 'id', 'username', 'first_name', 'last_name']
def find(job_id, assignee_id): def find(job_id, assignee_id):
data = jobs[job_id].copy() data = deepcopy(jobs[job_id])
data['assignee'] = dict(filter(lambda a: a[0] in keys, data['assignee'] = dict(filter(lambda a: a[0] in keys,
users[assignee_id].items())) users[assignee_id].items()))
return data return data
@ -290,7 +295,6 @@ class TestPatchJob:
members -= {assignee_id(jobs[jid]), user_id} members -= {assignee_id(jobs[jid]), user_id}
return members.pop() return members.pop()
return find_new_assignee return find_new_assignee
@pytest.mark.parametrize('org', [2]) @pytest.mark.parametrize('org', [2])
@pytest.mark.parametrize('role, task_staff, is_allow', [ @pytest.mark.parametrize('role, task_staff, is_allow', [
('maintainer', False, True), ('owner', False, True), ('maintainer', False, True), ('owner', False, True),

@ -6,8 +6,9 @@ import pytest
from http import HTTPStatus from http import HTTPStatus
from deepdiff import DeepDiff from deepdiff import DeepDiff
from .utils.config import get_method, patch_method from rest_api.utils.config import get_method, patch_method
@pytest.mark.usefixtures('dontchangedb')
class TestGetMemberships: class TestGetMemberships:
def _test_can_see_memberships(self, user, data, **kwargs): def _test_can_see_memberships(self, user, data, **kwargs):
response = get_method(user, 'memberships', **kwargs) response = get_method(user, 'memberships', **kwargs)
@ -40,9 +41,7 @@ class TestGetMemberships:
non_org1_users = ['user2', 'worker3'] non_org1_users = ['user2', 'worker3']
for user in non_org1_users: for user in non_org1_users:
self._test_cannot_see_memberships(user, org_id=1) self._test_cannot_see_memberships(user, org_id=1)
@pytest.mark.usefixtures('changedb')
@pytest.mark.usefixtures("restore")
class TestPatchMemberships: class TestPatchMemberships:
_ORG = 2 _ORG = 2

@ -4,8 +4,9 @@
from http import HTTPStatus from http import HTTPStatus
import pytest import pytest
from .utils.config import get_method, options_method, patch_method, delete_method from rest_api.utils.config import get_method, options_method, patch_method, delete_method
from deepdiff import DeepDiff from deepdiff import DeepDiff
from copy import deepcopy
class TestMetadataOrganizations: class TestMetadataOrganizations:
_ORG = 2 _ORG = 2
@ -33,6 +34,7 @@ class TestMetadataOrganizations:
response = options_method(user, f'organizations/{self._ORG}') response = options_method(user, f'organizations/{self._ORG}')
assert response.status_code == HTTPStatus.OK assert response.status_code == HTTPStatus.OK
@pytest.mark.usefixtures('dontchangedb')
class TestGetOrganizations: class TestGetOrganizations:
_ORG = 2 _ORG = 2
@ -60,7 +62,7 @@ class TestGetOrganizations:
else: else:
assert response.status_code == HTTPStatus.NOT_FOUND assert response.status_code == HTTPStatus.NOT_FOUND
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPatchOrganizations: class TestPatchOrganizations:
_ORG = 2 _ORG = 2
@ -71,7 +73,7 @@ class TestPatchOrganizations:
@pytest.fixture(scope='class') @pytest.fixture(scope='class')
def expected_data(self, organizations, request_data): def expected_data(self, organizations, request_data):
data = organizations[self._ORG].copy() data = deepcopy(organizations[self._ORG])
data.update(request_data) data.update(request_data)
return data return data
@ -101,7 +103,7 @@ class TestPatchOrganizations:
else: else:
assert response.status_code != HTTPStatus.OK assert response.status_code != HTTPStatus.OK
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestDeleteOrganizations: class TestDeleteOrganizations:
_ORG = 2 _ORG = 2

@ -11,6 +11,7 @@ import pytest
from .utils.config import get_method, post_files_method, post_method from .utils.config import get_method, post_files_method, post_method
@pytest.mark.usefixtures('dontchangedb')
class TestGetProjects: class TestGetProjects:
def _find_project_by_user_org(self, user, projects, is_project_staff_flag, is_project_staff): def _find_project_by_user_org(self, user, projects, is_project_staff_flag, is_project_staff):
if is_project_staff_flag: if is_project_staff_flag:
@ -112,7 +113,7 @@ class TestGetProjects:
self._test_response_200(user_in_project['username'], project_id, org_id=user_in_project['org']) self._test_response_200(user_in_project['username'], project_id, org_id=user_in_project['org'])
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPostProjects: class TestPostProjects:
def _test_create_project_201(self, user, spec, **kwargs): def _test_create_project_201(self, user, spec, **kwargs):
response = post_method(user, '/projects', spec, **kwargs) response = post_method(user, '/projects', spec, **kwargs)
@ -199,7 +200,7 @@ class TestPostProjects:
} }
self._test_create_project_201(user['username'], spec, org_id=user['org']) self._test_create_project_201(user['username'], spec, org_id=user['org'])
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures("changedb")
@pytest.mark.usefixtures("restore_cvat_data") @pytest.mark.usefixtures("restore_cvat_data")
class TestImportExportDatasetProject: class TestImportExportDatasetProject:
def _test_export_project(self, username, project_id, format_name): def _test_export_project(self, username, project_id, format_name):
@ -217,7 +218,7 @@ class TestImportExportDatasetProject:
return response return response
def _test_import_project(self, username, project_id, format_name, data): def _test_import_project(self, username, project_id, format_name, data):
response = post_files_method(username, f'projects/{project_id}/dataset', data, response = post_files_method(username, f'projects/{project_id}/dataset', None, data,
format=format_name) format=format_name)
assert response.status_code == HTTPStatus.ACCEPTED assert response.status_code == HTTPStatus.ACCEPTED

@ -7,7 +7,7 @@ from time import sleep
import pytest import pytest
from .utils.config import get_method, post_method from rest_api.utils.config import get_method, post_method
def _post_task_remote_data(username, task_id, resources): def _post_task_remote_data(username, task_id, resources):
@ -29,7 +29,7 @@ def _wait_until_task_is_created(username, task_id):
sleep(1) sleep(1)
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestGetAnalytics: class TestGetAnalytics:
task_id = 12 task_id = 12
def _test_can_create(self, user, task_id, resources): def _test_can_create(self, user, task_id, resources):

@ -2,6 +2,7 @@
# #
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from copy import deepcopy
from http import HTTPStatus from http import HTTPStatus
from io import BytesIO from io import BytesIO
from time import sleep from time import sleep
@ -32,6 +33,7 @@ def generate_image_files(count):
return images return images
@pytest.mark.usefixtures('dontchangedb')
class TestGetTasks: class TestGetTasks:
def _test_task_list_200(self, user, project_id, data, exclude_paths = '', **kwargs): def _test_task_list_200(self, user, project_id, data, exclude_paths = '', **kwargs):
response = get_method(user, f'projects/{project_id}/tasks', **kwargs) response = get_method(user, f'projects/{project_id}/tasks', **kwargs)
@ -66,9 +68,6 @@ class TestGetTasks:
assert response.status_code == HTTPStatus.OK assert response.status_code == HTTPStatus.OK
assert any(_task['id'] == task['id'] for _task in response_data['results']) assert any(_task['id'] == task['id'] for _task in response_data['results'])
# [sandbox] admin can see task data in project even he has no ownerships in this project
# [sandbox] business cannot see task data in project if he has no ownerships in this project
# [sandbox] user that has one of these ownerships: [Project:owner, Project:assignee] can see task data
@pytest.mark.parametrize('project_id', [1]) @pytest.mark.parametrize('project_id', [1])
@pytest.mark.parametrize('groups, is_staff, is_allow', [ @pytest.mark.parametrize('groups, is_staff, is_allow', [
('admin', False, True), ('admin', False, True),
@ -81,18 +80,14 @@ class TestGetTasks:
self._test_users_to_see_task_list(project_id, tasks, users, is_staff, is_allow, is_project_staff) self._test_users_to_see_task_list(project_id, tasks, users, is_staff, is_allow, is_project_staff)
# [sandbox] user that has one of these ownerships: [Owner, Assignee] can see task data
@pytest.mark.parametrize('project_id, groups', [(1, 'user')]) @pytest.mark.parametrize('project_id, groups', [(1, 'user')])
def test_task_assigneed_to_see_task(self, project_id, groups, users, tasks, find_users, is_task_staff): def test_task_assigned_to_see_task(self, project_id, groups, users, tasks, find_users, is_task_staff):
users = find_users(privilege=groups) users = find_users(privilege=groups)
tasks = list(filter(lambda x: x['project_id'] == project_id and x['assignee'], tasks)) tasks = list(filter(lambda x: x['project_id'] == project_id and x['assignee'], tasks))
assert len(tasks) assert len(tasks)
self._test_assigned_users_to_see_task_data(tasks, users, is_task_staff) self._test_assigned_users_to_see_task_data(tasks, users, is_task_staff)
# [organization] maintainer can see task data even if he has no ownerships in corresponding Project, Task
# [organization] supervisor cannot see task data if he has no ownerships in corresponding Project, Task
# [organization] worker (as role) that has one of these ownerships: [Project:owner, Project:assignee], can see task data
@pytest.mark.parametrize('org, project_id', [({'id': 2, 'slug': 'org2'}, 2)]) @pytest.mark.parametrize('org, project_id', [({'id': 2, 'slug': 'org2'}, 2)])
@pytest.mark.parametrize('role, is_staff, is_allow', [ @pytest.mark.parametrize('role, is_staff, is_allow', [
('maintainer', False, True), ('maintainer', False, True),
@ -105,7 +100,6 @@ class TestGetTasks:
self._test_users_to_see_task_list(project_id, tasks, users, is_staff, is_allow, is_project_staff, org=org['slug']) self._test_users_to_see_task_list(project_id, tasks, users, is_staff, is_allow, is_project_staff, org=org['slug'])
# [organization] worker (as role) that has one of these ownerships: [Owner, Assignee], can see task data
@pytest.mark.parametrize('org, project_id, role', [ @pytest.mark.parametrize('org, project_id, role', [
({'id': 2, 'slug': 'org2'}, 2, 'worker') ({'id': 2, 'slug': 'org2'}, 2, 'worker')
]) ])
@ -117,7 +111,7 @@ class TestGetTasks:
self._test_assigned_users_to_see_task_data(tasks, users, is_task_staff, org=org['slug']) self._test_assigned_users_to_see_task_data(tasks, users, is_task_staff, org=org['slug'])
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPostTasks: class TestPostTasks:
def _test_create_task_201(self, user, spec, **kwargs): def _test_create_task_201(self, user, spec, **kwargs):
response = post_method(user, '/tasks', spec, **kwargs) response = post_method(user, '/tasks', spec, **kwargs)
@ -127,31 +121,6 @@ class TestPostTasks:
response = post_method(user, '/tasks', spec, **kwargs) response = post_method(user, '/tasks', spec, **kwargs)
assert response.status_code == HTTPStatus.FORBIDDEN assert response.status_code == HTTPStatus.FORBIDDEN
@staticmethod
def _wait_until_task_is_created(username, task_id):
url = f'tasks/{task_id}/status'
while True:
response = get_method(username, url)
response_json = response.json()
if response_json['state'] == 'Finished' or response_json['state'] == 'Failed':
return response
sleep(1)
def _test_create_task_with_images(self, username, spec, data, files):
response = post_method(username, '/tasks', spec)
assert response.status_code == HTTPStatus.CREATED
task_id = response.json()['id']
response = post_files_method(username, f'/tasks/{task_id}/data', data, files)
assert response.status_code == HTTPStatus.ACCEPTED
response = self._wait_until_task_is_created(username, task_id)
response_json = response.json()
assert response_json['state'] == 'Finished'
return task_id
def _test_users_to_create_task_in_project(self, project_id, users, is_staff, is_allow, is_project_staff, **kwargs): def _test_users_to_create_task_in_project(self, project_id, users, is_staff, is_allow, is_project_staff, **kwargs):
if is_staff: if is_staff:
users = [user for user in users if is_project_staff(user['id'], project_id) ] users = [user for user in users if is_project_staff(user['id'], project_id) ]
@ -171,9 +140,6 @@ class TestPostTasks:
else: else:
self._test_create_task_403(username, spec, **kwargs) self._test_create_task_403(username, spec, **kwargs)
# [sandbox] admin can create task in project even he has no ownerships in this project
# [sandbox] business cannot create task in project if he has no ownerships in this project
# [sandbox] user that has one of these ownerships: [Project:owner, Project:assignee] and has less than 10 task can create task in project
@pytest.mark.parametrize('project_id', [1]) @pytest.mark.parametrize('project_id', [1])
@pytest.mark.parametrize('groups, is_staff, is_allow', [ @pytest.mark.parametrize('groups, is_staff, is_allow', [
('admin', False, True), ('admin', False, True),
@ -184,7 +150,6 @@ class TestPostTasks:
users = find_users(privilege=groups) users = find_users(privilege=groups)
self._test_users_to_create_task_in_project(project_id, users, is_staff, is_allow, is_project_staff) self._test_users_to_create_task_in_project(project_id, users, is_staff, is_allow, is_project_staff)
# [organization] worker cannot create task in project even he has no ownerships in this project
@pytest.mark.parametrize('org, project_id', [({'id': 2, 'slug': 'org2'}, 2)]) @pytest.mark.parametrize('org, project_id', [({'id': 2, 'slug': 'org2'}, 2)])
@pytest.mark.parametrize('role, is_staff, is_allow', [ @pytest.mark.parametrize('role, is_staff, is_allow', [
('worker', False, False), ('worker', False, False),
@ -193,32 +158,7 @@ class TestPostTasks:
users = find_users(org=org['id'], role=role) users = find_users(org=org['id'], role=role)
self._test_users_to_create_task_in_project(project_id, users, is_staff, is_allow, is_project_staff, org=org['slug']) self._test_users_to_create_task_in_project(project_id, users, is_staff, is_allow, is_project_staff, org=org['slug'])
def test_can_create_task_with_defined_start_and_stop_frames(self): @pytest.mark.usefixtures('dontchangedb')
username = 'admin1'
task_spec = {
'name': f'test {username} to create a task with defined start and stop frames',
"labels": [{
"name": "car",
"color": "#ff00ff"
}],
}
task_data = {
'image_quality': 75,
'start_frame': 2,
'stop_frame': 5
}
task_files = {
f'client_files[{i}]': image for i, image in enumerate(generate_image_files(7))
}
task_id = self._test_create_task_with_images(username, task_spec, task_data, task_files)
# check task size
response = get_method(username, f'tasks/{task_id}')
response_json = response.json()
assert response_json['size'] == 4
class TestGetData: class TestGetData:
_USERNAME = 'user1' _USERNAME = 'user1'
@ -232,9 +172,9 @@ class TestGetData:
assert response.status_code == HTTPStatus.OK assert response.status_code == HTTPStatus.OK
assert response.headers['Content-Type'] == content_type assert response.headers['Content-Type'] == content_type
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures('changedb')
class TestPatchTaskAnnotations: class TestPatchTaskAnnotations:
def _test_check_respone(self, is_allow, response, data=None): def _test_check_response(self, is_allow, response, data=None):
if is_allow: if is_allow:
assert response.status_code == HTTPStatus.OK assert response.status_code == HTTPStatus.OK
assert DeepDiff(data, response.json(), assert DeepDiff(data, response.json(),
@ -245,7 +185,7 @@ class TestPatchTaskAnnotations:
@pytest.fixture(scope='class') @pytest.fixture(scope='class')
def request_data(self, annotations): def request_data(self, annotations):
def get_data(tid): def get_data(tid):
data = annotations['task'][str(tid)].copy() data = deepcopy(annotations['task'][str(tid)])
data['shapes'][0].update({'points': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0]}) data['shapes'][0].update({'points': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0]})
data['version'] += 1 data['version'] += 1
return data return data
@ -269,7 +209,7 @@ class TestPatchTaskAnnotations:
response = patch_method(username, f'tasks/{tid}/annotations', data, response = patch_method(username, f'tasks/{tid}/annotations', data,
org_id=org, action='update') org_id=org, action='update')
self._test_check_respone(is_allow, response, data) self._test_check_response(is_allow, response, data)
@pytest.mark.parametrize('org', [2]) @pytest.mark.parametrize('org', [2])
@pytest.mark.parametrize('role, task_staff, is_allow', [ @pytest.mark.parametrize('role, task_staff, is_allow', [
@ -288,10 +228,26 @@ class TestPatchTaskAnnotations:
response = patch_method(username, f'tasks/{tid}/annotations', data, response = patch_method(username, f'tasks/{tid}/annotations', data,
org_id=org, action='update') org_id=org, action='update')
self._test_check_respone(is_allow, response, data) self._test_check_response(is_allow, response, data)
@pytest.mark.usefixtures('dontchangedb')
class TestGetTaskDataset:
def _test_export_project(self, username, tid, **kwargs):
response = get_method(username, f'tasks/{tid}/dataset', **kwargs)
assert response.status_code == HTTPStatus.ACCEPTED
response = get_method(username, f'tasks/{tid}/dataset', **kwargs)
assert response.status_code == HTTPStatus.CREATED
response = get_method(username, f'tasks/{tid}/dataset', action='download', **kwargs)
assert response.status_code == HTTPStatus.OK
def test_admin_can_export_task_dataset(self, tasks_with_shapes):
task = tasks_with_shapes[0]
self._test_export_project('admin1', task['id'], format='CVAT for images 1.1')
@pytest.mark.usefixtures("restore") @pytest.mark.usefixtures("changedb")
class TestExportDatasetTask: class TestPostTaskData:
@staticmethod @staticmethod
def _wait_until_task_is_created(username, task_id): def _wait_until_task_is_created(username, task_id):
url = f'tasks/{task_id}/status' url = f'tasks/{task_id}/status'

@ -3,10 +3,14 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from http import HTTPStatus from http import HTTPStatus
import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
from .utils.config import get_method from rest_api.utils.config import get_method
@pytest.mark.usefixtures('dontchangedb')
class TestGetUsers: class TestGetUsers:
def _test_can_see(self, user, data, endpoint='users', exclude_paths='', **kwargs): def _test_can_see(self, user, data, endpoint='users', exclude_paths='', **kwargs):
response = get_method(user, endpoint, **kwargs) response = get_method(user, endpoint, **kwargs)

@ -0,0 +1,3 @@
# Copyright (C) 2021 Intel Corporation
#
# SPDX-License-Identifier: MIT

@ -5,8 +5,8 @@
import os.path as osp import os.path as osp
import requests import requests
ROOT_DIR = osp.dirname(__file__) ROOT_DIR = __file__[:__file__.rfind(osp.join("utils", ""))]
ASSETS_DIR = osp.abspath(osp.join(ROOT_DIR, '..', 'assets')) ASSETS_DIR = osp.abspath(osp.join(ROOT_DIR, 'assets'))
# Suppress the warning from Bandit about hardcoded passwords # Suppress the warning from Bandit about hardcoded passwords
USER_PASS = '!Q@W#E$R' # nosec USER_PASS = '!Q@W#E$R' # nosec
BASE_URL = 'http://localhost:8080/' BASE_URL = 'http://localhost:8080/'

Loading…
Cancel
Save