diff --git a/.github/workflows/cache.yml b/.github/workflows/cache.yml index 7401b3d5..eed46e8a 100644 --- a/.github/workflows/cache.yml +++ b/.github/workflows/cache.yml @@ -24,7 +24,7 @@ jobs: ${{ runner.os }}-build-ui- - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1.1.2 + uses: docker/setup-buildx-action@v2 - name: Caching CVAT server uses: docker/build-push-action@v2 diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9624807d..3a11171d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -7,62 +7,51 @@ on: pull_request: types: [edited, ready_for_review, opened, synchronize, reopened] -jobs: - Unit_testing: - if: | - github.event.pull_request.draft == false && - !startsWith(github.event.pull_request.title, '[WIP]') && - !startsWith(github.event.pull_request.title, '[Dependent]') - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - - uses: actions/setup-python@v2 - with: - python-version: '3.8' +env: + API_ABOUT_PAGE: "localhost:8080/api/server/about" +jobs: + cache: + if: | + github.event.pull_request.draft == false && + !startsWith(github.event.pull_request.title, '[WIP]') && + !startsWith(github.event.pull_request.title, '[Dependent]') + runs-on: ubuntu-latest + outputs: + sha: ${{ steps.get-sha.outputs.sha}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + steps: - name: Getting SHA from the default branch id: get-sha run: | - DEFAULT_BRANCH=$(curl -s \ - --request GET \ - --url https://api.github.com/repos/${{ github.repository }} \ - --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | \ - jq -r '.default_branch') - - SHA=$(curl -s \ - --request GET \ - --url https://api.github.com/repos/${{ github.repository }}/git/ref/heads/${DEFAULT_BRANCH} \ - --header 'authorization: token ${{ secrets.GITHUB_TOKEN }}' | \ - jq -r '.object.sha') + DEFAULT_BRANCH=$(gh api /repos/$REPO | jq -r '.default_branch') + SHA=$(gh api 
/repos/$REPO/git/ref/heads/$DEFAULT_BRANCH | jq -r '.object.sha') echo ::set-output name=default_branch::${DEFAULT_BRANCH} echo ::set-output name=sha::${SHA} - name: Waiting a cache creation in the default branch - if: ${{ github.ref_name != 'develop' }} + env: + DEFAULT_BRANCH: ${{ steps.get-sha.outputs.default_branch }} + SHA: ${{ steps.get-sha.outputs.sha }} run: | SLEEP=45 NUMBER_ATTEMPTS=10 while [[ ${NUMBER_ATTEMPTS} -gt 0 ]]; do - RUN_status=$(curl -s \ - --request GET \ - --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \ - --url https://api.github.com/repos/${{ github.repository }}/actions/workflows/cache.yml/runs | \ - jq -r '.workflow_runs[]? | - select( - (.head_sha == "${{ steps.get-sha.outputs.sha }}") - and (.event == "push") - and (.name == "Cache") - and (.head_branch == "${{ steps.get-sha.outputs.default_branch }}") - ) | .status') + + RUN_status=$(gh api /repos/${REPO}/actions/workflows/cache.yml/runs | \ + jq -r ".workflow_runs[]? | + select((.head_sha == \"${SHA}\")) | .status") + if [[ ${RUN_status} == "completed" ]]; then - echo "The cache creation on the '${{ steps.get-sha.outputs.default_branch }}' branch has finished. Status: ${RUN_status}" + echo "The cache creation on the ${DEFAULT_BRANCH} branch has finished. Status: ${RUN_status}" break else echo "The creation of the cache is not yet complete." echo "There are still attempts to check the cache: ${NUMBER_ATTEMPTS}" - echo "Status of caching in the '${{ steps.get-sha.outputs.default_branch }}' branch: ${RUN_status}" + echo "Status of caching in the ${DEFAULT_BRANCH} branch: ${RUN_status}" echo "sleep ${SLEEP}" sleep ${SLEEP} ((NUMBER_ATTEMPTS--)) @@ -73,20 +62,30 @@ jobs: echo "Probably the creation of the cache is not yet complete. Will continue working without the cache." 
fi + Unit_testing: + needs: cache + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: actions/setup-python@v2 + with: + python-version: '3.8' + - name: Getting CVAT server cache from the default branch uses: actions/cache@v2 with: path: /tmp/cvat_cache_server - key: ${{ runner.os }}-build-server-${{ steps.get-sha.outputs.sha }} + key: ${{ runner.os }}-build-server-${{ needs.cache.outputs.sha }} - name: Getting CVAT UI cache from the default branch uses: actions/cache@v2 with: path: /tmp/cvat_cache_ui - key: ${{ runner.os }}-build-ui-${{ steps.get-sha.outputs.sha }} + key: ${{ runner.os }}-build-ui-${{ needs.cache.outputs.sha }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1.1.2 + uses: docker/setup-buildx-action@master - name: Building CVAT server image uses: docker/build-push-action@v2 @@ -113,19 +112,10 @@ jobs: ./opa test cvat/apps/iam/rules - name: Running REST API tests - env: - API_ABOUT_PAGE: "localhost:8080/api/server/about" - # Access key length should be at least 3, and secret key length at least 8 characters - MINIO_ACCESS_KEY: "minio_access_key" - MINIO_SECRET_KEY: "minio_secret_key" run: | - docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/serverless/docker-compose.serverless.yml -f components/analytics/docker-compose.analytics.yml -f tests/rest_api/docker-compose.minio.yml up -d - /bin/bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' ${API_ABOUT_PAGE})" != "401" ]]; do sleep 5; done' - pip3 install --user -r tests/rest_api/requirements.txt - pytest tests/rest_api/ -k 'GET' - - docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/serverless/docker-compose.serverless.yml -f components/analytics/docker-compose.analytics.yml -f tests/rest_api/docker-compose.minio.yml down -v + pytest tests/rest_api/ -k 'GET' -s + pytest tests/rest_api/ --stop-services - name: Running unit tests env: @@ -137,6 +127,7 @@ jobs: docker-compose -f docker-compose.yml -f 
docker-compose.dev.yml -f docker-compose.ci.yml run cvat_ci /bin/bash \ -c 'cd cvat-data && npm ci --ignore-scripts && cd ../cvat-core && npm ci --ignore-scripts && npm run test && mv ./reports/coverage/lcov.info ${CONTAINER_COVERAGE_DATA_DIR} && chmod a+rwx ${CONTAINER_COVERAGE_DATA_DIR}/lcov.info' + - name: Uploading code coverage results as an artifact if: github.ref == 'refs/heads/develop' uses: actions/upload-artifact@v2 @@ -147,10 +138,7 @@ jobs: ${{ github.workspace }}/lcov.info E2E_testing: - if: | - github.event.pull_request.draft == false && - !startsWith(github.event.pull_request.title, '[WIP]') && - !startsWith(github.event.pull_request.title, '[Dependent]') + needs: cache runs-on: ubuntu-latest strategy: fail-fast: false @@ -159,76 +147,24 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Getting SHA from the default branch - id: get-sha - run: | - DEFAULT_BRANCH=$(curl -s \ - --request GET \ - --url https://api.github.com/repos/${{ github.repository }} \ - --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | \ - jq -r '.default_branch') - - SHA=$(curl -s \ - --request GET \ - --url https://api.github.com/repos/${{ github.repository }}/git/ref/heads/${DEFAULT_BRANCH} \ - --header 'authorization: token ${{ secrets.GITHUB_TOKEN }}' | \ - jq -r '.object.sha') - - echo ::set-output name=default_branch::${DEFAULT_BRANCH} - echo ::set-output name=sha::${SHA} - - - name: Waiting a cache creation in the default branch - run: | - URL_runs="https://api.github.com/repos/${{ github.repository }}/actions/workflows/cache.yml/runs" - SLEEP=45 - NUMBER_ATTEMPTS=10 - while [[ ${NUMBER_ATTEMPTS} -gt 0 ]]; do - RUN_status=$(curl -s \ - --request GET \ - --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' \ - --url https://api.github.com/repos/${{ github.repository }}/actions/workflows/cache.yml/runs | \ - jq -r '.workflow_runs[]? 
| - select( - (.head_sha == "${{ steps.get-sha.outputs.sha }}") - and (.event == "push") - and (.name == "Cache") - and (.head_branch == "${{ steps.get-sha.outputs.default_branch }}") - ) | .status') - if [[ ${RUN_status} == "completed" ]]; then - echo "The cache creation on the '${{ steps.get-sha.outputs.default_branch }}' branch has finished. Status: ${RUN_status}" - break - else - echo "The creation of the cache is not yet complete." - echo "There are still attempts to check the cache: ${NUMBER_ATTEMPTS}" - echo "Status of caching in the '${{ steps.get-sha.outputs.default_branch }}' branch: ${RUN_status}" - echo "sleep ${SLEEP}" - sleep ${SLEEP} - ((NUMBER_ATTEMPTS--)) - fi - done - if [[ ${NUMBER_ATTEMPTS} -eq 0 ]]; then - echo "Number of attempts expired!" - echo "Probably the creation of the cache is not yet complete. Will continue working without the cache." - fi - - name: Getting CVAT server cache from the default branch uses: actions/cache@v2 with: path: /tmp/cvat_cache_server - key: ${{ runner.os }}-build-server-${{ steps.get-sha.outputs.sha }} + key: ${{ runner.os }}-build-server-${{ needs.cache.outputs.sha }} - name: Getting CVAT UI cache from the default branch uses: actions/cache@v2 with: path: /tmp/cvat_cache_ui - key: ${{ runner.os }}-build-ui-${{ steps.get-sha.outputs.sha }} + key: ${{ runner.os }}-build-ui-${{ needs.cache.outputs.sha }} - uses: actions/setup-node@v2 with: node-version: '16.x' - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1.1.2 + uses: docker/setup-buildx-action@master - name: Building CVAT server image uses: docker/build-push-action@v2 @@ -260,35 +196,27 @@ jobs: DJANGO_SU_NAME: 'admin' DJANGO_SU_EMAIL: 'admin@localhost.company' DJANGO_SU_PASSWORD: '12qwaszx' - API_ABOUT_PAGE: "localhost:8080/api/server/about" run: | - docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/serverless/docker-compose.serverless.yml -f tests/docker-compose.file_share.yml up -d - /bin/bash -c 'while [[ $(curl 
-s -o /dev/null -w "%{http_code}" ${API_ABOUT_PAGE}) != "401" ]]; do sleep 5; done' - docker exec -i cvat /bin/bash -c "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('${DJANGO_SU_NAME}', '${DJANGO_SU_EMAIL}', '${DJANGO_SU_PASSWORD}')\" | python3 ~/manage.py shell" + docker-compose \ + -f docker-compose.yml \ + -f docker-compose.dev.yml \ + -f components/serverless/docker-compose.serverless.yml \ + -f tests/docker-compose.file_share.yml up -d + + /bin/bash -c \ + 'while [[ $(curl -s -o /dev/null -w "%{http_code}" ${{ env.API_ABOUT_PAGE }}) != "401" ]]; do sleep 5; done' + + docker exec -i cvat \ + /bin/bash -c \ + "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('${DJANGO_SU_NAME}', '${DJANGO_SU_EMAIL}', '${DJANGO_SU_PASSWORD}')\" | python3 ~/manage.py shell" cd ./tests npm ci if [ ${{ matrix.specs }} == 'canvas3d_functionality' ]; then - # Choosing 5 test files - selected_files=$(find ./cypress/integration | grep -e 'case.*\|issue.*' | grep js | grep 3d | sort | head -5 | tr '\n' ',') - - npx cypress run \ - --headed \ - --browser chrome \ - --env coverage=false \ - --config-file cypress_canvas3d.json \ - --spec "${selected_files} cypress/integration/remove_users_tasks_projects_organizations.js" + npx cypress run --headed --browser chrome --config-file pr_cypress_canvas3d.json else - # Choosing 20 test files - find ./cypress/integration | grep -e 'case.*\|issue.*' | grep js | sed '/.*3d.*/d' | sort > test_files - selected_files=$({ head -10; tail -10;} < test_files | tr '\n' ',') - rm test_files - - npx cypress run \ - --browser chrome \ - --env coverage=false \ - --spec "${selected_files} cypress/integration/remove_users_tasks_projects_organizations.js" + npx cypress run --browser chrome --config-file pr_cypress.json fi - name: Creating a log file from "cvat" container logs @@ -323,30 +251,28 @@ jobs: needs: [Unit_testing, E2E_testing] steps: - uses: actions/checkout@v2 + - name: Getting SHA 
from the default branch id: get-sha + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} run: | - DEFAULT_BRANCH=$(curl -s \ - --request GET \ - --url https://api.github.com/repos/${{ github.repository }} \ - --header 'authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' | \ - jq -r '.default_branch') - - SHA=$(curl -s \ - --request GET \ - --url https://api.github.com/repos/${{ github.repository }}/git/ref/heads/${DEFAULT_BRANCH} \ - --header 'authorization: token ${{ secrets.GITHUB_TOKEN }}' | \ - jq -r '.object.sha') + DEFAULT_BRANCH=$(gh api /repos/$REPO | jq -r '.default_branch') + SHA=$(gh api /repos/$REPO/git/ref/heads/$DEFAULT_BRANCH | jq -r '.object.sha') echo ::set-output name=default_branch::${DEFAULT_BRANCH} echo ::set-output name=sha::${SHA} + - name: Getting CVAT server cache from the default branch uses: actions/cache@v2 with: path: /tmp/cvat_cache_server key: ${{ runner.os }}-build-server-${{ steps.get-sha.outputs.sha }} + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1.1.2 + - name: Building CVAT server image uses: docker/build-push-action@v2 with: @@ -355,10 +281,12 @@ jobs: cache-from: type=local,src=/tmp/cvat_cache_server tags: openvino/cvat_server:latest load: true + - name: Downloading coverage results uses: actions/download-artifact@v2 with: name: coverage_results + - name: Combining coverage results run: | mkdir -p ./nyc_output_tmp @@ -366,6 +294,7 @@ jobs: mkdir -p ./.nyc_output npm ci npx nyc merge ./nyc_output_tmp ./.nyc_output/out.json + - name: Sending results to Coveralls env: HOST_COVERAGE_DATA_DIR: ${{ github.workspace }} diff --git a/.github/workflows/schedule.yml b/.github/workflows/schedule.yml index 7f6d7005..443fb76d 100644 --- a/.github/workflows/schedule.yml +++ b/.github/workflows/schedule.yml @@ -4,13 +4,132 @@ on: - cron: '0 22 * * *' workflow_dispatch: jobs: - build: + check_updates: + runs-on: ubuntu-latest + outputs: + last_commit_time: ${{ 
steps.check_updates.outputs.last_commit_time }} + last_night_time: ${{ steps.check_updates.outputs.last_night_time }} + steps: + - id: check_updates + env: + REPO: ${{ github.repository }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + default_branch=$(gh api /repos/$REPO | jq -r '.default_branch') + + last_commit_time=$(date +%s \ + -d $(gh api /repos/${REPO}/branches/${default_branch} | jq -r '.commit.commit.author.date')) + + last_night_time=$(date +%s \ + -d $(gh api /repos/${REPO}/actions/workflows/schedule.yml/runs | jq -r '.workflow_runs[].updated_at' | sort | tail -1)) + + echo ::set-output name=last_commit_time::${last_commit_time} + echo ::set-output name=last_night_time::${last_night_time} + + cache: + needs: check_updates + if: + needs.check_updates.outputs.last_commit_time > needs.check_updates.outputs.last_night_time + runs-on: ubuntu-latest + outputs: + sha: ${{ steps.get-sha.outputs.sha}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + steps: + - name: Getting SHA from the default branch + id: get-sha + run: | + DEFAULT_BRANCH=$(gh api /repos/$REPO | jq -r '.default_branch') + SHA=$(gh api /repos/$REPO/git/ref/heads/$DEFAULT_BRANCH | jq -r '.object.sha') + + echo ::set-output name=default_branch::${DEFAULT_BRANCH} + echo ::set-output name=sha::${SHA} + + - name: Waiting a cache creation in the default branch + if: ${{ github.ref_name != 'develop' }} + env: + DEFAULT_BRANCH: ${{ steps.get-sha.outputs.default_branch }} + SHA: ${{ steps.get-sha.outputs.sha }} + run: | + SLEEP=45 + NUMBER_ATTEMPTS=10 + while [[ ${NUMBER_ATTEMPTS} -gt 0 ]]; do + + RUN_status=$(gh api /repos/${REPO}/actions/workflows/cache.yml/runs | \ + jq -r ".workflow_runs[]? | + select((.head_sha == \"${SHA}\")) | .status") + + if [[ ${RUN_status} == "completed" ]]; then + echo "The cache creation on the ${DEFAULT_BRANCH} branch has finished. Status: ${RUN_status}" + break + else + echo "The creation of the cache is not yet complete." 
+ echo "There are still attempts to check the cache: ${NUMBER_ATTEMPTS}" + echo "Status of caching in the ${DEFAULT_BRANCH} branch: ${RUN_status}" + echo "sleep ${SLEEP}" + sleep ${SLEEP} + ((NUMBER_ATTEMPTS--)) + fi + done + if [[ ${NUMBER_ATTEMPTS} -eq 0 ]]; then + echo "Number of attempts expired!" + echo "Probably the creation of the cache is not yet complete. Will continue working without the cache." + fi + + run_tests: + needs: cache runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + - uses: actions/setup-node@v2 with: node-version: '16.x' + + - uses: actions/setup-python@v2 + with: + python-version: '3.8' + + - name: Getting CVAT server cache from the default branch + uses: actions/cache@v2 + with: + path: /tmp/cvat_cache_server + key: ${{ runner.os }}-build-server-${{ needs.cache.outputs.sha }} + + - name: Getting CVAT UI cache from the default branch + uses: actions/cache@v2 + with: + path: /tmp/cvat_cache_ui + key: ${{ runner.os }}-build-ui-${{ needs.cache.outputs.sha }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Building CVAT server image + uses: docker/build-push-action@v2 + with: + context: . + file: ./Dockerfile + cache-from: type=local,src=/tmp/cvat_cache_server + tags: openvino/cvat_server:latest + load: true + + - name: Building CVAT UI image + uses: docker/build-push-action@v2 + with: + context: . 
+ file: ./Dockerfile.ui + cache-from: type=local,src=/tmp/cvat_cache_ui + tags: openvino/cvat_ui:latest + load: true + + - name: Running REST API tests + run: | + pip3 install --user -r tests/rest_api/requirements.txt + pytest tests/rest_api/ + pytest tests/rest_api/ --stop-services + - name: Build CVAT env: DJANGO_SU_NAME: "admin" @@ -21,11 +140,21 @@ jobs: docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f ./tests/docker-compose.email.yml -f tests/docker-compose.file_share.yml -f components/serverless/docker-compose.serverless.yml up -d --build /bin/bash -c 'while [[ $(curl -s -o /dev/null -w "%{http_code}" ${API_ABOUT_PAGE}) != "401" ]]; do sleep 5; done' docker exec -i cvat /bin/bash -c "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('${DJANGO_SU_NAME}', '${DJANGO_SU_EMAIL}', '${DJANGO_SU_PASSWORD}')\" | python3 ~/manage.py shell" + - name: End-to-end testing run: | cd ./tests npm ci npm run cypress:run:firefox + + - name: Unit tests + run: | + python manage.py test cvat/apps utils/cli + + npm ci + cd cvat-core + npm run test + - name: Uploading cypress screenshots as an artifact if: failure() uses: actions/upload-artifact@v2 diff --git a/CHANGELOG.md b/CHANGELOG.md index cfe6c3c3..c8c40a4e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - Bumped nuclio version to 1.8.14 () +- Simplified running REST API tests. Extended CI-nightly workflow () + ### Deprecated - TDB diff --git a/site/content/en/docs/contributing/development-environment.md b/site/content/en/docs/contributing/development-environment.md index 7150d1f4..3bca39ce 100644 --- a/site/content/en/docs/contributing/development-environment.md +++ b/site/content/en/docs/contributing/development-environment.md @@ -134,6 +134,7 @@ description: 'Installing a development environment for different operating syste You have done! 
Now it is possible to insert breakpoints and debug server and client of the tool. +Instructions for running tests locally are available [here](/site/content/en/docs/contributing/running-tests.md). ## Note for Windows users diff --git a/site/content/en/docs/contributing/running-tests.md b/site/content/en/docs/contributing/running-tests.md new file mode 100644 index 00000000..7b19ee07 --- /dev/null +++ b/site/content/en/docs/contributing/running-tests.md @@ -0,0 +1,94 @@ +--- +title: 'Running tests' +linkTitle: 'Running tests' +weight: 11 +description: 'Instructions on how to run all existing tests.' +--- + +# E2E tests + +**Initial steps**: +1. Run CVAT instance: + ``` + docker-compose \ + -f docker-compose.yml \ + -f docker-compose.dev.yml \ + -f components/serverless/docker-compose.serverless.yml \ + -f tests/docker-compose.file_share.yml up -d + ``` +1. Add test user in CVAT: + ``` + docker exec -i cvat \ + /bin/bash -c \ + "echo \"from django.contrib.auth.models import User; User.objects.create_superuser('admin', 'admin@localhost.company', '12qwaszx')\" | python3 ~/manage.py shell" + ``` +1. Install npm dependencies: + ``` + cd tests + npm ci + ``` + +**Running tests** + +``` +npm run cypress:run:chrome +npm run cypress:run:chrome:canvas3d +``` + +# REST API tests + +**Initial steps** +1. Install all necessary requirements before running REST API tests: + ``` + pip install -r ./tests/rest_api/requirements.txt + ``` + +**Running tests** + +Run all REST API tests: + +``` +pytest ./tests/rest_api +``` + +This command will automatically start all necessary docker containers. + +If you want to start/stop these containers without running tests +use special options for it: + +``` +pytest ./tests/rest_api --start-services +pytest ./tests/rest_api --stop-services +``` + +If you need to rebuild your CVAT images add `--rebuild` option: +``` +pytest ./tests/rest_api --rebuild +``` + +# Unit tests + +**Initial steps** +1. 
Install necessary Python dependencies: + ``` + pip install -r cvat/requirements/testing.txt + ``` +1. Install npm dependencies: + ``` + npm ci + ``` +1. Run CVAT instance + ``` + docker-compose -f docker-compose.yml -f docker-compose.dev.yml up -d + ``` + +**Running tests** +1. Python tests + ``` + python manage.py test --settings cvat.settings.testing cvat/apps utils/cli + ``` +1. JS tests + ``` + cd cvat-core + npm run test + ``` diff --git a/tests/cypress_cron_type.json b/tests/nightly_cypress.json similarity index 100% rename from tests/cypress_cron_type.json rename to tests/nightly_cypress.json diff --git a/tests/pr_cypress.json b/tests/pr_cypress.json new file mode 100644 index 00000000..97773d04 --- /dev/null +++ b/tests/pr_cypress.json @@ -0,0 +1,37 @@ +{ + "video": false, + "baseUrl": "http://localhost:8080", + "viewportWidth": 1300, + "viewportHeight": 960, + "defaultCommandTimeout": 25000, + "downloadsFolder": "cypress/fixtures", + "env": { + "user": "admin", + "email": "admin@localhost.company", + "password": "12qwaszx", + "coverage": false + }, + "testFiles": [ + "actions_objects2/case_108_rotated_bounding_boxes.js", + "actions_objects2/case_10_polygon_shape_track_label_points.js", + "actions_objects2/case_115_ellipse_shape_track_label.js", + "actions_objects2/case_11_polylines_shape_track_label_points.js", + "actions_objects2/case_12_points_shape_track_label.js", + "actions_objects2/case_13_merge_split_features.js", + "actions_objects2/case_14_appearance_features.js", + "actions_objects2/case_15_group_features.js", + "actions_objects2/case_16_z_order_features.js", + "actions_objects2/case_17_lock_hide_features.js", + "issues_prs/issue_2418_object_tag_same_labels.js", + "issues_prs/issue_2485_navigation_empty_frames.js", + "issues_prs/issue_2486_not_edit_object_aam.js", + "issues_prs/issue_2487_extra_instances_canvas_grouping.js", + "issues_prs/issue_2661_displaying_attached_files_when_creating_task.js", + 
"issues_prs/issue_2753_call_HOC_component_each_render.js", + "issues_prs/issue_2807_polyline_editing.js", + "issues_prs/issue_2992_crop_polygon_properly.js", + "issues_prs/pr_1370_check_UI_fail_with_object_dragging_and_go_next_frame.js", + "issues_prs/pr_2203_error_cannot_read_property_at_saving_job.js", + "remove_users_tasks_projects_organizations.js" + ] +} diff --git a/tests/pr_cypress_canvas3d.json b/tests/pr_cypress_canvas3d.json new file mode 100644 index 00000000..99a3363c --- /dev/null +++ b/tests/pr_cypress_canvas3d.json @@ -0,0 +1,22 @@ +{ + "video": false, + "baseUrl": "http://localhost:8080", + "viewportWidth": 1300, + "viewportHeight": 960, + "defaultCommandTimeout": 25000, + "downloadsFolder": "cypress/fixtures", + "env": { + "user": "admin", + "email": "admin@localhost.company", + "password": "12qwaszx", + "coverage": false + }, + "testFiles": [ + "actions_projects_models/case_104_project_export_3d.js", + "canvas3d_functionality_2/case_56_canvas3d_functionality_basic_actions.js", + "canvas3d_functionality_2/case_62_canvas3d_functionality_views_resize.js", + "canvas3d_functionality_2/case_63_canvas3d_functionality_control_button_mouse_interaction.js", + "canvas3d_functionality_2/case_64_canvas3d_functionality_cuboid.js", + "remove_users_tasks_projects_organizations.js" + ] +} diff --git a/tests/rest_api/README.md b/tests/rest_api/README.md index 4d50ea26..91fb84f6 100644 --- a/tests/rest_api/README.md +++ b/tests/rest_api/README.md @@ -19,12 +19,6 @@ the server calling REST API directly (as it done by users). ## How to run? -1. Execute commands below to run docker containers: - ```console - export MINIO_ACCESS_KEY="minio_access_key" - export MINIO_SECRET_KEY="minio_secret_key" - docker-compose -f docker-compose.yml -f docker-compose.dev.yml -f components/analytics/docker-compose.analytics.yml -f tests/rest_api/docker-compose.minio.yml up -d --build - ``` 1. After that please look at documentation for [pytest](https://docs.pytest.org/en/6.2.x/). 
Generally, you have to install requirements and run the following command from the root directory of the cloned CVAT repository: @@ -34,6 +28,9 @@ the server calling REST API directly (as it done by users). pytest tests/rest_api/ ``` + See the [contributing guide](../../site/content/en/docs/contributing/running-tests.md) + to get more information about tests running. + ## How to upgrade testing assets? When you have a new use case which cannot be expressed using objects already @@ -69,8 +66,8 @@ for i, color in enumerate(colormap): To backup DB and data volume, please use commands below. ```console -docker exec cvat python manage.py dumpdata --indent 2 > assets/cvat_db/data.json -docker exec cvat tar -cjv /home/django/data > assets/cvat_db/cvat_data.tar.bz2 +docker exec test_cvat_1 python manage.py dumpdata --indent 2 > assets/cvat_db/data.json +docker exec test_cvat_1 tar -cjv /home/django/data > assets/cvat_db/cvat_data.tar.bz2 ``` > Note: if you won't be use --indent options or will be use with other value @@ -90,8 +87,8 @@ python utils/dump_objects.py To restore DB and data volume, please use commands below. ```console -cat assets/cvat_db/data.json | docker exec -i cvat python manage.py loaddata --format=json - -cat assets/cvat_db/cvat_data.tar.bz2 | docker exec -i cvat tar --strip 3 -C /home/django/data/ -xj +cat assets/cvat_db/data.json | docker exec -i test_cvat_1 python manage.py loaddata --format=json - +cat assets/cvat_db/cvat_data.tar.bz2 | docker exec -i test_cvat_1 tar --strip 3 -C /home/django/data/ -xj ``` ## Assets directory structure @@ -173,9 +170,9 @@ Assets directory has two parts: 1. If your test infrastructure has been corrupted and you have errors during db restoring. 
You should to create (or recreate) `cvat` database: ``` - docker exec cvat_db dropdb --if-exists cvat - docker exec cvat_db createdb cvat - docker exec cvat python manage.py migrate + docker exec test_cvat_db_1 dropdb --if-exists cvat + docker exec test_cvat_db_1 createdb cvat + docker exec test_cvat_1 python manage.py migrate ``` 1. Perform migrate when some relation does not exists. Example of error message: @@ -184,7 +181,7 @@ Assets directory has two parts: ``` Solution: ``` - docker exec cvat python manage.py migrate + docker exec test_cvat_1 python manage.py migrate ``` 1. If for some reason you need to recreate cvat database, but using `dropdb` @@ -196,6 +193,6 @@ Assets directory has two parts: In this case you should terminate all existent connections for cvat database, you can perform it with command: ``` - docker exec cvat_db psql -U root -d postgres -v from=cvat -v to=test_db -f restore.sql + docker exec test_cvat_db_1 psql -U root -d postgres -v from=cvat -v to=test_db -f restore.sql ``` diff --git a/tests/rest_api/assets/cvat_db/cvat_data.tar.bz2 b/tests/rest_api/assets/cvat_db/cvat_data.tar.bz2 index cb9f2f9e..38ae1ac1 100644 Binary files a/tests/rest_api/assets/cvat_db/cvat_data.tar.bz2 and b/tests/rest_api/assets/cvat_db/cvat_data.tar.bz2 differ diff --git a/tests/rest_api/conftest.py b/tests/rest_api/conftest.py index b84513f7..02b0e448 100644 --- a/tests/rest_api/conftest.py +++ b/tests/rest_api/conftest.py @@ -1,306 +1,2 @@ -# Copyright (C) 2021 Intel Corporation -# -# SPDX-License-Identifier: MIT -from subprocess import run, CalledProcessError -import pytest -import json -import os.path as osp -from .utils.config import ASSETS_DIR - -CVAT_DB_DIR = osp.join(ASSETS_DIR, 'cvat_db') - -def _run(command): - try: - run(command.split(), check=True) #nosec - except CalledProcessError: - pytest.exit(f'Command failed: {command}. 
Add `-s` option to see more details') - -def restore_data_volume(): - _run(f"docker container cp {osp.join(ASSETS_DIR, 'cvat_db', 'cvat_data.tar.bz2')} cvat:cvat_data.tar.bz2") - _run(f"docker exec -i cvat tar --strip 3 -xjf /cvat_data.tar.bz2 -C /home/django/data/") - -def create_test_db(): - _run(f"docker container cp {osp.join(CVAT_DB_DIR, 'restore.sql')} cvat_db:restore.sql") - _run(f"docker container cp {osp.join(CVAT_DB_DIR, 'data.json')} cvat:data.json") - _run('docker exec cvat python manage.py loaddata /data.json') - _run('docker exec cvat_db psql -U root -d postgres -v from=cvat -v to=test_db -f restore.sql') - -@pytest.fixture(scope='session', autouse=True) -def init_test_db(): - restore_data_volume() - create_test_db() - - yield - - _run('docker exec cvat_db psql -U root -d postgres -v from=test_db -v to=cvat -f restore.sql') - _run('docker exec cvat_db dropdb test_db') - -@pytest.fixture(scope='function') -def restore(): - _run('docker exec cvat_db psql -U root -d postgres -v from=test_db -v to=cvat -f restore.sql') - -@pytest.fixture(scope='function') -def restore_cvat_data(): - restore_data_volume() - -class Container: - def __init__(self, data, key='id'): - self.raw_data = data - self.map_data = { obj[key]: obj for obj in data } - - @property - def raw(self): - return self.raw_data - - @property - def map(self): - return self.map_data - - def __iter__(self): - return iter(self.raw_data) - - def __len__(self): - return len(self.raw_data) - - def __getitem__(self, key): - if isinstance(key, slice): - return self.raw_data[key] - return self.map_data[key] - -@pytest.fixture(scope='module') -def users(): - with open(osp.join(ASSETS_DIR, 'users.json')) as f: - return Container(json.load(f)['results']) - -@pytest.fixture(scope='module') -def organizations(): - with open(osp.join(ASSETS_DIR, 'organizations.json')) as f: - return Container(json.load(f)) - -@pytest.fixture(scope='module') -def memberships(): - with open(osp.join(ASSETS_DIR, 
'memberships.json')) as f: - return Container(json.load(f)['results']) - -@pytest.fixture(scope='module') -def tasks(): - with open(osp.join(ASSETS_DIR, 'tasks.json')) as f: - return Container(json.load(f)['results']) - -@pytest.fixture(scope='module') -def projects(): - with open(osp.join(ASSETS_DIR, 'projects.json')) as f: - return Container(json.load(f)['results']) - -@pytest.fixture(scope='module') -def jobs(): - with open(osp.join(ASSETS_DIR, 'jobs.json')) as f: - return Container(json.load(f)['results']) - -@pytest.fixture(scope='module') -def invitations(): - with open(osp.join(ASSETS_DIR, 'invitations.json')) as f: - return Container(json.load(f)['results'], key='key') - -@pytest.fixture(scope='module') -def annotations(): - with open(osp.join(ASSETS_DIR, 'annotations.json')) as f: - return json.load(f) - -@pytest.fixture(scope='module') -def cloud_storages(): - with open(osp.join(ASSETS_DIR, 'cloudstorages.json')) as f: - return Container(json.load(f)['results']) - -@pytest.fixture(scope='module') -def issues(): - with open(osp.join(ASSETS_DIR, 'issues.json')) as f: - return Container(json.load(f)['results']) - -@pytest.fixture(scope='module') -def users_by_name(users): - return {user['username']: user for user in users} - -@pytest.fixture(scope='module') -def jobs_by_org(tasks, jobs): - data = {} - for job in jobs: - data.setdefault(tasks[job['task_id']]['organization'], []).append(job) - data[''] = data.pop(None, []) - return data - -@pytest.fixture(scope='module') -def tasks_by_org(tasks): - data = {} - for task in tasks: - data.setdefault(task['organization'], []).append(task) - data[''] = data.pop(None, []) - return data - -@pytest.fixture(scope='module') -def issues_by_org(tasks, jobs, issues): - data = {} - for issue in issues: - data.setdefault(tasks[jobs[issue['job']]['task_id']]['organization'], []).append(issue) - data[''] = data.pop(None, []) - return data - -@pytest.fixture(scope='module') -def assignee_id(): - def get_id(data): - if 
data.get('assignee') is not None: - return data['assignee']['id'] - return get_id - -def ownership(func): - def wrap(user_id, resource_id): - if resource_id is None: - return False - return func(user_id, resource_id) - return wrap - -@pytest.fixture(scope='module') -def is_project_staff(projects, assignee_id): - @ownership - def check(user_id, pid): - return user_id == projects[pid]['owner']['id'] or \ - user_id == assignee_id(projects[pid]) - return check - -@pytest.fixture(scope='module') -def is_task_staff(tasks, is_project_staff, assignee_id): - @ownership - def check(user_id, tid): - return user_id == tasks[tid]['owner']['id'] or \ - user_id == assignee_id(tasks[tid]) or \ - is_project_staff(user_id, tasks[tid]['project_id']) - return check - -@pytest.fixture(scope='module') -def is_job_staff(jobs, is_task_staff, assignee_id): - @ownership - def check(user_id, jid): - return user_id == assignee_id(jobs[jid]) or \ - is_task_staff(user_id, jobs[jid]['task_id']) - return check - -@pytest.fixture(scope='module') -def is_issue_staff(issues, jobs, assignee_id): - @ownership - def check(user_id, issue_id): - return user_id == issues[issue_id]['owner']['id'] or \ - user_id == assignee_id(issues[issue_id]) or \ - user_id == assignee_id(jobs[issues[issue_id]['job']]) - return check - -@pytest.fixture(scope='module') -def is_issue_admin(issues, jobs, is_task_staff): - @ownership - def check(user_id, issue_id): - return is_task_staff(user_id, jobs[issues[issue_id]['job']]['task_id']) - return check - -@pytest.fixture(scope='module') -def find_users(test_db): - def find(**kwargs): - assert len(kwargs) > 0 - assert any(kwargs.values()) - - data = test_db - kwargs = dict(filter(lambda a: a[1] is not None, kwargs.items())) - for field, value in kwargs.items(): - if field.startswith('exclude_'): - field = field.split('_', maxsplit=1)[1] - exclude_rows = set(v['id'] for v in - filter(lambda a: a[field] == value, test_db)) - data = list(filter(lambda a: a['id'] not in 
exclude_rows, data)) - else: - data = list(filter(lambda a: a[field] == value, data)) - - return data - return find - -@pytest.fixture(scope='module') -def test_db(users, users_by_name, memberships): - data = [] - fields = ['username', 'id', 'privilege', 'role', 'org', 'membership_id'] - def add_row(**kwargs): - data.append({field: kwargs.get(field) for field in fields}) - - for user in users: - for group in user['groups']: - add_row(username=user['username'], id=user['id'], privilege=group) - - for membership in memberships: - username = membership['user']['username'] - for group in users_by_name[username]['groups']: - add_row(username=username, role=membership['role'], privilege=group, - id=membership['user']['id'], org=membership['organization'], - membership_id=membership['id']) - - return data - -@pytest.fixture(scope='module') -def org_staff(memberships): - def find(org_id): - if org_id in ['', None]: - return set() - else: - return set(m['user']['id'] for m in memberships - if m['role'] in ['maintainer', 'owner'] and m['user'] is not None - and m['organization'] == org_id) - return find - -@pytest.fixture(scope='module') -def is_org_member(memberships): - def check(user_id, org_id): - if org_id in ['', None]: - return True - else: - return user_id in set(m['user']['id'] for m in memberships - if m['user'] is not None and m['organization'] == org_id) - return check - -@pytest.fixture(scope='module') -def find_job_staff_user(is_job_staff): - def find(jobs, users, is_staff): - for job in jobs: - for user in users: - if is_staff == is_job_staff(user['id'], job['id']): - return user['username'], job['id'] - return None, None - return find - -@pytest.fixture(scope='module') -def find_task_staff_user(is_task_staff): - def find(tasks, users, is_staff): - for task in tasks: - for user in users: - if is_staff == is_task_staff(user['id'], task['id']): - return user['username'], task['id'] - return None, None - return find - -@pytest.fixture(scope='module') -def 
find_issue_staff_user(is_issue_staff, is_issue_admin): - def find(issues, users, is_staff, is_admin): - for issue in issues: - for user in users: - i_admin, i_staff = is_issue_admin(user['id'], issue['id']), is_issue_staff(user['id'], issue['id']) - if (is_admin is None and (i_staff or i_admin) == is_staff) \ - or (is_admin == i_admin and is_staff == i_staff): - return user['username'], issue['id'] - return None, None - return find - -@pytest.fixture(scope='module') -def filter_jobs_with_shapes(annotations): - def find(jobs): - return list(filter(lambda j: annotations['job'][str(j['id'])]['shapes'], jobs)) - return find - -@pytest.fixture(scope='module') -def filter_tasks_with_shapes(annotations): - def find(tasks): - return list(filter(lambda t: annotations['task'][str(t['id'])]['shapes'], tasks)) - return find +from .fixtures.init import * +from .fixtures.data import * diff --git a/tests/rest_api/docker-compose.minio.yml b/tests/rest_api/docker-compose.minio.yml index ebc7c746..ccc7d7d2 100644 --- a/tests/rest_api/docker-compose.minio.yml +++ b/tests/rest_api/docker-compose.minio.yml @@ -13,8 +13,8 @@ services: - 9000:9000 - 9001:9001 environment: - MINIO_ROOT_USER: ${MINIO_ACCESS_KEY} - MINIO_ROOT_PASSWORD: ${MINIO_SECRET_KEY} + MINIO_ROOT_USER: "minio_access_key" + MINIO_ROOT_PASSWORD: "minio_secret_key" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] interval: 30s @@ -31,8 +31,8 @@ services: environment: MC_PATH: "/usr/bin/mc" MINIO_HOST: "http://minio:9000" - MINIO_ACCESS_KEY: - MINIO_SECRET_KEY: + MINIO_ACCESS_KEY: "minio_access_key" + MINIO_SECRET_KEY: "minio_secret_key" MINIO_ALIAS: "local_minio" PRIVATE_BUCKET: "private" PUBLIC_BUCKET: "public" diff --git a/tests/rest_api/fixtures/__init__.py b/tests/rest_api/fixtures/__init__.py new file mode 100644 index 00000000..0aa5e58c --- /dev/null +++ b/tests/rest_api/fixtures/__init__.py @@ -0,0 +1,3 @@ +# Copyright (C) 2021 Intel Corporation +# +# SPDX-License-Identifier: MIT 
diff --git a/tests/rest_api/fixtures/data.py b/tests/rest_api/fixtures/data.py new file mode 100644 index 00000000..e4a1f738 --- /dev/null +++ b/tests/rest_api/fixtures/data.py @@ -0,0 +1,276 @@ +# Copyright (C) 2021 Intel Corporation +# +# SPDX-License-Identifier: MIT + +import pytest +import json +import os.path as osp +from rest_api.utils.config import ASSETS_DIR + +CVAT_DB_DIR = osp.join(ASSETS_DIR, 'cvat_db') + +class Container: + def __init__(self, data, key='id'): + self.raw_data = data + self.map_data = { obj[key]: obj for obj in data } + + @property + def raw(self): + return self.raw_data + + @property + def map(self): + return self.map_data + + def __iter__(self): + return iter(self.raw_data) + + def __len__(self): + return len(self.raw_data) + + def __getitem__(self, key): + if isinstance(key, slice): + return self.raw_data[key] + return self.map_data[key] + +@pytest.fixture(scope='session') +def users(): + with open(osp.join(ASSETS_DIR, 'users.json')) as f: + return Container(json.load(f)['results']) + +@pytest.fixture(scope='session') +def organizations(): + with open(osp.join(ASSETS_DIR, 'organizations.json')) as f: + return Container(json.load(f)) + +@pytest.fixture(scope='session') +def memberships(): + with open(osp.join(ASSETS_DIR, 'memberships.json')) as f: + return Container(json.load(f)['results']) + +@pytest.fixture(scope='session') +def tasks(): + with open(osp.join(ASSETS_DIR, 'tasks.json')) as f: + return Container(json.load(f)['results']) + +@pytest.fixture(scope='session') +def projects(): + with open(osp.join(ASSETS_DIR, 'projects.json')) as f: + return Container(json.load(f)['results']) + +@pytest.fixture(scope='session') +def jobs(): + with open(osp.join(ASSETS_DIR, 'jobs.json')) as f: + return Container(json.load(f)['results']) + +@pytest.fixture(scope='session') +def invitations(): + with open(osp.join(ASSETS_DIR, 'invitations.json')) as f: + return Container(json.load(f)['results'], key='key') + +@pytest.fixture(scope='session') 
+def annotations(): + with open(osp.join(ASSETS_DIR, 'annotations.json')) as f: + return json.load(f) + +@pytest.fixture(scope='session') +def cloud_storages(): + with open(osp.join(ASSETS_DIR, 'cloudstorages.json')) as f: + return Container(json.load(f)['results']) + +@pytest.fixture(scope='session') +def issues(): + with open(osp.join(ASSETS_DIR, 'issues.json')) as f: + return Container(json.load(f)['results']) + +@pytest.fixture(scope='session') +def users_by_name(users): + return {user['username']: user for user in users} + +@pytest.fixture(scope='session') +def jobs_by_org(tasks, jobs): + data = {} + for job in jobs: + data.setdefault(tasks[job['task_id']]['organization'], []).append(job) + data[''] = data.pop(None, []) + return data + +@pytest.fixture(scope='session') +def tasks_by_org(tasks): + data = {} + for task in tasks: + data.setdefault(task['organization'], []).append(task) + data[''] = data.pop(None, []) + return data + +@pytest.fixture(scope='session') +def issues_by_org(tasks, jobs, issues): + data = {} + for issue in issues: + data.setdefault(tasks[jobs[issue['job']]['task_id']]['organization'], []).append(issue) + data[''] = data.pop(None, []) + return data + +@pytest.fixture(scope='session') +def assignee_id(): + def get_id(data): + if data.get('assignee') is not None: + return data['assignee']['id'] + return get_id + +def ownership(func): + def wrap(user_id, resource_id): + if resource_id is None: + return False + return func(user_id, resource_id) + return wrap + +@pytest.fixture(scope='session') +def is_project_staff(projects, assignee_id): + @ownership + def check(user_id, pid): + return user_id == projects[pid]['owner']['id'] or \ + user_id == assignee_id(projects[pid]) + return check + +@pytest.fixture(scope='session') +def is_task_staff(tasks, is_project_staff, assignee_id): + @ownership + def check(user_id, tid): + return user_id == tasks[tid]['owner']['id'] or \ + user_id == assignee_id(tasks[tid]) or \ + is_project_staff(user_id, 
tasks[tid]['project_id']) + return check + +@pytest.fixture(scope='session') +def is_job_staff(jobs, is_task_staff, assignee_id): + @ownership + def check(user_id, jid): + return user_id == assignee_id(jobs[jid]) or \ + is_task_staff(user_id, jobs[jid]['task_id']) + return check + +@pytest.fixture(scope='session') +def is_issue_staff(issues, jobs, assignee_id): + @ownership + def check(user_id, issue_id): + return user_id == issues[issue_id]['owner']['id'] or \ + user_id == assignee_id(issues[issue_id]) or \ + user_id == assignee_id(jobs[issues[issue_id]['job']]) + return check + +@pytest.fixture(scope='session') +def is_issue_admin(issues, jobs, is_task_staff): + @ownership + def check(user_id, issue_id): + return is_task_staff(user_id, jobs[issues[issue_id]['job']]['task_id']) + return check + +@pytest.fixture(scope='session') +def find_users(test_db): + def find(**kwargs): + assert len(kwargs) > 0 + assert any(kwargs.values()) + + data = test_db + kwargs = dict(filter(lambda a: a[1] is not None, kwargs.items())) + for field, value in kwargs.items(): + if field.startswith('exclude_'): + field = field.split('_', maxsplit=1)[1] + exclude_rows = set(v['id'] for v in + filter(lambda a: a[field] == value, test_db)) + data = list(filter(lambda a: a['id'] not in exclude_rows, data)) + else: + data = list(filter(lambda a: a[field] == value, data)) + + return data + return find + +@pytest.fixture(scope='session') +def test_db(users, users_by_name, memberships): + data = [] + fields = ['username', 'id', 'privilege', 'role', 'org', 'membership_id'] + def add_row(**kwargs): + data.append({field: kwargs.get(field) for field in fields}) + + for user in users: + for group in user['groups']: + add_row(username=user['username'], id=user['id'], privilege=group) + + for membership in memberships: + username = membership['user']['username'] + for group in users_by_name[username]['groups']: + add_row(username=username, role=membership['role'], privilege=group, + 
id=membership['user']['id'], org=membership['organization'], + membership_id=membership['id']) + + return data + +@pytest.fixture(scope='session') +def org_staff(memberships): + def find(org_id): + if org_id in ['', None]: + return set() + else: + return set(m['user']['id'] for m in memberships + if m['role'] in ['maintainer', 'owner'] and m['user'] != None + and m['organization'] == org_id) + return find + +@pytest.fixture(scope='session') +def is_org_member(memberships): + def check(user_id, org_id): + if org_id in ['', None]: + return True + else: + return user_id in set(m['user']['id'] for m in memberships + if m['user'] != None and m['organization'] == org_id) + return check + +@pytest.fixture(scope='session') +def find_job_staff_user(is_job_staff): + def find(jobs, users, is_staff): + for job in jobs: + for user in users: + if is_staff == is_job_staff(user['id'], job['id']): + return user['username'], job['id'] + return None, None + return find + +@pytest.fixture(scope='session') +def find_task_staff_user(is_task_staff): + def find(tasks, users, is_staff): + for task in tasks: + for user in users: + if is_staff == is_task_staff(user['id'], task['id']): + return user['username'], task['id'] + return None, None + return find + +@pytest.fixture(scope='session') +def find_issue_staff_user(is_issue_staff, is_issue_admin): + def find(issues, users, is_staff, is_admin): + for issue in issues: + for user in users: + i_admin, i_staff = is_issue_admin(user['id'], issue['id']), is_issue_staff(user['id'], issue['id']) + if (is_admin is None and (i_staff or i_admin) == is_staff) \ + or (is_admin == i_admin and is_staff == i_staff): + return user['username'], issue['id'] + return None, None + return find + +@pytest.fixture(scope='session') +def filter_jobs_with_shapes(annotations): + def find(jobs): + return list(filter(lambda j: annotations['job'][str(j['id'])]['shapes'], jobs)) + return find + +@pytest.fixture(scope='session') +def filter_tasks_with_shapes(annotations): 
+ def find(tasks): + return list(filter(lambda t: annotations['task'][str(t['id'])]['shapes'], tasks)) + return find + +@pytest.fixture(scope='session') +def tasks_with_shapes(tasks, filter_tasks_with_shapes): + return filter_tasks_with_shapes(tasks) \ No newline at end of file diff --git a/tests/rest_api/fixtures/init.py b/tests/rest_api/fixtures/init.py new file mode 100644 index 00000000..31f519f0 --- /dev/null +++ b/tests/rest_api/fixtures/init.py @@ -0,0 +1,177 @@ +import os.path as osp +import re +from http import HTTPStatus +from subprocess import PIPE, CalledProcessError, run + +import pytest +import os +import requests +from rest_api.utils.config import ASSETS_DIR, get_api_url + +CVAT_ROOT_DIR = __file__[: __file__.rfind(osp.join("tests", ""))] +CVAT_DB_DIR = osp.join(ASSETS_DIR, "cvat_db") +PREFIX = "test" + +CONTAINER_NAME_FILES = [ + osp.join(CVAT_ROOT_DIR, dc_file) + for dc_file in ( + "components/analytics/docker-compose.analytics.tests.yml", + "docker-compose.tests.yml", + ) +] + +DC_FILES = [ + osp.join(CVAT_ROOT_DIR, dc_file) + for dc_file in ("docker-compose.dev.yml", "tests/rest_api/docker-compose.minio.yml") +] + CONTAINER_NAME_FILES + + +def pytest_addoption(parser): + group = parser.getgroup("CVAT REST API testing options") + group._addoption( + "--start-services", + action="store_true", + help="Start all necessary CVAT containers without running tests. (default: %(default)s)", + ) + + group._addoption( + "--stop-services", + action="store_true", + help="Stop all testing containers without running tests. (default: %(default)s)", + ) + + group._addoption( + "--rebuild", + action="store_true", + help="Rebuild CVAT images and then start containers. (default: %(default)s)", + ) + + group._addoption( + "--cleanup", + action="store_true", + help="Delete files that was create by tests without running tests. 
(default: %(default)s)", + ) + + +def _run(command): + try: + proc = run(command.split(), check=True, stdout=PIPE, stderr=PIPE) # nosec + return proc.stdout.decode(), proc.stderr.decode() + except CalledProcessError as exc: + pytest.exit( + f"Command failed: {command}.\n" + f"Error message: {exc.stderr.decode()}.\n" + f"Add `-s` option to see more details" + ) + + +def docker_cp(source, target): + _run(f"docker container cp {source} {target}") + + +def exec_cvat(command): + _run(f"docker exec {PREFIX}_cvat_1 {command}") + + +def exec_cvat_db(command): + _run(f"docker exec {PREFIX}_cvat_db_1 {command}") + + +def restore_db(): + exec_cvat_db("psql -U root -d postgres -v from=test_db -v to=cvat -f /tmp/restore.sql") + + +def create_compose_files(): + for filename in CONTAINER_NAME_FILES: + with open(filename.replace(".tests.yml", ".yml"), "r") as dcf, open(filename, "w") as ndcf: + ndcf.writelines( + [line for line in dcf.readlines() if not re.match("^.+container_name.+$", line)] + ) + + +def delete_compose_files(): + for filename in CONTAINER_NAME_FILES: + if osp.exists(filename): + os.remove(filename) + + +def wait_for_server(): + while True: + response = requests.get(get_api_url("users/self")) + if response.status_code == HTTPStatus.UNAUTHORIZED: + break + +def restore_data_volumes(): + docker_cp(osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"), f"{PREFIX}_cvat_1:/tmp/cvat_data.tar.bz2") + exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/") + +def start_services(rebuild=False): + running_containers = [cn for cn in _run("docker ps --format {{.Names}}")[0].split("\n") if cn] + + if any([cn in ["cvat", "cvat_db"] for cn in running_containers]): + pytest.exit( + "It's looks like you already have running cvat containers. Stop them and try again. 
" + f"List of running containers: {', '.join(running_containers)}" + ) + + out = _run(f"docker-compose -p {PREFIX} -f {' -f '.join(DC_FILES)} up -d " + "--build" * rebuild)[1] + + restore_data_volumes() + docker_cp(osp.join(CVAT_DB_DIR, "restore.sql"), f"{PREFIX}_cvat_db_1:/tmp/restore.sql") + docker_cp(osp.join(CVAT_DB_DIR, "data.json"), f"{PREFIX}_cvat_1:/tmp/data.json") + + return out + + +@pytest.fixture(autouse=True, scope="session") +def services(request): + stop = request.config.getoption("--stop-services") + start = request.config.getoption("--start-services") + rebuild = request.config.getoption("--rebuild") + cleanup = request.config.getoption("--cleanup") + + if start and stop: + raise Exception("--start-services and --stop-services are incompatible") + + if cleanup: + delete_compose_files() + pytest.exit(f"All generated test files have been deleted", returncode=0) + + if not all([osp.exists(f) for f in CONTAINER_NAME_FILES]): + create_compose_files() + + if stop: + out = _run(f"docker-compose -p {PREFIX} -f {' -f '.join(DC_FILES)} down -v")[1] + out = set(l.split()[1] for l in out.split("\n") if "done" in l.split()) + pytest.exit(f"All testing containers are stopped: {', '.join(out)}", returncode=0) + + started_services = start_services(rebuild) + wait_for_server() + + exec_cvat("python manage.py loaddata /tmp/data.json") + exec_cvat_db("psql -U root -d postgres -v from=cvat -v to=test_db -f /tmp/restore.sql") + + if start: + pytest.exit( + f"All necessary containers have been created and started: {started_services}", + returncode=0, + ) + + yield + + restore_db() + exec_cvat_db("dropdb test_db") + + +@pytest.fixture(scope="function") +def changedb(): + restore_db() + + +@pytest.fixture(scope="class") +def dontchangedb(): + restore_db() + +@pytest.fixture(scope="function") +def restore_cvat_data(): + restore_data_volumes() diff --git a/tests/rest_api/test_analytics.py b/tests/rest_api/test_analytics.py index a5e76cc3..60685fe7 100644 --- 
a/tests/rest_api/test_analytics.py +++ b/tests/rest_api/test_analytics.py @@ -4,8 +4,9 @@ import pytest from http import HTTPStatus -from .utils.config import server_get +from rest_api.utils.config import server_get +@pytest.mark.usefixtures('dontchangedb') class TestGetAnalytics: endpoint = 'analytics/app/kibana' def _test_can_see(self, user): diff --git a/tests/rest_api/test_chache_policy.py b/tests/rest_api/test_chache_policy.py index 47fcf333..238ce8f6 100644 --- a/tests/rest_api/test_chache_policy.py +++ b/tests/rest_api/test_chache_policy.py @@ -4,7 +4,7 @@ from http import HTTPStatus import re -from .utils.config import server_get +from rest_api.utils.config import server_get class TestCachePolicy: diff --git a/tests/rest_api/test_check_objects_integrity.py b/tests/rest_api/test_check_objects_integrity.py index a8781060..c11ff086 100644 --- a/tests/rest_api/test_check_objects_integrity.py +++ b/tests/rest_api/test_check_objects_integrity.py @@ -6,23 +6,26 @@ import os.path as osp import glob import json from deepdiff import DeepDiff -from .utils import config +from rest_api.utils import config import pytest -@pytest.mark.parametrize('path', glob.glob(osp.join(config.ASSETS_DIR, '*.json'))) -def test_check_objects_integrity(path): - with open(path) as f: - endpoint = osp.basename(path).rsplit('.')[0] - if endpoint == 'annotations': - objects = json.load(f) - for jid, annotations in objects['job'].items(): - response = config.get_method('admin1', f'jobs/{jid}/annotations').json() - assert DeepDiff(annotations, response, ignore_order=True, - exclude_paths="root['version']") == {} - else: - response = config.get_method('admin1', endpoint, page_size='all') - json_objs = json.load(f) - resp_objs = response.json() +@pytest.mark.usefixtures('dontchangedb') +class TestGetResources: - assert DeepDiff(json_objs, resp_objs, ignore_order=True, - exclude_regex_paths=r"root\['results'\]\[\d+\]\['last_login'\]") == {} + @pytest.mark.parametrize('path', 
glob.glob(osp.join(config.ASSETS_DIR, '*.json'))) + def test_check_objects_integrity(self, path): + with open(path) as f: + endpoint = osp.basename(path).rsplit('.')[0] + if endpoint == 'annotations': + objects = json.load(f) + for jid, annotations in objects['job'].items(): + response = config.get_method('admin1', f'jobs/{jid}/annotations').json() + assert DeepDiff(annotations, response, ignore_order=True, + exclude_paths="root['version']") == {} + else: + response = config.get_method('admin1', endpoint, page_size='all') + json_objs = json.load(f) + resp_objs = response.json() + + assert DeepDiff(json_objs, resp_objs, ignore_order=True, + exclude_regex_paths=r"root\['results'\]\[\d+\]\['last_login'\]") == {} diff --git a/tests/rest_api/test_cloud_storages.py b/tests/rest_api/test_cloud_storages.py index 20dbbb07..1069c8e6 100644 --- a/tests/rest_api/test_cloud_storages.py +++ b/tests/rest_api/test_cloud_storages.py @@ -6,8 +6,9 @@ import pytest from http import HTTPStatus from deepdiff import DeepDiff -from .utils.config import get_method, patch_method, post_method +from rest_api.utils.config import get_method, patch_method, post_method +@pytest.mark.usefixtures('dontchangedb') class TestGetCloudStorage: def _test_can_see(self, user, storage_id, data, **kwargs): @@ -59,8 +60,8 @@ class TestGetCloudStorage: self._test_cannot_see(username, storage_id, org_id=org_id) -@pytest.mark.usefixtures("restore") -class TestPostCloudStorage: +@pytest.mark.usefixtures('changedb') +class TestPostCloudStorage(): _SPEC = { 'provider_type': 'AWS_S3_BUCKET', 'resource': 'test', @@ -121,7 +122,7 @@ class TestPostCloudStorage: else: self._test_cannot_create(username, self._SPEC, org_id=org_id) -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPatchCloudStorage: _SPEC = { 'display_name': 'New display name', diff --git a/tests/rest_api/test_invitations.py b/tests/rest_api/test_invitations.py index 32c7e4b8..fd327df5 100644 --- 
a/tests/rest_api/test_invitations.py +++ b/tests/rest_api/test_invitations.py @@ -4,9 +4,9 @@ from http import HTTPStatus import pytest -from .utils.config import post_method +from rest_api.utils.config import post_method -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestCreateInvitations: def _test_post_invitation_201(self, user, data, invitee, **kwargs): response = post_method(user, 'invitations', data, **kwargs) diff --git a/tests/rest_api/test_issues.py b/tests/rest_api/test_issues.py index b898037b..f2b02fee 100644 --- a/tests/rest_api/test_issues.py +++ b/tests/rest_api/test_issues.py @@ -7,9 +7,9 @@ from http import HTTPStatus from deepdiff import DeepDiff from copy import deepcopy -from .utils.config import post_method, patch_method +from rest_api.utils.config import post_method, patch_method -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPostIssues: def _test_check_response(self, user, data, is_allow, **kwargs): response = post_method(user, 'issues', data, **kwargs) @@ -78,9 +78,7 @@ class TestPostIssues: } self._test_check_response(username, data, is_allow, org_id=org) - - -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPatchIssues: def _test_check_response(self, user, issue_id, data, is_allow, **kwargs): response = patch_method(user, f'issues/{issue_id}', data, diff --git a/tests/rest_api/test_jobs.py b/tests/rest_api/test_jobs.py index be45a685..c336f3e1 100644 --- a/tests/rest_api/test_jobs.py +++ b/tests/rest_api/test_jobs.py @@ -5,7 +5,8 @@ from http import HTTPStatus from deepdiff import DeepDiff import pytest -from .utils.config import get_method, patch_method +from copy import deepcopy +from rest_api.utils.config import get_method, patch_method def get_job_staff(job, tasks, projects): job_staff = [] @@ -37,6 +38,7 @@ def filter_jobs(jobs, tasks, org): return jobs, kwargs +@pytest.mark.usefixtures('dontchangedb') class TestGetJobs: def 
_test_get_job_200(self, user, jid, data, **kwargs): response = get_method(user, f'jobs/{jid}', **kwargs) @@ -75,6 +77,7 @@ class TestGetJobs: else: self._test_get_job_403(user['username'], job['id'], **kwargs) +@pytest.mark.usefixtures('dontchangedb') class TestListJobs: def _test_list_jobs_200(self, user, data, **kwargs): response = get_method(user, 'jobs', **kwargs, page_size='all') @@ -110,6 +113,7 @@ class TestListJobs: else: self._test_list_jobs_403(user['username'], **kwargs) +@pytest.mark.usefixtures('dontchangedb') class TestGetAnnotations: def _test_get_job_annotations_200(self, user, jid, data, **kwargs): response = get_method(user, f'jobs/{jid}/annotations', **kwargs) @@ -180,7 +184,8 @@ class TestGetAnnotations: job_id, annotations['job'][str(job_id)], **kwargs) else: self._test_get_job_annotations_403(username, job_id, **kwargs) -@pytest.mark.usefixtures("restore") + +@pytest.mark.usefixtures('changedb') class TestPatchJobAnnotations: _ORG = 2 @@ -195,7 +200,7 @@ class TestPatchJobAnnotations: @pytest.fixture(scope='class') def request_data(self, annotations): def get_data(jid): - data = annotations['job'][str(jid)].copy() + data = deepcopy(annotations['job'][str(jid)]) data['shapes'][0].update({'points': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0]}) data['version'] += 1 return data @@ -259,7 +264,7 @@ class TestPatchJobAnnotations: self._test_check_respone(is_allow, response, data) -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPatchJob: _ORG = 2 @@ -277,7 +282,7 @@ class TestPatchJob: def expected_data(self, jobs, users): keys = ['url', 'id', 'username', 'first_name', 'last_name'] def find(job_id, assignee_id): - data = jobs[job_id].copy() + data = deepcopy(jobs[job_id]) data['assignee'] = dict(filter(lambda a: a[0] in keys, users[assignee_id].items())) return data @@ -290,7 +295,6 @@ class TestPatchJob: members -= {assignee_id(jobs[jid]), user_id} return members.pop() return find_new_assignee - 
@pytest.mark.parametrize('org', [2]) @pytest.mark.parametrize('role, task_staff, is_allow', [ ('maintainer', False, True), ('owner', False, True), diff --git a/tests/rest_api/test_memberships.py b/tests/rest_api/test_memberships.py index 245f1607..10ccc6ca 100644 --- a/tests/rest_api/test_memberships.py +++ b/tests/rest_api/test_memberships.py @@ -6,8 +6,9 @@ import pytest from http import HTTPStatus from deepdiff import DeepDiff -from .utils.config import get_method, patch_method +from rest_api.utils.config import get_method, patch_method +@pytest.mark.usefixtures('dontchangedb') class TestGetMemberships: def _test_can_see_memberships(self, user, data, **kwargs): response = get_method(user, 'memberships', **kwargs) @@ -40,9 +41,7 @@ class TestGetMemberships: non_org1_users = ['user2', 'worker3'] for user in non_org1_users: self._test_cannot_see_memberships(user, org_id=1) - - -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPatchMemberships: _ORG = 2 diff --git a/tests/rest_api/test_organizations.py b/tests/rest_api/test_organizations.py index 4411883b..68bdc67c 100644 --- a/tests/rest_api/test_organizations.py +++ b/tests/rest_api/test_organizations.py @@ -4,8 +4,9 @@ from http import HTTPStatus import pytest -from .utils.config import get_method, options_method, patch_method, delete_method +from rest_api.utils.config import get_method, options_method, patch_method, delete_method from deepdiff import DeepDiff +from copy import deepcopy class TestMetadataOrganizations: _ORG = 2 @@ -33,6 +34,7 @@ class TestMetadataOrganizations: response = options_method(user, f'organizations/{self._ORG}') assert response.status_code == HTTPStatus.OK +@pytest.mark.usefixtures('dontchangedb') class TestGetOrganizations: _ORG = 2 @@ -60,7 +62,7 @@ class TestGetOrganizations: else: assert response.status_code == HTTPStatus.NOT_FOUND -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPatchOrganizations: _ORG = 2 @@ 
-71,7 +73,7 @@ class TestPatchOrganizations: @pytest.fixture(scope='class') def expected_data(self, organizations, request_data): - data = organizations[self._ORG].copy() + data = deepcopy(organizations[self._ORG]) data.update(request_data) return data @@ -101,7 +103,7 @@ class TestPatchOrganizations: else: assert response.status_code != HTTPStatus.OK -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestDeleteOrganizations: _ORG = 2 diff --git a/tests/rest_api/test_projects.py b/tests/rest_api/test_projects.py index 4b32f554..48356f4e 100644 --- a/tests/rest_api/test_projects.py +++ b/tests/rest_api/test_projects.py @@ -11,6 +11,7 @@ import pytest from .utils.config import get_method, post_files_method, post_method +@pytest.mark.usefixtures('dontchangedb') class TestGetProjects: def _find_project_by_user_org(self, user, projects, is_project_staff_flag, is_project_staff): if is_project_staff_flag: @@ -112,7 +113,7 @@ class TestGetProjects: self._test_response_200(user_in_project['username'], project_id, org_id=user_in_project['org']) -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPostProjects: def _test_create_project_201(self, user, spec, **kwargs): response = post_method(user, '/projects', spec, **kwargs) @@ -199,7 +200,7 @@ class TestPostProjects: } self._test_create_project_201(user['username'], spec, org_id=user['org']) -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures("changedb") @pytest.mark.usefixtures("restore_cvat_data") class TestImportExportDatasetProject: def _test_export_project(self, username, project_id, format_name): @@ -217,7 +218,7 @@ class TestImportExportDatasetProject: return response def _test_import_project(self, username, project_id, format_name, data): - response = post_files_method(username, f'projects/{project_id}/dataset', data, + response = post_files_method(username, f'projects/{project_id}/dataset', None, data, format=format_name) assert 
response.status_code == HTTPStatus.ACCEPTED diff --git a/tests/rest_api/test_remote_url.py b/tests/rest_api/test_remote_url.py index 241b9210..a11e8aff 100644 --- a/tests/rest_api/test_remote_url.py +++ b/tests/rest_api/test_remote_url.py @@ -7,7 +7,7 @@ from time import sleep import pytest -from .utils.config import get_method, post_method +from rest_api.utils.config import get_method, post_method def _post_task_remote_data(username, task_id, resources): @@ -29,7 +29,7 @@ def _wait_until_task_is_created(username, task_id): sleep(1) -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestGetAnalytics: task_id = 12 def _test_can_create(self, user, task_id, resources): diff --git a/tests/rest_api/test_tasks.py b/tests/rest_api/test_tasks.py index 9a082721..4ccda838 100644 --- a/tests/rest_api/test_tasks.py +++ b/tests/rest_api/test_tasks.py @@ -2,6 +2,7 @@ # # SPDX-License-Identifier: MIT +from copy import deepcopy from http import HTTPStatus from io import BytesIO from time import sleep @@ -32,6 +33,7 @@ def generate_image_files(count): return images +@pytest.mark.usefixtures('dontchangedb') class TestGetTasks: def _test_task_list_200(self, user, project_id, data, exclude_paths = '', **kwargs): response = get_method(user, f'projects/{project_id}/tasks', **kwargs) @@ -66,9 +68,6 @@ class TestGetTasks: assert response.status_code == HTTPStatus.OK assert any(_task['id'] == task['id'] for _task in response_data['results']) - # [sandbox] admin can see task data in project even he has no ownerships in this project - # [sandbox] business cannot see task data in project if he has no ownerships in this project - # [sandbox] user that has one of these ownerships: [Project:owner, Project:assignee] can see task data @pytest.mark.parametrize('project_id', [1]) @pytest.mark.parametrize('groups, is_staff, is_allow', [ ('admin', False, True), @@ -81,18 +80,14 @@ class TestGetTasks: self._test_users_to_see_task_list(project_id, tasks, users, is_staff, 
is_allow, is_project_staff) - # [sandbox] user that has one of these ownerships: [Owner, Assignee] can see task data @pytest.mark.parametrize('project_id, groups', [(1, 'user')]) - def test_task_assigneed_to_see_task(self, project_id, groups, users, tasks, find_users, is_task_staff): + def test_task_assigned_to_see_task(self, project_id, groups, users, tasks, find_users, is_task_staff): users = find_users(privilege=groups) tasks = list(filter(lambda x: x['project_id'] == project_id and x['assignee'], tasks)) assert len(tasks) self._test_assigned_users_to_see_task_data(tasks, users, is_task_staff) - # [organization] maintainer can see task data even if he has no ownerships in corresponding Project, Task - # [organization] supervisor cannot see task data if he has no ownerships in corresponding Project, Task - # [organization] worker (as role) that has one of these ownerships: [Project:owner, Project:assignee], can see task data @pytest.mark.parametrize('org, project_id', [({'id': 2, 'slug': 'org2'}, 2)]) @pytest.mark.parametrize('role, is_staff, is_allow', [ ('maintainer', False, True), @@ -105,7 +100,6 @@ class TestGetTasks: self._test_users_to_see_task_list(project_id, tasks, users, is_staff, is_allow, is_project_staff, org=org['slug']) - # [organization] worker (as role) that has one of these ownerships: [Owner, Assignee], can see task data @pytest.mark.parametrize('org, project_id, role', [ ({'id': 2, 'slug': 'org2'}, 2, 'worker') ]) @@ -117,7 +111,7 @@ class TestGetTasks: self._test_assigned_users_to_see_task_data(tasks, users, is_task_staff, org=org['slug']) -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPostTasks: def _test_create_task_201(self, user, spec, **kwargs): response = post_method(user, '/tasks', spec, **kwargs) @@ -127,31 +121,6 @@ class TestPostTasks: response = post_method(user, '/tasks', spec, **kwargs) assert response.status_code == HTTPStatus.FORBIDDEN - @staticmethod - def 
_wait_until_task_is_created(username, task_id): - url = f'tasks/{task_id}/status' - - while True: - response = get_method(username, url) - response_json = response.json() - if response_json['state'] == 'Finished' or response_json['state'] == 'Failed': - return response - sleep(1) - - def _test_create_task_with_images(self, username, spec, data, files): - response = post_method(username, '/tasks', spec) - assert response.status_code == HTTPStatus.CREATED - task_id = response.json()['id'] - - response = post_files_method(username, f'/tasks/{task_id}/data', data, files) - assert response.status_code == HTTPStatus.ACCEPTED - - response = self._wait_until_task_is_created(username, task_id) - response_json = response.json() - assert response_json['state'] == 'Finished' - - return task_id - def _test_users_to_create_task_in_project(self, project_id, users, is_staff, is_allow, is_project_staff, **kwargs): if is_staff: users = [user for user in users if is_project_staff(user['id'], project_id) ] @@ -171,9 +140,6 @@ class TestPostTasks: else: self._test_create_task_403(username, spec, **kwargs) - # [sandbox] admin can create task in project even he has no ownerships in this project - # [sandbox] business cannot create task in project if he has no ownerships in this project - # [sandbox] user that has one of these ownerships: [Project:owner, Project:assignee] and has less than 10 task can create task in project @pytest.mark.parametrize('project_id', [1]) @pytest.mark.parametrize('groups, is_staff, is_allow', [ ('admin', False, True), @@ -184,7 +150,6 @@ class TestPostTasks: users = find_users(privilege=groups) self._test_users_to_create_task_in_project(project_id, users, is_staff, is_allow, is_project_staff) - # [organization] worker cannot create task in project even he has no ownerships in this project @pytest.mark.parametrize('org, project_id', [({'id': 2, 'slug': 'org2'}, 2)]) @pytest.mark.parametrize('role, is_staff, is_allow', [ ('worker', False, False), @@ -193,32 
+158,7 @@ class TestPostTasks: users = find_users(org=org['id'], role=role) self._test_users_to_create_task_in_project(project_id, users, is_staff, is_allow, is_project_staff, org=org['slug']) - def test_can_create_task_with_defined_start_and_stop_frames(self): - username = 'admin1' - task_spec = { - 'name': f'test {username} to create a task with defined start and stop frames', - "labels": [{ - "name": "car", - "color": "#ff00ff" - }], - } - - task_data = { - 'image_quality': 75, - 'start_frame': 2, - 'stop_frame': 5 - } - task_files = { - f'client_files[{i}]': image for i, image in enumerate(generate_image_files(7)) - } - - task_id = self._test_create_task_with_images(username, task_spec, task_data, task_files) - - # check task size - response = get_method(username, f'tasks/{task_id}') - response_json = response.json() - assert response_json['size'] == 4 - +@pytest.mark.usefixtures('dontchangedb') class TestGetData: _USERNAME = 'user1' @@ -232,9 +172,9 @@ class TestGetData: assert response.status_code == HTTPStatus.OK assert response.headers['Content-Type'] == content_type -@pytest.mark.usefixtures("restore") +@pytest.mark.usefixtures('changedb') class TestPatchTaskAnnotations: - def _test_check_respone(self, is_allow, response, data=None): + def _test_check_response(self, is_allow, response, data=None): if is_allow: assert response.status_code == HTTPStatus.OK assert DeepDiff(data, response.json(), @@ -245,7 +185,7 @@ class TestPatchTaskAnnotations: @pytest.fixture(scope='class') def request_data(self, annotations): def get_data(tid): - data = annotations['task'][str(tid)].copy() + data = deepcopy(annotations['task'][str(tid)]) data['shapes'][0].update({'points': [2.0, 3.0, 4.0, 5.0, 6.0, 7.0]}) data['version'] += 1 return data @@ -269,7 +209,7 @@ class TestPatchTaskAnnotations: response = patch_method(username, f'tasks/{tid}/annotations', data, org_id=org, action='update') - self._test_check_respone(is_allow, response, data) + 
self._test_check_response(is_allow, response, data) @pytest.mark.parametrize('org', [2]) @pytest.mark.parametrize('role, task_staff, is_allow', [ @@ -288,10 +228,26 @@ class TestPatchTaskAnnotations: response = patch_method(username, f'tasks/{tid}/annotations', data, org_id=org, action='update') - self._test_check_respone(is_allow, response, data) + self._test_check_response(is_allow, response, data) + +@pytest.mark.usefixtures('dontchangedb') +class TestGetTaskDataset: + def _test_export_project(self, username, tid, **kwargs): + response = get_method(username, f'tasks/{tid}/dataset', **kwargs) + assert response.status_code == HTTPStatus.ACCEPTED + + response = get_method(username, f'tasks/{tid}/dataset', **kwargs) + assert response.status_code == HTTPStatus.CREATED + + response = get_method(username, f'tasks/{tid}/dataset', action='download', **kwargs) + assert response.status_code == HTTPStatus.OK + + def test_admin_can_export_task_dataset(self, tasks_with_shapes): + task = tasks_with_shapes[0] + self._test_export_project('admin1', task['id'], format='CVAT for images 1.1') -@pytest.mark.usefixtures("restore") -class TestExportDatasetTask: +@pytest.mark.usefixtures("changedb") +class TestPostTaskData: @staticmethod def _wait_until_task_is_created(username, task_id): url = f'tasks/{task_id}/status' diff --git a/tests/rest_api/test_users.py b/tests/rest_api/test_users.py index 7fbccd4d..8caf2240 100644 --- a/tests/rest_api/test_users.py +++ b/tests/rest_api/test_users.py @@ -3,10 +3,14 @@ # SPDX-License-Identifier: MIT from http import HTTPStatus + +import pytest from deepdiff import DeepDiff -from .utils.config import get_method +from rest_api.utils.config import get_method + +@pytest.mark.usefixtures('dontchangedb') class TestGetUsers: def _test_can_see(self, user, data, endpoint='users', exclude_paths='', **kwargs): response = get_method(user, endpoint, **kwargs) diff --git a/tests/rest_api/utils/__init__.py b/tests/rest_api/utils/__init__.py new file mode 100644 
index 00000000..1fd19bae --- /dev/null +++ b/tests/rest_api/utils/__init__.py @@ -0,0 +1,3 @@ +# Copyright (C) 2021 Intel Corporation +# +# SPDX-License-Identifier: MIT \ No newline at end of file diff --git a/tests/rest_api/utils/config.py b/tests/rest_api/utils/config.py index 7183a7b8..a114a7d6 100644 --- a/tests/rest_api/utils/config.py +++ b/tests/rest_api/utils/config.py @@ -5,8 +5,8 @@ import os.path as osp import requests -ROOT_DIR = osp.dirname(__file__) -ASSETS_DIR = osp.abspath(osp.join(ROOT_DIR, '..', 'assets')) +ROOT_DIR = __file__[:__file__.rfind(osp.join("utils", ""))] +ASSETS_DIR = osp.abspath(osp.join(ROOT_DIR, 'assets')) # Suppress the warning from Bandit about hardcoded passwords USER_PASS = '!Q@W#E$R' # nosec BASE_URL = 'http://localhost:8080/'