Helm tests (#4949)

Branch: main
Andrey Zhavoronkov committed via GitHub, 3 years ago
parent 5a8b5dfc63, commit e22eac7d9b

@@ -0,0 +1,102 @@
name: Helm
on:
  pull_request:
    types: [edited, ready_for_review, opened, synchronize, reopened]
  workflow_dispatch:
jobs:
  check_changes:
    runs-on: ubuntu-latest
    outputs:
      helm_dir_changed: ${{ steps.check_updates.outputs.helm_dir_changed }}
    steps:
      - uses: jitterbit/get-changed-files@v1
        id: files
        continue-on-error: true
      - name: Run check
        id: check_updates
        env:
          PR_FILES_AM: ${{ steps.files.outputs.added_modified }}
          PR_FILES_RENAMED: ${{ steps.files.outputs.renamed }}
        run: |
          PR_FILES="$PR_FILES_AM $PR_FILES_RENAMED"
          for FILE in $PR_FILES; do
            if [[ $FILE == helm-chart/* ]] ; then
              echo "::set-output name=helm_dir_changed::true"
              break
            fi
          done
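          # Note: `::set-output` is deprecated on newer GitHub runners; the equivalent
          # there is: echo "helm_dir_changed=true" >> "$GITHUB_OUTPUT"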
  testing:
    needs: check_changes
    if: needs.check_changes.outputs.helm_dir_changed == 'true'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Start minikube
        uses: medyagh/setup-minikube@latest
      - name: Try the cluster!
        run: kubectl get pods -A
      - name: Pull images
        run: |
          export SHELL=/bin/bash
          eval $(minikube -p minikube docker-env)
          docker pull cvat/server
          docker pull cvat/ui
          echo -n "verifying images:"
          docker images
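          # `minikube docker-env` points the Docker CLI at the daemon inside minikube, so the
          # cvat/server and cvat/ui images pulled here are visible to the cluster directly and
          # never have to be pushed to a registry.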
      - uses: azure/setup-helm@v3
        with:
          version: 'v3.9.4'
      - name: Deploy to minikube
        run: |
          printf "traefik:\n  service:\n    externalIPs:\n      - $(minikube ip)\n" > helm-chart/values.override.yaml
          find cvat/apps/iam/rules -name "*.rego" -and ! -name '*test*' -exec basename {} \; | tar -czf helm-chart/rules.tar.gz -C cvat/apps/iam/rules/ -T -
          cd helm-chart
          helm dependency update
          cd ..
          helm upgrade -n default cvat -i --create-namespace helm-chart -f helm-chart/values.yaml -f helm-chart/values.override.yaml
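          # values.override.yaml exposes traefik on the minikube IP, rules.tar.gz bundles the
          # non-test OPA .rego rules where the chart expects them, and `helm upgrade -i`
          # installs the release on the first run and upgrades it afterwards.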
      - name: Update test config
        run: |
          sed -i -e 's$http://localhost:8080$http://cvat.local:80$g' tests/python/shared/utils/config.py
          find tests/python/shared/assets/ -type f -name '*.json' | xargs sed -i -e 's$http://localhost:8080$http://cvat.local$g'
          echo "$(minikube ip) cvat.local" | sudo tee -a /etc/hosts
      - name: Wait for CVAT to be ready
        run: |
          max_tries=30
          while [[ $(kubectl get pods -l component=server -o 'jsonpath={..status.conditions[?(@.type=="Ready")].status}') != "True" && max_tries -gt 0 ]]; do echo "waiting for CVAT pod" && (( max_tries-- )) && sleep 5; done
          while [[ $(kubectl get pods -l app.kubernetes.io/name=postgresql -o 'jsonpath={..status.conditions[?(@.type=="Ready")].status}') != "True" && max_tries -gt 0 ]]; do echo "waiting for DB pod" && (( max_tries-- )) && sleep 5; done
          kubectl get pods
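          # Both loops share the same max_tries counter, so the combined wait for the server
          # and database pods is capped at roughly 30 * 5 s.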
      - name: Generate schema
        run: |
          mkdir cvat-sdk/schema
          kubectl exec $(kubectl get pods -l component=server -o jsonpath='{.items[0].metadata.name}') -- /bin/bash -c "python manage.py spectacular --file /tmp/schema.yml"
          kubectl cp $(kubectl get pods -l component=server -o jsonpath='{.items[0].metadata.name}'):/tmp/schema.yml cvat-sdk/schema/schema.yml
          pip3 install --user -r cvat-sdk/gen/requirements.txt
          cd cvat-sdk/
          gen/generate.sh
          cd ..
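          # drf-spectacular dumps the OpenAPI schema inside the server pod; it is copied out
          # with `kubectl cp` and fed to gen/generate.sh, which builds the Python SDK from it.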
      - name: Install test requirements
        run: |
          pip3 install --user cvat-sdk/
          pip3 install --user cvat-cli/
          pip3 install --user -r tests/python/requirements.txt
      - name: REST API and SDK tests
        run: |
          pytest --platform=kube \
            --ignore=tests/python/rest_api/test_cloud_storages.py \
            --ignore=tests/python/rest_api/test_analytics.py \
            --ignore=tests/python/rest_api/test_resource_import_export.py \
            -k 'not create_task_with_cloud_storage_files' \
            tests/python
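          # The cloud storage, analytics and import/export suites are skipped here, presumably
          # because the extra services they rely on are not part of this Helm deployment.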

@@ -32,7 +32,7 @@ def _wait_until_task_is_created(username, task_id):
@pytest.mark.usefixtures('changedb')
class TestGetAnalytics:
class TestCreateFromRemote:
task_id = 12
def _test_can_create(self, user, task_id, resources):
response = _post_task_remote_data(user, task_id, resources)

@@ -63,15 +63,25 @@ def pytest_addoption(parser):
help="Update data.json without running tests. (default: %(default)s)",
)
group._addoption(
"--platform",
action="store",
default="local",
choices=("kube", "local"),
help="Platform identifier - 'kube' or 'local'. (default: %(default)s)",
)
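# The flag defaults to "local", so existing docker-compose runs are unaffected; the Helm
# workflow above opts in with `pytest --platform=kube`.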
def _run(command, capture_output=True):
_command = command.split() if isinstance(command, str) else command
try:
stdout, stderr = "", ""
if capture_output:
proc = run(command.split(), check=True, stdout=PIPE, stderr=PIPE) # nosec
proc = run(_command, check=True, stdout=PIPE, stderr=PIPE) # nosec
stdout, stderr = proc.stdout.decode(), proc.stderr.decode()
else:
proc = run(command.split(), check=True) # nosec
proc = run(_command, check=True) # nosec
return stdout, stderr
except CalledProcessError as exc:
stderr = exc.stderr.decode() if capture_output else "see above"
@@ -81,24 +91,51 @@ def _run(command, capture_output=True):
"Add `-s` option to see more details"
)
def _kube_get_server_pod_name():
output, _ = _run("kubectl get pods -l component=server -o jsonpath={.items[0].metadata.name}")
return output
def _kube_get_db_pod_name():
output, _ = _run("kubectl get pods -l app.kubernetes.io/name=postgresql -o jsonpath={.items[0].metadata.name}")
return output
def docker_cp(source, target):
_run(f"docker container cp {source} {target}")
def exec_cvat(command):
def kube_cp(source, target):
_run(f"kubectl cp {source} {target}")
def docker_exec_cvat(command):
_run(f"docker exec {PREFIX}_cvat_server_1 {command}")
def exec_cvat_db(command):
def kube_exec_cvat(command):
pod_name = _kube_get_server_pod_name()
_run(f"kubectl exec {pod_name} -- {command}")
def docker_exec_cvat_db(command):
_run(f"docker exec {PREFIX}_cvat_db_1 {command}")
def restore_db():
exec_cvat_db(
def kube_exec_cvat_db(command):
pod_name = _kube_get_db_pod_name()
_run(["kubectl", "exec", pod_name, "--"] + command)
def docker_restore_db():
docker_exec_cvat_db(
"psql -U root -d postgres -v from=test_db -v to=cvat -f /tmp/restore.sql"
)
def kube_restore_db():
kube_exec_cvat_db(
["/bin/sh", "-c", "PGPASSWORD=cvat_postgresql_postgres psql -U postgres -d postgres -v from=test_db -v to=cvat -f /tmp/restore.sql"]
)
def running_containers():
return [cn for cn in _run("docker ps --format {{.Names}}")[0].split("\n") if cn]
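The reason _run now also accepts a list is visible in the kube helpers above: a quoted `sh -c` payload has to stay a single argument and would be torn apart by command.split(). A minimal usage sketch of the two call styles (pod_name stands for a value returned by _kube_get_db_pod_name()):

    _run("kubectl get pods")                                 # string form: split on whitespace, as before
    _run(["kubectl", "exec", pod_name, "--",                 # list form: elements are passed through verbatim,
          "/bin/sh", "-c", "psql -U postgres -d postgres"])  # so payloads containing spaces survive intact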
@@ -147,12 +184,20 @@ def wait_for_server():
sleep(5)
def restore_data_volumes():
def docker_restore_data_volumes():
docker_cp(
osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"),
f"{PREFIX}_cvat_server_1:/tmp/cvat_data.tar.bz2",
)
exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
docker_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
def kube_restore_data_volumes():
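# Mirrors docker_restore_data_volumes: copy the data archive into the server pod, then
# unpack it into /home/django/data inside the container.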
pod_name = _kube_get_server_pod_name()
kube_cp(
osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"),
f"{pod_name}:/tmp/cvat_data.tar.bz2",
)
kube_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
def start_services(rebuild=False):
@@ -168,7 +213,7 @@ def start_services(rebuild=False):
capture_output=False,
)
restore_data_volumes()
docker_restore_data_volumes()
docker_cp(
osp.join(CVAT_DB_DIR, "restore.sql"), f"{PREFIX}_cvat_db_1:/tmp/restore.sql"
)
@@ -182,61 +227,98 @@ def services(request):
rebuild = request.config.getoption("--rebuild")
cleanup = request.config.getoption("--cleanup")
dumpdb = request.config.getoption("--dumpdb")
platform = request.config.getoption("--platform")
if platform == 'kube' and any((stop, start, rebuild, cleanup, dumpdb)):
raise Exception('''--platform=kube is not compatible with any of the other options
--stop-services --start-services --rebuild --cleanup --dumpdb''')
if start and stop:
raise Exception("--start-services and --stop-services are incompatible")
if platform == 'local':
if start and stop:
raise Exception("--start-services and --stop-services are incompatible")
if dumpdb:
dump_db()
pytest.exit("data.json has been updated", returncode=0)
if dumpdb:
dump_db()
pytest.exit("data.json has been updated", returncode=0)
if cleanup:
delete_compose_files()
pytest.exit("All generated test files have been deleted", returncode=0)
if cleanup:
delete_compose_files()
pytest.exit("All generated test files have been deleted", returncode=0)
if not all([osp.exists(f) for f in CONTAINER_NAME_FILES]) or rebuild:
delete_compose_files()
create_compose_files()
if stop:
_run(
f"docker-compose -p {PREFIX} -f {' -f '.join(DC_FILES)} down -v",
capture_output=False,
)
pytest.exit("All testing containers are stopped", returncode=0)
if not all([osp.exists(f) for f in CONTAINER_NAME_FILES]) or rebuild:
delete_compose_files()
create_compose_files()
start_services(rebuild)
wait_for_server()
if stop:
_run(
f"docker-compose -p {PREFIX} -f {' -f '.join(DC_FILES)} down -v",
capture_output=False,
docker_exec_cvat("python manage.py loaddata /tmp/data.json")
docker_exec_cvat_db(
"psql -U root -d postgres -v from=cvat -v to=test_db -f /tmp/restore.sql"
)
pytest.exit("All testing containers are stopped", returncode=0)
start_services(rebuild)
wait_for_server()
if start:
pytest.exit(
"All necessary containers have been created and started.", returncode=0
)
exec_cvat("python manage.py loaddata /tmp/data.json")
exec_cvat_db(
"psql -U root -d postgres -v from=cvat -v to=test_db -f /tmp/restore.sql"
)
yield
if start:
pytest.exit(
"All necessary containers have been created and started.", returncode=0
docker_restore_db()
docker_exec_cvat_db("dropdb test_db")
elif platform == 'kube':
kube_restore_data_volumes()
server_pod_name = _kube_get_server_pod_name()
db_pod_name = _kube_get_db_pod_name()
kube_cp(
osp.join(CVAT_DB_DIR, "restore.sql"), f"{db_pod_name}:/tmp/restore.sql"
)
kube_cp(osp.join(CVAT_DB_DIR, "data.json"), f"{server_pod_name}:/tmp/data.json")
yield
wait_for_server()
kube_exec_cvat("python manage.py loaddata /tmp/data.json")
kube_exec_cvat_db(
["/bin/sh", "-c", "PGPASSWORD=cvat_postgresql_postgres psql -U postgres -d postgres -v from=cvat -v to=test_db -f /tmp/restore.sql"]
)
restore_db()
exec_cvat_db("dropdb test_db")
yield
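In kube mode the services fixture does not start or stop anything: it assumes the Helm release deployed by the workflow above is already running, copies restore.sql and data.json into the running pods, waits for the server and loads the test data before yielding.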
@pytest.fixture(scope="function")
def changedb():
def changedb(request):
# Note that autouse fixtures are executed first within their scope, so be aware of the order:
# pre-test DB setup (e.g. a class-level autouse setup() method) may be cleaned up by this restore.
# https://docs.pytest.org/en/stable/reference/fixtures.html#autouse-fixtures-are-executed-first-within-their-scope
restore_db()
platform = request.config.getoption("--platform")
if platform == "local":
docker_restore_db()
else:
kube_restore_db()
@pytest.fixture(scope="class")
def dontchangedb():
restore_db()
def dontchangedb(request):
platform = request.config.getoption("--platform")
if platform == "local":
docker_restore_db()
else:
kube_restore_db()
@pytest.fixture(scope="function")
def restore_cvat_data():
restore_data_volumes()
def restore_cvat_data(request):
platform = request.config.getoption("--platform")
if platform == "local":
docker_restore_data_volumes()
else:
kube_restore_data_volumes()
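Each of the three fixtures repeats the same platform dispatch. A hypothetical helper (not part of this change, shown only to make the pattern explicit) could factor it out:

    # Hypothetical sketch: pick the docker-compose variant locally, the kubectl variant otherwise.
    def _for_platform(request, local_fn, kube_fn):
        if request.config.getoption("--platform") == "local":
            local_fn()
        else:
            kube_fn()

    # e.g. changedb would reduce to: _for_platform(request, docker_restore_db, kube_restore_db)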
