Migrate tests/python from os.path to pathlib (#5426)

`pathlib` improves code readability and type safety. It is already used
in some of the tests; convert all remaining `os.path` usage to `pathlib`
equivalents.
main
Roman Donchenko 3 years ago committed by GitHub
parent 3f9ab7cf68
commit 481630e719
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -71,7 +71,7 @@ class TestCLI:
@pytest.fixture @pytest.fixture
def fxt_new_task(self): def fxt_new_task(self):
files = generate_images(str(self.tmp_path), 5) files = generate_images(self.tmp_path, 5)
task = self.client.tasks.create_from_data( task = self.client.tasks.create_from_data(
spec={ spec={
@ -79,7 +79,7 @@ class TestCLI:
"labels": [{"name": "car"}, {"name": "person"}], "labels": [{"name": "car"}, {"name": "person"}],
}, },
resource_type=ResourceType.LOCAL, resource_type=ResourceType.LOCAL,
resources=files, resources=list(map(os.fspath, files)),
) )
return task return task
@ -100,13 +100,13 @@ class TestCLI:
return self.stdout.getvalue() return self.stdout.getvalue()
def test_can_create_task_from_local_images(self): def test_can_create_task_from_local_images(self):
files = generate_images(str(self.tmp_path), 5) files = generate_images(self.tmp_path, 5)
stdout = self.run_cli( stdout = self.run_cli(
"create", "create",
"test_task", "test_task",
ResourceType.LOCAL.name, ResourceType.LOCAL.name,
*files, *map(os.fspath, files),
"--labels", "--labels",
json.dumps([{"name": "car"}, {"name": "person"}]), json.dumps([{"name": "car"}, {"name": "person"}]),
"--completion_verification_period", "--completion_verification_period",

@ -3,10 +3,9 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import os
import os.path as osp
import unittest import unittest
from typing import Any, Union from pathlib import Path
from typing import Any, List, Union
from shared.utils.helpers import generate_image_file from shared.utils.helpers import generate_image_file
@ -22,12 +21,11 @@ def run_cli(test: Union[unittest.TestCase, Any], *args: str, expected_code: int
assert expected_code == main(args) assert expected_code == main(args)
def generate_images(dst_dir: str, count: int): def generate_images(dst_dir: Path, count: int) -> List[Path]:
filenames = [] filenames = []
os.makedirs(dst_dir, exist_ok=True) dst_dir.mkdir(parents=True, exist_ok=True)
for i in range(count): for i in range(count):
filename = osp.join(dst_dir, f"img_{i}.jpg") filename = dst_dir / f"img_{i}.jpg"
with open(filename, "wb") as f: filename.write_bytes(generate_image_file().getvalue())
f.write(generate_image_file().getvalue())
filenames.append(filename) filenames.append(filename)
return filenames return filenames

@ -3,9 +3,8 @@
# #
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import glob
import json import json
import os.path as osp from pathlib import Path
import pytest import pytest
from deepdiff import DeepDiff from deepdiff import DeepDiff
@ -15,10 +14,10 @@ from shared.utils import config
@pytest.mark.usefixtures("restore_db_per_class") @pytest.mark.usefixtures("restore_db_per_class")
class TestGetResources: class TestGetResources:
@pytest.mark.parametrize("path", glob.glob(osp.join(config.ASSETS_DIR, "*.json"))) @pytest.mark.parametrize("path", config.ASSETS_DIR.glob("*.json"))
def test_check_objects_integrity(self, path): def test_check_objects_integrity(self, path: Path):
with open(path) as f: with open(path) as f:
endpoint = osp.basename(path).rsplit(".")[0] endpoint = path.stem
if endpoint == "annotations": if endpoint == "annotations":
objects = json.load(f) objects = json.load(f)
for jid, annotations in objects["job"].items(): for jid, annotations in objects["job"].items():

@ -3,7 +3,6 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import json import json
import os.path as osp
from http import HTTPStatus from http import HTTPStatus
from time import sleep from time import sleep
@ -24,7 +23,7 @@ from shared.utils.config import delete_method, get_method, patch_method, post_me
def target_url(): def target_url():
env_data = {} env_data = {}
with open(osp.join(CVAT_ROOT_DIR, "tests", "python", "webhook_receiver", ".env"), "r") as f: with open(CVAT_ROOT_DIR / "tests/python/webhook_receiver/.env", "r") as f:
for line in f: for line in f:
name, value = tuple(line.strip().split("=")) name, value = tuple(line.strip().split("="))
env_data[name] = value env_data[name] = value

@ -3,7 +3,7 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import io import io
import os.path as osp import os
from logging import Logger from logging import Logger
from pathlib import Path from pathlib import Path
from typing import Tuple from typing import Tuple
@ -103,18 +103,18 @@ class TestJobUsecases:
pbar = make_pbar(file=pbar_out) pbar = make_pbar(file=pbar_out)
task_id = fxt_new_task.id task_id = fxt_new_task.id
path = str(self.tmp_path / f"task_{task_id}-cvat.zip") path = self.tmp_path / f"task_{task_id}-cvat.zip"
job_id = fxt_new_task.get_jobs()[0].id job_id = fxt_new_task.get_jobs()[0].id
job = self.client.jobs.retrieve(job_id) job = self.client.jobs.retrieve(job_id)
job.export_dataset( job.export_dataset(
format_name="CVAT for images 1.1", format_name="CVAT for images 1.1",
filename=path, filename=os.fspath(path),
pbar=pbar, pbar=pbar,
include_images=include_images, include_images=include_images,
) )
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1] assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
assert osp.isfile(path) assert path.is_file()
assert self.stdout.getvalue() == "" assert self.stdout.getvalue() == ""
def test_can_download_preview(self, fxt_new_task: Task): def test_can_download_preview(self, fxt_new_task: Task):
@ -139,7 +139,7 @@ class TestJobUsecases:
filename_pattern="frame-{frame_id}{frame_ext}", filename_pattern="frame-{frame_id}{frame_ext}",
) )
assert osp.isfile(self.tmp_path / "frame-0.jpg") assert (self.tmp_path / "frame-0.jpg").is_file()
assert self.stdout.getvalue() == "" assert self.stdout.getvalue() == ""
def test_can_upload_annotations(self, fxt_new_task: Task, fxt_coco_file: Path): def test_can_upload_annotations(self, fxt_new_task: Task, fxt_coco_file: Path):

@ -4,7 +4,7 @@
import io import io
import json import json
import os.path as osp import os
import zipfile import zipfile
from logging import Logger from logging import Logger
from pathlib import Path from pathlib import Path
@ -112,7 +112,7 @@ class TestTaskUsecases:
task_files = generate_image_files(7) task_files = generate_image_files(7)
for i, f in enumerate(task_files): for i, f in enumerate(task_files):
fname = self.tmp_path / osp.basename(f.name) fname = self.tmp_path / f.name
with fname.open("wb") as fd: with fname.open("wb") as fd:
fd.write(f.getvalue()) fd.write(f.getvalue())
task_files[i] = str(fname) task_files[i] = str(fname)
@ -252,17 +252,17 @@ class TestTaskUsecases:
pbar = make_pbar(file=pbar_out) pbar = make_pbar(file=pbar_out)
task_id = fxt_new_task.id task_id = fxt_new_task.id
path = str(self.tmp_path / f"task_{task_id}-cvat.zip") path = self.tmp_path / f"task_{task_id}-cvat.zip"
task = self.client.tasks.retrieve(task_id) task = self.client.tasks.retrieve(task_id)
task.export_dataset( task.export_dataset(
format_name="CVAT for images 1.1", format_name="CVAT for images 1.1",
filename=path, filename=os.fspath(path),
pbar=pbar, pbar=pbar,
include_images=include_images, include_images=include_images,
) )
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1] assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
assert osp.isfile(path) assert path.is_file()
assert self.stdout.getvalue() == "" assert self.stdout.getvalue() == ""
def test_can_download_backup(self, fxt_new_task: Task): def test_can_download_backup(self, fxt_new_task: Task):
@ -270,12 +270,12 @@ class TestTaskUsecases:
pbar = make_pbar(file=pbar_out) pbar = make_pbar(file=pbar_out)
task_id = fxt_new_task.id task_id = fxt_new_task.id
path = str(self.tmp_path / f"task_{task_id}-backup.zip") path = self.tmp_path / f"task_{task_id}-backup.zip"
task = self.client.tasks.retrieve(task_id) task = self.client.tasks.retrieve(task_id)
task.download_backup(filename=path, pbar=pbar) task.download_backup(filename=os.fspath(path), pbar=pbar)
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1] assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
assert osp.isfile(path) assert path.is_file()
assert self.stdout.getvalue() == "" assert self.stdout.getvalue() == ""
def test_can_download_preview(self, fxt_new_task: Task): def test_can_download_preview(self, fxt_new_task: Task):
@ -300,7 +300,7 @@ class TestTaskUsecases:
filename_pattern="frame-{frame_id}{frame_ext}", filename_pattern="frame-{frame_id}{frame_ext}",
) )
assert osp.isfile(self.tmp_path / "frame-0.jpg") assert (self.tmp_path / "frame-0.jpg").is_file()
assert self.stdout.getvalue() == "" assert self.stdout.getvalue() == ""
@pytest.mark.parametrize("quality", ("compressed", "original")) @pytest.mark.parametrize("quality", ("compressed", "original"))

@ -2,8 +2,8 @@
# #
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import os.path as osp
import textwrap import textwrap
from pathlib import Path
from typing import Tuple from typing import Tuple
from cvat_sdk.core.helpers import TqdmProgressReporter from cvat_sdk.core.helpers import TqdmProgressReporter
@ -14,11 +14,11 @@ def make_pbar(file, **kwargs):
return TqdmProgressReporter(tqdm(file=file, mininterval=0, **kwargs)) return TqdmProgressReporter(tqdm(file=file, mininterval=0, **kwargs))
def generate_coco_json(filename: str, img_info: Tuple[str, int, int]): def generate_coco_json(filename: Path, img_info: Tuple[Path, int, int]):
image_filename, image_width, image_height = img_info image_filename, image_width, image_height = img_info
content = generate_coco_anno( content = generate_coco_anno(
osp.basename(image_filename), image_filename.name,
image_width=image_width, image_width=image_width,
image_height=image_height, image_height=image_height,
) )

@ -3,14 +3,11 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import json import json
import os.path as osp
import pytest import pytest
from shared.utils.config import ASSETS_DIR from shared.utils.config import ASSETS_DIR
CVAT_DB_DIR = osp.join(ASSETS_DIR, "cvat_db")
class Container: class Container:
def __init__(self, data, key="id"): def __init__(self, data, key="id"):
@ -39,73 +36,73 @@ class Container:
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def users(): def users():
with open(osp.join(ASSETS_DIR, "users.json")) as f: with open(ASSETS_DIR / "users.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def organizations(): def organizations():
with open(osp.join(ASSETS_DIR, "organizations.json")) as f: with open(ASSETS_DIR / "organizations.json") as f:
return Container(json.load(f)) return Container(json.load(f))
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def memberships(): def memberships():
with open(osp.join(ASSETS_DIR, "memberships.json")) as f: with open(ASSETS_DIR / "memberships.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def tasks(): def tasks():
with open(osp.join(ASSETS_DIR, "tasks.json")) as f: with open(ASSETS_DIR / "tasks.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def projects(): def projects():
with open(osp.join(ASSETS_DIR, "projects.json")) as f: with open(ASSETS_DIR / "projects.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def jobs(): def jobs():
with open(osp.join(ASSETS_DIR, "jobs.json")) as f: with open(ASSETS_DIR / "jobs.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def invitations(): def invitations():
with open(osp.join(ASSETS_DIR, "invitations.json")) as f: with open(ASSETS_DIR / "invitations.json") as f:
return Container(json.load(f)["results"], key="key") return Container(json.load(f)["results"], key="key")
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def annotations(): def annotations():
with open(osp.join(ASSETS_DIR, "annotations.json")) as f: with open(ASSETS_DIR / "annotations.json") as f:
return json.load(f) return json.load(f)
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def cloud_storages(): def cloud_storages():
with open(osp.join(ASSETS_DIR, "cloudstorages.json")) as f: with open(ASSETS_DIR / "cloudstorages.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def issues(): def issues():
with open(osp.join(ASSETS_DIR, "issues.json")) as f: with open(ASSETS_DIR / "issues.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def comments(): def comments():
with open(osp.join(ASSETS_DIR, "comments.json")) as f: with open(ASSETS_DIR / "comments.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def webhooks(): def webhooks():
with open(osp.join(ASSETS_DIR, "webhooks.json")) as f: with open(ASSETS_DIR / "webhooks.json") as f:
return Container(json.load(f)["results"]) return Container(json.load(f)["results"])

@ -2,10 +2,9 @@
# #
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import os
import os.path as osp
import re import re
from http import HTTPStatus from http import HTTPStatus
from pathlib import Path
from subprocess import PIPE, CalledProcessError, run from subprocess import PIPE, CalledProcessError, run
from time import sleep from time import sleep
@ -14,12 +13,12 @@ import requests
from shared.utils.config import ASSETS_DIR, get_api_url from shared.utils.config import ASSETS_DIR, get_api_url
CVAT_ROOT_DIR = __file__[: __file__.rfind(osp.join("tests", ""))] CVAT_ROOT_DIR = next(dir.parent for dir in Path(__file__).parents if dir.name == "tests")
CVAT_DB_DIR = osp.join(ASSETS_DIR, "cvat_db") CVAT_DB_DIR = ASSETS_DIR / "cvat_db"
PREFIX = "test" PREFIX = "test"
CONTAINER_NAME_FILES = [ CONTAINER_NAME_FILES = [
osp.join(CVAT_ROOT_DIR, dc_file) CVAT_ROOT_DIR / dc_file
for dc_file in ( for dc_file in (
"components/analytics/docker-compose.analytics.tests.yml", "components/analytics/docker-compose.analytics.tests.yml",
"docker-compose.tests.yml", "docker-compose.tests.yml",
@ -27,7 +26,7 @@ CONTAINER_NAME_FILES = [
] ]
DC_FILES = [ DC_FILES = [
osp.join(CVAT_ROOT_DIR, dc_file) CVAT_ROOT_DIR / dc_file
for dc_file in ( for dc_file in (
"docker-compose.dev.yml", "docker-compose.dev.yml",
"tests/docker-compose.file_share.yml", "tests/docker-compose.file_share.yml",
@ -157,7 +156,7 @@ def running_containers():
def dump_db(): def dump_db():
if "test_cvat_server_1" not in running_containers(): if "test_cvat_server_1" not in running_containers():
pytest.exit("CVAT is not running") pytest.exit("CVAT is not running")
with open(osp.join(CVAT_DB_DIR, "data.json"), "w") as f: with open(CVAT_DB_DIR / "data.json", "w") as f:
try: try:
run( # nosec run( # nosec
"docker exec test_cvat_server_1 \ "docker exec test_cvat_server_1 \
@ -173,7 +172,9 @@ def dump_db():
def create_compose_files(): def create_compose_files():
for filename in CONTAINER_NAME_FILES: for filename in CONTAINER_NAME_FILES:
with open(filename.replace(".tests.yml", ".yml"), "r") as dcf, open(filename, "w") as ndcf: with open(filename.with_name(filename.name.replace(".tests", "")), "r") as dcf, open(
filename, "w"
) as ndcf:
ndcf.writelines( ndcf.writelines(
[line for line in dcf.readlines() if not re.match("^.+container_name.+$", line)] [line for line in dcf.readlines() if not re.match("^.+container_name.+$", line)]
) )
@ -181,8 +182,7 @@ def create_compose_files():
def delete_compose_files(): def delete_compose_files():
for filename in CONTAINER_NAME_FILES: for filename in CONTAINER_NAME_FILES:
if osp.exists(filename): filename.unlink(missing_ok=True)
os.remove(filename)
def wait_for_server(): def wait_for_server():
@ -195,7 +195,7 @@ def wait_for_server():
def docker_restore_data_volumes(): def docker_restore_data_volumes():
docker_cp( docker_cp(
osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"), CVAT_DB_DIR / "cvat_data.tar.bz2",
f"{PREFIX}_cvat_server_1:/tmp/cvat_data.tar.bz2", f"{PREFIX}_cvat_server_1:/tmp/cvat_data.tar.bz2",
) )
docker_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/") docker_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
@ -204,7 +204,7 @@ def docker_restore_data_volumes():
def kube_restore_data_volumes(): def kube_restore_data_volumes():
pod_name = _kube_get_server_pod_name() pod_name = _kube_get_server_pod_name()
kube_cp( kube_cp(
osp.join(CVAT_DB_DIR, "cvat_data.tar.bz2"), CVAT_DB_DIR / "cvat_data.tar.bz2",
f"{pod_name}:/tmp/cvat_data.tar.bz2", f"{pod_name}:/tmp/cvat_data.tar.bz2",
) )
kube_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/") kube_exec_cvat("tar --strip 3 -xjf /tmp/cvat_data.tar.bz2 -C /home/django/data/")
@ -218,19 +218,24 @@ def start_services(rebuild=False):
) )
_run( _run(
# use compatibility mode to have fixed names for containers (with underscores) [
# https://github.com/docker/compose#about-update-and-backward-compatibility "docker-compose",
f"docker-compose -p {PREFIX} --compatibility " f"--project-name={PREFIX}",
+ "--env-file " # use compatibility mode to have fixed names for containers (with underscores)
+ osp.join(CVAT_ROOT_DIR, "tests", "python", "webhook_receiver", ".env") # https://github.com/docker/compose#about-update-and-backward-compatibility
+ f" -f {' -f '.join(DC_FILES)} up -d " "--compatibility",
+ "--build" * rebuild, f"--env-file={CVAT_ROOT_DIR / 'tests/python/webhook_receiver/.env'}",
*(f"--file={f}" for f in DC_FILES),
"up",
"-d",
*["--build"] * rebuild,
],
capture_output=False, capture_output=False,
) )
docker_restore_data_volumes() docker_restore_data_volumes()
docker_cp(osp.join(CVAT_DB_DIR, "restore.sql"), f"{PREFIX}_cvat_db_1:/tmp/restore.sql") docker_cp(CVAT_DB_DIR / "restore.sql", f"{PREFIX}_cvat_db_1:/tmp/restore.sql")
docker_cp(osp.join(CVAT_DB_DIR, "data.json"), f"{PREFIX}_cvat_server_1:/tmp/data.json") docker_cp(CVAT_DB_DIR / "data.json", f"{PREFIX}_cvat_server_1:/tmp/data.json")
@pytest.fixture(autouse=True, scope="session") @pytest.fixture(autouse=True, scope="session")
@ -260,18 +265,23 @@ def services(request):
delete_compose_files() delete_compose_files()
pytest.exit("All generated test files have been deleted", returncode=0) pytest.exit("All generated test files have been deleted", returncode=0)
if not all([osp.exists(f) for f in CONTAINER_NAME_FILES]) or rebuild: if not all([f.exists() for f in CONTAINER_NAME_FILES]) or rebuild:
delete_compose_files() delete_compose_files()
create_compose_files() create_compose_files()
if stop: if stop:
_run( _run(
# use compatibility mode to have fixed names for containers (with underscores) [
# https://github.com/docker/compose#about-update-and-backward-compatibility "docker-compose",
f"docker-compose -p {PREFIX} --compatibility " f"--project-name={PREFIX}",
+ "--env-file " # use compatibility mode to have fixed names for containers (with underscores)
+ osp.join(CVAT_ROOT_DIR, "tests", "python", "webhook_receiver", ".env") # https://github.com/docker/compose#about-update-and-backward-compatibility
+ f" -f {' -f '.join(DC_FILES)} down -v", "--compatibility",
f"--env-file={CVAT_ROOT_DIR / 'tests/python/webhook_receiver/.env'}",
*(f"--file={f}" for f in DC_FILES),
"down",
"-v",
],
capture_output=False, capture_output=False,
) )
pytest.exit("All testing containers are stopped", returncode=0) pytest.exit("All testing containers are stopped", returncode=0)
@ -296,8 +306,8 @@ def services(request):
kube_restore_data_volumes() kube_restore_data_volumes()
server_pod_name = _kube_get_server_pod_name() server_pod_name = _kube_get_server_pod_name()
db_pod_name = _kube_get_db_pod_name() db_pod_name = _kube_get_db_pod_name()
kube_cp(osp.join(CVAT_DB_DIR, "restore.sql"), f"{db_pod_name}:/tmp/restore.sql") kube_cp(CVAT_DB_DIR / "restore.sql", f"{db_pod_name}:/tmp/restore.sql")
kube_cp(osp.join(CVAT_DB_DIR, "data.json"), f"{server_pod_name}:/tmp/data.json") kube_cp(CVAT_DB_DIR / "data.json", f"{server_pod_name}:/tmp/data.json")
wait_for_server() wait_for_server()

@ -2,13 +2,13 @@
# #
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import os.path as osp from pathlib import Path
import requests import requests
from cvat_sdk.api_client import ApiClient, Configuration from cvat_sdk.api_client import ApiClient, Configuration
ROOT_DIR = __file__[: __file__.rfind(osp.join("utils", ""))] ROOT_DIR = next(dir.parent for dir in Path(__file__).parents if dir.name == "utils")
ASSETS_DIR = osp.abspath(osp.join(ROOT_DIR, "assets")) ASSETS_DIR = (ROOT_DIR / "assets").resolve()
# Suppress the warning from Bandit about hardcoded passwords # Suppress the warning from Bandit about hardcoded passwords
USER_PASS = "!Q@W#E$R" # nosec USER_PASS = "!Q@W#E$R" # nosec
BASE_URL = "http://localhost:8080" BASE_URL = "http://localhost:8080"

@ -3,7 +3,6 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import json import json
import os.path as osp
from http import HTTPStatus from http import HTTPStatus
from config import ASSETS_DIR, get_method from config import ASSETS_DIR, get_method
@ -24,7 +23,7 @@ if __name__ == "__main__":
"webhook", "webhook",
]: ]:
response = get_method("admin1", f"{obj}s", page_size="all") response = get_method("admin1", f"{obj}s", page_size="all")
with open(osp.join(ASSETS_DIR, f"{obj}s.json"), "w") as f: with open(ASSETS_DIR / f"{obj}s.json", "w") as f:
json.dump(response.json(), f, indent=2, sort_keys=True) json.dump(response.json(), f, indent=2, sort_keys=True)
if obj in ["job", "task"]: if obj in ["job", "task"]:
@ -35,5 +34,5 @@ if __name__ == "__main__":
if response.status_code == HTTPStatus.OK: if response.status_code == HTTPStatus.OK:
annotations[obj][oid] = response.json() annotations[obj][oid] = response.json()
with open(osp.join(ASSETS_DIR, f"annotations.json"), "w") as f: with open(ASSETS_DIR / "annotations.json", "w") as f:
json.dump(annotations, f, indent=2, sort_keys=True) json.dump(annotations, f, indent=2, sort_keys=True)

Loading…
Cancel
Save