Migrate to using pathlib in most of the SDK (#5435)

For user-facing functions, keep accepting `str` paths to maintain
compatibility and flexibility, but add support for arbitrary path-like
objects. For internal functions (in `downloading.py` and
`uploading.py`), don't bother and require `pathlib.Path`.

The only code that isn't converted is build-time code (e.g. `setup.py`)
and code that came from openapi-generator.
main
Roman Donchenko 3 years ago committed by GitHub
parent 5441c4ee67
commit 0a032b3236
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -29,6 +29,8 @@ from online detectors & interactors) (<https://github.com/opencv/cvat/pull/4543>)
- Allowed trailing slashes in the SDK host address (<https://github.com/opencv/cvat/pull/5057>) - Allowed trailing slashes in the SDK host address (<https://github.com/opencv/cvat/pull/5057>)
- Adjusted initial camera position, enabled 'Reset zoom' option for 3D canvas (<https://github.com/opencv/cvat/pull/5395>) - Adjusted initial camera position, enabled 'Reset zoom' option for 3D canvas (<https://github.com/opencv/cvat/pull/5395>)
- Enabled authentication via email (<https://github.com/opencv/cvat/pull/5037>) - Enabled authentication via email (<https://github.com/opencv/cvat/pull/5037>)
- In the SDK, functions taking paths as strings now also accept path-like objects
(<https://github.com/opencv/cvat/pull/5435>)
### Deprecated ### Deprecated
- TBD - TBD

@ -5,8 +5,8 @@
from __future__ import annotations from __future__ import annotations
import os.path as osp
from contextlib import closing from contextlib import closing
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, Optional from typing import TYPE_CHECKING, Any, Dict, Optional
from cvat_sdk.api_client.api_client import Endpoint from cvat_sdk.api_client.api_client import Endpoint
@ -28,7 +28,7 @@ class Downloader:
def download_file( def download_file(
self, self,
url: str, url: str,
output_path: str, output_path: Path,
*, *,
timeout: int = 60, timeout: int = 60,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -39,7 +39,7 @@ class Downloader:
CHUNK_SIZE = 10 * 2**20 CHUNK_SIZE = 10 * 2**20
assert not osp.exists(output_path) assert not output_path.exists()
response = self._client.api_client.rest_client.GET( response = self._client.api_client.rest_client.GET(
url, url,
@ -70,7 +70,7 @@ class Downloader:
def prepare_and_download_file_from_endpoint( def prepare_and_download_file_from_endpoint(
self, self,
endpoint: Endpoint, endpoint: Endpoint,
filename: str, filename: Path,
*, *,
url_params: Optional[Dict[str, Any]] = None, url_params: Optional[Dict[str, Any]] = None,
query_params: Optional[Dict[str, Any]] = None, query_params: Optional[Dict[str, Any]] = None,

@ -6,9 +6,8 @@ from __future__ import annotations
import io import io
import mimetypes import mimetypes
import os from pathlib import Path
import os.path as osp from typing import TYPE_CHECKING, List, Optional, Sequence
from typing import List, Optional, Sequence
from PIL import Image from PIL import Image
@ -26,6 +25,9 @@ from cvat_sdk.core.proxies.model_proxy import (
) )
from cvat_sdk.core.uploading import AnnotationUploader from cvat_sdk.core.uploading import AnnotationUploader
if TYPE_CHECKING:
from _typeshed import StrPath
_JobEntityBase, _JobRepoBase = build_model_bases( _JobEntityBase, _JobRepoBase = build_model_bases(
models.JobRead, apis.JobsApi, api_member_name="jobs_api" models.JobRead, apis.JobsApi, api_member_name="jobs_api"
) )
@ -43,7 +45,7 @@ class Job(
def import_annotations( def import_annotations(
self, self,
format_name: str, format_name: str,
filename: str, filename: StrPath,
*, *,
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -52,6 +54,8 @@ class Job(
Upload annotations for a job in the specified format (e.g. 'YOLO ZIP 1.0'). Upload annotations for a job in the specified format (e.g. 'YOLO ZIP 1.0').
""" """
filename = Path(filename)
AnnotationUploader(self._client).upload_file_and_wait( AnnotationUploader(self._client).upload_file_and_wait(
self.api.create_annotations_endpoint, self.api.create_annotations_endpoint,
filename, filename,
@ -66,7 +70,7 @@ class Job(
def export_dataset( def export_dataset(
self, self,
format_name: str, format_name: str,
filename: str, filename: StrPath,
*, *,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
@ -75,6 +79,9 @@ class Job(
""" """
Download annotations for a job in the specified format (e.g. 'YOLO ZIP 1.0'). Download annotations for a job in the specified format (e.g. 'YOLO ZIP 1.0').
""" """
filename = Path(filename)
if include_images: if include_images:
endpoint = self.api.retrieve_dataset_endpoint endpoint = self.api.retrieve_dataset_endpoint
else: else:
@ -112,7 +119,7 @@ class Job(
self, self,
frame_ids: Sequence[int], frame_ids: Sequence[int],
*, *,
outdir: str = "", outdir: StrPath = ".",
quality: str = "original", quality: str = "original",
filename_pattern: str = "frame_{frame_id:06d}{frame_ext}", filename_pattern: str = "frame_{frame_id:06d}{frame_ext}",
) -> Optional[List[Image.Image]]: ) -> Optional[List[Image.Image]]:
@ -120,7 +127,9 @@ class Job(
Download the requested frame numbers for a job and save images as outdir/filename_pattern Download the requested frame numbers for a job and save images as outdir/filename_pattern
""" """
# TODO: add arg descriptions in schema # TODO: add arg descriptions in schema
os.makedirs(outdir, exist_ok=True)
outdir = Path(outdir)
outdir.mkdir(parents=True, exist_ok=True)
for frame_id in frame_ids: for frame_id in frame_ids:
frame_bytes = self.get_frame(frame_id, quality=quality) frame_bytes = self.get_frame(frame_id, quality=quality)
@ -136,7 +145,7 @@ class Job(
im_ext = ".jpg" im_ext = ".jpg"
outfile = filename_pattern.format(frame_id=frame_id, frame_ext=im_ext) outfile = filename_pattern.format(frame_id=frame_id, frame_ext=im_ext)
im.save(osp.join(outdir, outfile)) im.save(outdir / outfile)
def get_meta(self) -> models.IDataMetaRead: def get_meta(self) -> models.IDataMetaRead:
(meta, _) = self.api.retrieve_data_meta(self.id) (meta, _) = self.api.retrieve_data_meta(self.id)

@ -5,8 +5,8 @@
from __future__ import annotations from __future__ import annotations
import json import json
import os.path as osp from pathlib import Path
from typing import Optional from typing import TYPE_CHECKING, Optional
from cvat_sdk.api_client import apis, models from cvat_sdk.api_client import apis, models
from cvat_sdk.core.downloading import Downloader from cvat_sdk.core.downloading import Downloader
@ -21,6 +21,9 @@ from cvat_sdk.core.proxies.model_proxy import (
) )
from cvat_sdk.core.uploading import DatasetUploader, Uploader from cvat_sdk.core.uploading import DatasetUploader, Uploader
if TYPE_CHECKING:
from _typeshed import StrPath
_ProjectEntityBase, _ProjectRepoBase = build_model_bases( _ProjectEntityBase, _ProjectRepoBase = build_model_bases(
models.ProjectRead, apis.ProjectsApi, api_member_name="projects_api" models.ProjectRead, apis.ProjectsApi, api_member_name="projects_api"
) )
@ -34,7 +37,7 @@ class Project(
def import_dataset( def import_dataset(
self, self,
format_name: str, format_name: str,
filename: str, filename: StrPath,
*, *,
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -43,6 +46,8 @@ class Project(
Import dataset for a project in the specified format (e.g. 'YOLO ZIP 1.0'). Import dataset for a project in the specified format (e.g. 'YOLO ZIP 1.0').
""" """
filename = Path(filename)
DatasetUploader(self._client).upload_file_and_wait( DatasetUploader(self._client).upload_file_and_wait(
self.api.create_dataset_endpoint, self.api.create_dataset_endpoint,
filename, filename,
@ -57,7 +62,7 @@ class Project(
def export_dataset( def export_dataset(
self, self,
format_name: str, format_name: str,
filename: str, filename: StrPath,
*, *,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
@ -66,6 +71,9 @@ class Project(
""" """
Download annotations for a project in the specified format (e.g. 'YOLO ZIP 1.0'). Download annotations for a project in the specified format (e.g. 'YOLO ZIP 1.0').
""" """
filename = Path(filename)
if include_images: if include_images:
endpoint = self.api.retrieve_dataset_endpoint endpoint = self.api.retrieve_dataset_endpoint
else: else:
@ -84,7 +92,7 @@ class Project(
def download_backup( def download_backup(
self, self,
filename: str, filename: StrPath,
*, *,
status_check_period: int = None, status_check_period: int = None,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -93,6 +101,8 @@ class Project(
Download a project backup Download a project backup
""" """
filename = Path(filename)
Downloader(self._client).prepare_and_download_file_from_endpoint( Downloader(self._client).prepare_and_download_file_from_endpoint(
self.api.retrieve_backup_endpoint, self.api.retrieve_backup_endpoint,
filename=filename, filename=filename,
@ -148,7 +158,7 @@ class ProjectsRepo(
def create_from_backup( def create_from_backup(
self, self,
filename: str, filename: StrPath,
*, *,
status_check_period: int = None, status_check_period: int = None,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -156,10 +166,13 @@ class ProjectsRepo(
""" """
Import a project from a backup file Import a project from a backup file
""" """
filename = Path(filename)
if status_check_period is None: if status_check_period is None:
status_check_period = self.config.status_check_period status_check_period = self.config.status_check_period
params = {"filename": osp.basename(filename)} params = {"filename": filename.name}
url = self.api_map.make_endpoint_url(self.api.create_backup_endpoint.path) url = self.api_map.make_endpoint_url(self.api.create_backup_endpoint.path)
uploader = Uploader(self) uploader = Uploader(self)

@ -7,10 +7,9 @@ from __future__ import annotations
import io import io
import json import json
import mimetypes import mimetypes
import os
import os.path as osp
import shutil import shutil
from enum import Enum from enum import Enum
from pathlib import Path
from time import sleep from time import sleep
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence
@ -34,7 +33,7 @@ from cvat_sdk.core.uploading import AnnotationUploader, DataUploader, Uploader
from cvat_sdk.core.utils import filter_dict from cvat_sdk.core.utils import filter_dict
if TYPE_CHECKING: if TYPE_CHECKING:
from _typeshed import SupportsWrite from _typeshed import StrPath, SupportsWrite
class ResourceType(Enum): class ResourceType(Enum):
@ -67,7 +66,7 @@ class Task(
def upload_data( def upload_data(
self, self,
resource_type: ResourceType, resource_type: ResourceType,
resources: Sequence[str], resources: Sequence[StrPath],
*, *,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
params: Optional[Dict[str, Any]] = None, params: Optional[Dict[str, Any]] = None,
@ -77,15 +76,8 @@ class Task(
""" """
params = params or {} params = params or {}
data = {} data = {"image_quality": 70}
if resource_type is ResourceType.LOCAL:
pass # handled later
elif resource_type is ResourceType.REMOTE:
data["remote_files"] = resources
elif resource_type is ResourceType.SHARE:
data["server_files"] = resources
data["image_quality"] = 70
data.update( data.update(
filter_dict( filter_dict(
params, params,
@ -105,6 +97,15 @@ class Task(
data["frame_filter"] = f"step={params.get('frame_step')}" data["frame_filter"] = f"step={params.get('frame_step')}"
if resource_type in [ResourceType.REMOTE, ResourceType.SHARE]: if resource_type in [ResourceType.REMOTE, ResourceType.SHARE]:
for resource in resources:
if not isinstance(resource, str):
raise TypeError(f"resources: expected instances of str, got {type(resource)}")
if resource_type is ResourceType.REMOTE:
data["remote_files"] = resources
elif resource_type is ResourceType.SHARE:
data["server_files"] = resources
self.api.create_data( self.api.create_data(
self.id, self.id,
data_request=models.DataRequest(**data), data_request=models.DataRequest(**data),
@ -114,12 +115,14 @@ class Task(
self.api.create_data_endpoint.path, kwsub={"id": self.id} self.api.create_data_endpoint.path, kwsub={"id": self.id}
) )
DataUploader(self._client).upload_files(url, resources, pbar=pbar, **data) DataUploader(self._client).upload_files(
url, list(map(Path, resources)), pbar=pbar, **data
)
def import_annotations( def import_annotations(
self, self,
format_name: str, format_name: str,
filename: str, filename: StrPath,
*, *,
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -128,6 +131,8 @@ class Task(
Upload annotations for a task in the specified format (e.g. 'YOLO ZIP 1.0'). Upload annotations for a task in the specified format (e.g. 'YOLO ZIP 1.0').
""" """
filename = Path(filename)
AnnotationUploader(self._client).upload_file_and_wait( AnnotationUploader(self._client).upload_file_and_wait(
self.api.create_annotations_endpoint, self.api.create_annotations_endpoint,
filename, filename,
@ -178,7 +183,7 @@ class Task(
self, self,
frame_ids: Sequence[int], frame_ids: Sequence[int],
*, *,
outdir: str = "", outdir: StrPath = ".",
quality: str = "original", quality: str = "original",
filename_pattern: str = "frame_{frame_id:06d}{frame_ext}", filename_pattern: str = "frame_{frame_id:06d}{frame_ext}",
) -> Optional[List[Image.Image]]: ) -> Optional[List[Image.Image]]:
@ -186,7 +191,9 @@ class Task(
Download the requested frame numbers for a task and save images as outdir/filename_pattern Download the requested frame numbers for a task and save images as outdir/filename_pattern
""" """
# TODO: add arg descriptions in schema # TODO: add arg descriptions in schema
os.makedirs(outdir, exist_ok=True)
outdir = Path(outdir)
outdir.mkdir(exist_ok=True)
for frame_id in frame_ids: for frame_id in frame_ids:
frame_bytes = self.get_frame(frame_id, quality=quality) frame_bytes = self.get_frame(frame_id, quality=quality)
@ -202,12 +209,12 @@ class Task(
im_ext = ".jpg" im_ext = ".jpg"
outfile = filename_pattern.format(frame_id=frame_id, frame_ext=im_ext) outfile = filename_pattern.format(frame_id=frame_id, frame_ext=im_ext)
im.save(osp.join(outdir, outfile)) im.save(outdir / outfile)
def export_dataset( def export_dataset(
self, self,
format_name: str, format_name: str,
filename: str, filename: StrPath,
*, *,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
@ -216,6 +223,9 @@ class Task(
""" """
Download annotations for a task in the specified format (e.g. 'YOLO ZIP 1.0'). Download annotations for a task in the specified format (e.g. 'YOLO ZIP 1.0').
""" """
filename = Path(filename)
if include_images: if include_images:
endpoint = self.api.retrieve_dataset_endpoint endpoint = self.api.retrieve_dataset_endpoint
else: else:
@ -234,7 +244,7 @@ class Task(
def download_backup( def download_backup(
self, self,
filename: str, filename: StrPath,
*, *,
status_check_period: int = None, status_check_period: int = None,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -243,6 +253,8 @@ class Task(
Download a task backup Download a task backup
""" """
filename = Path(filename)
Downloader(self._client).prepare_and_download_file_from_endpoint( Downloader(self._client).prepare_and_download_file_from_endpoint(
self.api.retrieve_backup_endpoint, self.api.retrieve_backup_endpoint,
filename=filename, filename=filename,
@ -370,7 +382,7 @@ class TasksRepo(
def create_from_backup( def create_from_backup(
self, self,
filename: str, filename: StrPath,
*, *,
status_check_period: int = None, status_check_period: int = None,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
@ -378,10 +390,13 @@ class TasksRepo(
""" """
Import a task from a backup file Import a task from a backup file
""" """
filename = Path(filename)
if status_check_period is None: if status_check_period is None:
status_check_period = self._client.config.status_check_period status_check_period = self._client.config.status_check_period
params = {"filename": osp.basename(filename)} params = {"filename": filename.name}
url = self._client.api_map.make_endpoint_url(self.api.create_backup_endpoint.path) url = self._client.api_map.make_endpoint_url(self.api.create_backup_endpoint.path)
uploader = Uploader(self._client) uploader = Uploader(self._client)
response = uploader.upload_file( response = uploader.upload_file(

@ -5,8 +5,8 @@
from __future__ import annotations from __future__ import annotations
import os import os
import os.path as osp
from contextlib import ExitStack, closing from contextlib import ExitStack, closing
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple
import requests import requests
@ -144,7 +144,7 @@ class Uploader:
def upload_file( def upload_file(
self, self,
url: str, url: str,
filename: str, filename: Path,
*, *,
meta: Dict[str, Any], meta: Dict[str, Any],
query_params: Dict[str, Any] = None, query_params: Dict[str, Any] = None,
@ -207,15 +207,15 @@ class Uploader:
) )
def _split_files_by_requests( def _split_files_by_requests(
self, filenames: List[str] self, filenames: List[Path]
) -> Tuple[List[Tuple[List[str], int]], List[str], int]: ) -> Tuple[List[Tuple[List[Path], int]], List[Path], int]:
bulk_files: Dict[str, int] = {} bulk_files: Dict[str, int] = {}
separate_files: Dict[str, int] = {} separate_files: Dict[str, int] = {}
# sort by size # sort by size
for filename in filenames: for filename in filenames:
filename = os.path.abspath(filename) filename = filename.resolve()
file_size = os.stat(filename).st_size file_size = filename.stat().st_size
if MAX_REQUEST_SIZE < file_size: if MAX_REQUEST_SIZE < file_size:
separate_files[filename] = file_size separate_files[filename] = file_size
else: else:
@ -252,7 +252,7 @@ class Uploader:
return _MyTusUploader(client=client, api_client=api_client, **kwargs) return _MyTusUploader(client=client, api_client=api_client, **kwargs)
def _upload_file_data_with_tus(self, url, filename, *, meta=None, pbar=None, logger=None): def _upload_file_data_with_tus(self, url, filename, *, meta=None, pbar=None, logger=None):
file_size = os.stat(filename).st_size file_size = filename.stat().st_size
if pbar is None: if pbar is None:
pbar = NullProgressReporter() pbar = NullProgressReporter()
@ -299,7 +299,7 @@ class AnnotationUploader(Uploader):
def upload_file_and_wait( def upload_file_and_wait(
self, self,
endpoint: Endpoint, endpoint: Endpoint,
filename: str, filename: Path,
format_name: str, format_name: str,
*, *,
url_params: Optional[Dict[str, Any]] = None, url_params: Optional[Dict[str, Any]] = None,
@ -307,7 +307,7 @@ class AnnotationUploader(Uploader):
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
): ):
url = self._client.api_map.make_endpoint_url(endpoint.path, kwsub=url_params) url = self._client.api_map.make_endpoint_url(endpoint.path, kwsub=url_params)
params = {"format": format_name, "filename": osp.basename(filename)} params = {"format": format_name, "filename": filename.name}
self.upload_file( self.upload_file(
url, filename, pbar=pbar, query_params=params, meta={"filename": params["filename"]} url, filename, pbar=pbar, query_params=params, meta={"filename": params["filename"]}
) )
@ -326,7 +326,7 @@ class DatasetUploader(Uploader):
def upload_file_and_wait( def upload_file_and_wait(
self, self,
endpoint: Endpoint, endpoint: Endpoint,
filename: str, filename: Path,
format_name: str, format_name: str,
*, *,
url_params: Optional[Dict[str, Any]] = None, url_params: Optional[Dict[str, Any]] = None,
@ -334,7 +334,7 @@ class DatasetUploader(Uploader):
status_check_period: Optional[int] = None, status_check_period: Optional[int] = None,
): ):
url = self._client.api_map.make_endpoint_url(endpoint.path, kwsub=url_params) url = self._client.api_map.make_endpoint_url(endpoint.path, kwsub=url_params)
params = {"format": format_name, "filename": osp.basename(filename)} params = {"format": format_name, "filename": filename.name}
self.upload_file( self.upload_file(
url, filename, pbar=pbar, query_params=params, meta={"filename": params["filename"]} url, filename, pbar=pbar, query_params=params, meta={"filename": params["filename"]}
) )
@ -353,7 +353,7 @@ class DataUploader(Uploader):
def upload_files( def upload_files(
self, self,
url: str, url: str,
resources: List[str], resources: List[Path],
*, *,
pbar: Optional[ProgressReporter] = None, pbar: Optional[ProgressReporter] = None,
**kwargs, **kwargs,
@ -370,7 +370,7 @@ class DataUploader(Uploader):
files = {} files = {}
for i, filename in enumerate(group): for i, filename in enumerate(group):
files[f"client_files[{i}]"] = ( files[f"client_files[{i}]"] = (
filename, os.fspath(filename),
es.enter_context(closing(open(filename, "rb"))).read(), es.enter_context(closing(open(filename, "rb"))).read(),
) )
response = self._client.api_client.rest_client.POST( response = self._client.api_client.rest_client.POST(
@ -392,7 +392,7 @@ class DataUploader(Uploader):
self._upload_file_data_with_tus( self._upload_file_data_with_tus(
url, url,
filename, filename,
meta={"filename": osp.basename(filename)}, meta={"filename": filename.name},
pbar=pbar, pbar=pbar,
logger=self._client.logger.debug, logger=self._client.logger.debug,
) )

@ -65,7 +65,7 @@ class TestCLI:
backup_path = self.tmp_path / "backup.zip" backup_path = self.tmp_path / "backup.zip"
fxt_new_task.import_annotations("COCO 1.0", filename=fxt_coco_file) fxt_new_task.import_annotations("COCO 1.0", filename=fxt_coco_file)
fxt_new_task.download_backup(str(backup_path)) fxt_new_task.download_backup(backup_path)
yield backup_path yield backup_path
@ -79,7 +79,7 @@ class TestCLI:
"labels": [{"name": "car"}, {"name": "person"}], "labels": [{"name": "car"}, {"name": "person"}],
}, },
resource_type=ResourceType.LOCAL, resource_type=ResourceType.LOCAL,
resources=list(map(os.fspath, files)), resources=files,
) )
return task return task

@ -42,7 +42,7 @@ class TestIssuesUsecases:
"labels": [{"name": "car"}, {"name": "person"}], "labels": [{"name": "car"}, {"name": "person"}],
}, },
resource_type=ResourceType.LOCAL, resource_type=ResourceType.LOCAL,
resources=[str(fxt_image_file)], resources=[fxt_image_file],
data_params={"image_quality": 80}, data_params={"image_quality": 80},
) )
@ -162,7 +162,7 @@ class TestCommentsUsecases:
"labels": [{"name": "car"}, {"name": "person"}], "labels": [{"name": "car"}, {"name": "person"}],
}, },
resource_type=ResourceType.LOCAL, resource_type=ResourceType.LOCAL,
resources=[str(fxt_image_file)], resources=[fxt_image_file],
data_params={"image_quality": 80}, data_params={"image_quality": 80},
) )

@ -3,7 +3,6 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
import io import io
import os
from logging import Logger from logging import Logger
from pathlib import Path from pathlib import Path
from typing import Tuple from typing import Tuple
@ -46,7 +45,7 @@ class TestJobUsecases:
"labels": [{"name": "car"}, {"name": "person"}], "labels": [{"name": "car"}, {"name": "person"}],
}, },
resource_type=ResourceType.LOCAL, resource_type=ResourceType.LOCAL,
resources=[str(fxt_image_file)], resources=[fxt_image_file],
data_params={"image_quality": 80}, data_params={"image_quality": 80},
) )
@ -108,7 +107,7 @@ class TestJobUsecases:
job = self.client.jobs.retrieve(job_id) job = self.client.jobs.retrieve(job_id)
job.export_dataset( job.export_dataset(
format_name="CVAT for images 1.1", format_name="CVAT for images 1.1",
filename=os.fspath(path), filename=path,
pbar=pbar, pbar=pbar,
include_images=include_images, include_images=include_images,
) )
@ -135,7 +134,7 @@ class TestJobUsecases:
fxt_new_task.get_jobs()[0].download_frames( fxt_new_task.get_jobs()[0].download_frames(
[0], [0],
quality=quality, quality=quality,
outdir=str(self.tmp_path), outdir=self.tmp_path,
filename_pattern="frame-{frame_id}{frame_ext}", filename_pattern="frame-{frame_id}{frame_ext}",
) )
@ -147,7 +146,7 @@ class TestJobUsecases:
pbar = make_pbar(file=pbar_out) pbar = make_pbar(file=pbar_out)
fxt_new_task.get_jobs()[0].import_annotations( fxt_new_task.get_jobs()[0].import_annotations(
format_name="COCO 1.0", filename=str(fxt_coco_file), pbar=pbar format_name="COCO 1.0", filename=fxt_coco_file, pbar=pbar
) )
assert "uploaded" in self.logger_stream.getvalue() assert "uploaded" in self.logger_stream.getvalue()

@ -4,7 +4,6 @@
import io import io
import json import json
import os
import zipfile import zipfile
from logging import Logger from logging import Logger
from pathlib import Path from pathlib import Path
@ -48,7 +47,7 @@ class TestTaskUsecases:
backup_path = self.tmp_path / "backup.zip" backup_path = self.tmp_path / "backup.zip"
fxt_new_task.import_annotations("COCO 1.0", filename=fxt_coco_file) fxt_new_task.import_annotations("COCO 1.0", filename=fxt_coco_file)
fxt_new_task.download_backup(str(backup_path)) fxt_new_task.download_backup(backup_path)
yield backup_path yield backup_path
@ -60,7 +59,7 @@ class TestTaskUsecases:
"labels": [{"name": "car"}, {"name": "person"}], "labels": [{"name": "car"}, {"name": "person"}],
}, },
resource_type=ResourceType.LOCAL, resource_type=ResourceType.LOCAL,
resources=[str(fxt_image_file)], resources=[fxt_image_file],
data_params={"image_quality": 80}, data_params={"image_quality": 80},
) )
@ -113,9 +112,8 @@ class TestTaskUsecases:
task_files = generate_image_files(7) task_files = generate_image_files(7)
for i, f in enumerate(task_files): for i, f in enumerate(task_files):
fname = self.tmp_path / f.name fname = self.tmp_path / f.name
with fname.open("wb") as fd: fname.write_bytes(f.getvalue())
fd.write(f.getvalue()) task_files[i] = fname
task_files[i] = str(fname)
task = self.client.tasks.create_from_data( task = self.client.tasks.create_from_data(
spec=task_spec, spec=task_spec,
@ -184,7 +182,7 @@ class TestTaskUsecases:
task = self.client.tasks.create_from_data( task = self.client.tasks.create_from_data(
spec=task_spec, spec=task_spec,
resource_type=ResourceType.LOCAL, resource_type=ResourceType.LOCAL,
resources=[str(fxt_image_file)], resources=[fxt_image_file],
pbar=pbar, pbar=pbar,
dataset_repository_url=repository_url, dataset_repository_url=repository_url,
) )
@ -256,7 +254,7 @@ class TestTaskUsecases:
task = self.client.tasks.retrieve(task_id) task = self.client.tasks.retrieve(task_id)
task.export_dataset( task.export_dataset(
format_name="CVAT for images 1.1", format_name="CVAT for images 1.1",
filename=os.fspath(path), filename=path,
pbar=pbar, pbar=pbar,
include_images=include_images, include_images=include_images,
) )
@ -272,7 +270,7 @@ class TestTaskUsecases:
task_id = fxt_new_task.id task_id = fxt_new_task.id
path = self.tmp_path / f"task_{task_id}-backup.zip" path = self.tmp_path / f"task_{task_id}-backup.zip"
task = self.client.tasks.retrieve(task_id) task = self.client.tasks.retrieve(task_id)
task.download_backup(filename=os.fspath(path), pbar=pbar) task.download_backup(filename=path, pbar=pbar)
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1] assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
assert path.is_file() assert path.is_file()
@ -296,7 +294,7 @@ class TestTaskUsecases:
fxt_new_task.download_frames( fxt_new_task.download_frames(
[0], [0],
quality=quality, quality=quality,
outdir=str(self.tmp_path), outdir=self.tmp_path,
filename_pattern="frame-{frame_id}{frame_ext}", filename_pattern="frame-{frame_id}{frame_ext}",
) )
@ -319,9 +317,7 @@ class TestTaskUsecases:
pbar_out = io.StringIO() pbar_out = io.StringIO()
pbar = make_pbar(file=pbar_out) pbar = make_pbar(file=pbar_out)
fxt_new_task.import_annotations( fxt_new_task.import_annotations(format_name="COCO 1.0", filename=fxt_coco_file, pbar=pbar)
format_name="COCO 1.0", filename=str(fxt_coco_file), pbar=pbar
)
assert "uploaded" in self.logger_stream.getvalue() assert "uploaded" in self.logger_stream.getvalue()
assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1] assert "100%" in pbar_out.getvalue().strip("\r").split("\r")[-1]
@ -331,7 +327,7 @@ class TestTaskUsecases:
pbar_out = io.StringIO() pbar_out = io.StringIO()
pbar = make_pbar(file=pbar_out) pbar = make_pbar(file=pbar_out)
task = self.client.tasks.create_from_backup(str(fxt_backup_file), pbar=pbar) task = self.client.tasks.create_from_backup(fxt_backup_file, pbar=pbar)
assert task.id assert task.id
assert task.id != fxt_new_task.id assert task.id != fxt_new_task.id

Loading…
Cancel
Save