main · Maya committed 5 years ago · parent b97a6cda10 · commit a1b32b406d

@@ -10,9 +10,9 @@ from django.conf import settings
from cvat.apps.engine.media_extractors import (Mpeg4ChunkWriter,
    Mpeg4CompressedChunkWriter, ZipChunkWriter, ZipCompressedChunkWriter)
from cvat.apps.engine.models import DataChoice
from cvat.apps.engine.models import DataChoice, UploadedDataStorageLocationChoice
from cvat.apps.engine.prepare import PrepareInfo
from .log import slogger

class CacheInteraction:
    def __init__(self):
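The UploadedDataStorageLocationChoice imported above is defined in cvat/apps/engine/models.py, which is not part of this diff. As a rough orientation only, a choices class of this kind in CVAT usually follows the str/Enum pattern below; the member values shown here are an assumption, not the committed definition.

# Hypothetical sketch of the imported choice class; the real definition
# lives in cvat/apps/engine/models.py and may differ in detail.
from enum import Enum

class UploadedDataStorageLocationChoice(str, Enum):
    LOCAL = 'local'    # data was copied into the task's own upload directory
    SHARE = 'share'    # data stays on the mounted share (settings.SHARE_ROOT)

    @classmethod
    def choices(cls):
        return tuple((x.value, x.name) for x in cls)

    def __str__(self):
        return self.value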
@@ -31,28 +31,36 @@ class CacheInteraction:
    def prepare_chunk_buff(self, db_data, quality, chunk_number):
        from cvat.apps.engine.frame_provider import FrameProvider # TODO: remove circular dependency
        extractor_classes = {
        writer_classes = {
            FrameProvider.Quality.COMPRESSED : Mpeg4CompressedChunkWriter if db_data.compressed_chunk_type == DataChoice.VIDEO else ZipCompressedChunkWriter,
            FrameProvider.Quality.ORIGINAL : Mpeg4ChunkWriter if db_data.original_chunk_type == DataChoice.VIDEO else ZipChunkWriter,
        }
        image_quality = 100 if extractor_classes[quality] in [Mpeg4ChunkWriter, ZipChunkWriter] else db_data.image_quality
        mime_type = 'video/mp4' if extractor_classes[quality] in [Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter] else 'application/zip'
        image_quality = 100 if writer_classes[quality] in [Mpeg4ChunkWriter, ZipChunkWriter] else db_data.image_quality
        mime_type = 'video/mp4' if writer_classes[quality] in [Mpeg4ChunkWriter, Mpeg4CompressedChunkWriter] else 'application/zip'
        extractor = extractor_classes[quality](image_quality)
        extractor = writer_classes[quality](image_quality)
        images = []
        buff = BytesIO()
        if os.path.exists(db_data.get_meta_path()):
            source_path = os.path.join(db_data.get_upload_dirname(), db_data.video.path)
            meta = PrepareInfo(source_path=source_path, meta_path=db_data.get_meta_path())
            for frame in meta.decode_needed_frames(chunk_number, db_data):
                images.append(frame)
            extractor.save_as_chunk([(image, source_path, None) for image in images], buff)
        else:
            with open(db_data.get_dummy_chunk_path(chunk_number), 'r') as dummy_file:
                images = [os.path.join(db_data.get_upload_dirname(), line.strip()) for line in dummy_file]
            extractor.save_as_chunk([(image, image, None) for image in images], buff)
        upload_dir = {
            UploadedDataStorageLocationChoice.LOCAL: db_data.get_upload_dirname(),
            UploadedDataStorageLocationChoice.SHARE: settings.SHARE_ROOT
        }[db_data.uploaded_data_storage_location]
        try:
            if os.path.exists(db_data.get_meta_path()):
                source_path = os.path.join(upload_dir, db_data.video.path)
                meta = PrepareInfo(source_path=source_path, meta_path=db_data.get_meta_path())
                for frame in meta.decode_needed_frames(chunk_number, db_data):
                    images.append(frame)
                extractor.save_as_chunk([(image, source_path, None) for image in images], buff)
            else:
                with open(db_data.get_dummy_chunk_path(chunk_number), 'r') as dummy_file:
                    images = [os.path.join(upload_dir, line.strip()) for line in dummy_file]
                extractor.save_as_chunk([(image, image, None) for image in images], buff)
        except FileNotFoundError as ex:
            slogger.glob.exception(f"{ex.strerror} {ex.filename}")
        buff.seek(0)
        return buff, mime_type
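The core of this hunk is the new upload_dir lookup: chunk frames are now read either from the task's raw upload directory or straight from the mounted share, depending on where the data was left at task creation time. A self-contained sketch of that dispatch with a stubbed data object (the stub and the paths are illustrative; only the attribute names mirror the diff):

# Illustrative stub, not the real models.Data; it mimics only the two
# attributes that the lookup in prepare_chunk_buff relies on.
SHARE_ROOT = '/home/django/share'            # stand-in for settings.SHARE_ROOT

class FakeData:
    def __init__(self, location):
        self.uploaded_data_storage_location = location
    def get_upload_dirname(self):
        return '/home/django/data/42/raw'    # example path, not a real task

def resolve_upload_dir(db_data):
    # Same dict dispatch as in CacheInteraction.prepare_chunk_buff:
    # local uploads come from the task's raw directory, shared data is
    # read directly from the mount without a copy.
    return {
        'local': db_data.get_upload_dirname(),
        'share': SHARE_ROOT,
    }[db_data.uploaded_data_storage_location]

print(resolve_upload_dir(FakeData('local')))   # /home/django/data/42/raw
print(resolve_upload_dir(FakeData('share')))   # /home/django/share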

@@ -171,12 +171,13 @@ class DataSerializer(serializers.ModelSerializer):
    server_files = ServerFileSerializer(many=True, default=[])
    remote_files = RemoteFileSerializer(many=True, default=[])
    use_cache = serializers.BooleanField(default=False)
    copy_data = serializers.BooleanField(default=False)

    class Meta:
        model = models.Data
        fields = ('chunk_size', 'size', 'image_quality', 'start_frame', 'stop_frame', 'frame_filter',
            'compressed_chunk_type', 'original_chunk_type', 'client_files', 'server_files', 'remote_files', 'use_zip_chunks',
            'use_cache')
            'use_cache', 'copy_data')

    # pylint: disable=no-self-use
    def validate_frame_filter(self, value):
@@ -205,6 +206,7 @@ class DataSerializer(serializers.ModelSerializer):
        remote_files = validated_data.pop('remote_files')
        validated_data.pop('use_zip_chunks')
        validated_data.pop('use_cache')
        validated_data.pop('copy_data')
        db_data = models.Data.objects.create(**validated_data)
        data_path = db_data.get_data_dirname()
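On the wire, copy_data is just another boolean in the data payload accepted by this serializer. A hedged example of a client request that attaches share files without copying them (host, credentials and the file path are placeholders; the endpoint shape follows the v1 REST API this serializer backs, so check it against your deployment):

import requests

session = requests.Session()
session.auth = ('user', 'password')          # placeholder credentials
base = 'http://localhost:8080/api/v1'        # placeholder host

payload = {
    'image_quality': 70,
    'use_cache': True,                       # share-backed data goes through the cache path
    'copy_data': False,                      # new flag introduced by this commit
    'server_files[0]': 'movies/clip_0001.mp4',   # example path on the share
}
resp = session.post(f'{base}/tasks/1/data', data=payload)
resp.raise_for_status()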

@@ -15,7 +15,7 @@ from urllib import parse as urlparse
from urllib import request as urlrequest
from cvat.apps.engine.media_extractors import get_mime, MEDIA_TYPES, Mpeg4ChunkWriter, ZipChunkWriter, Mpeg4CompressedChunkWriter, ZipCompressedChunkWriter
from cvat.apps.engine.models import DataChoice, StorageMethodChoice
from cvat.apps.engine.models import DataChoice, StorageMethodChoice, UploadedDataStorageLocationChoice as LocationChoice
from cvat.apps.engine.utils import av_scan_paths
from cvat.apps.engine.prepare import prepare_meta
@@ -232,7 +232,10 @@ def _create_thread(tid, data):
            "File with meta information can be uploaded if 'Use cache' option is also selected"
    if data['server_files']:
        _copy_data_from_share(data['server_files'], upload_dir)
        if db_data.uploaded_data_storage_location == LocationChoice.LOCAL:
            _copy_data_from_share(data['server_files'], upload_dir)
        else:
            upload_dir = settings.SHARE_ROOT

    av_scan_paths(upload_dir)
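_copy_data_from_share itself is untouched by this commit and its body is not shown here. For readers who want a mental model of the LOCAL branch, the following is a hypothetical stand-in that copies each requested share path under the task's upload directory; names and behaviour are assumptions, not the committed helper.

import os
import shutil

def copy_data_from_share(server_files, upload_dir, share_root='/home/django/share'):
    # Hypothetical stand-in for _copy_data_from_share: replicate every
    # requested share path below the task's upload directory, keeping
    # the relative layout intact.
    for rel_path in server_files:
        src = os.path.join(share_root, rel_path)
        dst = os.path.join(upload_dir, rel_path)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        if os.path.isdir(src):
            shutil.copytree(src, dst)
        else:
            shutil.copyfile(src, dst)

When the storage location was set to SHARE instead, the copy branch above is skipped and upload_dir simply points at settings.SHARE_ROOT, so nothing is duplicated on disk.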
@@ -303,10 +306,11 @@ def _create_thread(tid, data):
        try:
            from cvat.apps.engine.prepare import UploadedMeta
            if os.path.split(meta_info_file[0])[0]:
                os.replace(
                    os.path.join(upload_dir, meta_info_file[0]),
                    db_data.get_meta_path()
                )
                if db_data.uploaded_data_storage_location is not LocationChoice.SHARE:
                    os.replace(
                        os.path.join(upload_dir, meta_info_file[0]),
                        db_data.get_meta_path()
                    )
            meta_info = UploadedMeta(source_path=os.path.join(upload_dir, media_files[0]),
                meta_path=db_data.get_meta_path())
            meta_info.check_seek_key_frames()

@@ -36,7 +36,9 @@ import cvat.apps.dataset_manager.views # pylint: disable=unused-import
from cvat.apps.authentication import auth
from cvat.apps.dataset_manager.serializers import DatasetFormatsSerializer
from cvat.apps.engine.frame_provider import FrameProvider
from cvat.apps.engine.models import Job, StatusChoice, Task, StorageMethodChoice
from cvat.apps.engine.models import (
    Job, StatusChoice, Task, StorageMethodChoice, UploadedDataStorageLocationChoice
)
from cvat.apps.engine.serializers import (
    AboutSerializer, AnnotationFileSerializer, BasicUserSerializer,
    DataMetaSerializer, DataSerializer, ExceptionSerializer,
@@ -398,7 +400,9 @@ class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
            if data['use_cache']:
                db_task.data.storage_method = StorageMethodChoice.CACHE
                db_task.data.save(update_fields=['storage_method'])
            if data['server_files'] and data.get('copy_data') == False:
                db_task.data.uploaded_data_storage_location = UploadedDataStorageLocationChoice.SHARE
                db_task.data.save(update_fields=['uploaded_data_storage_location'])
            # if the value of stop_frame is 0, then inside the function we cannot know
            # the value specified by the user or it's default value from the database
            if 'stop_frame' not in serializer.validated_data:
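Read together with the serializer change above (copy_data defaults to False), the view leaves share-provided files in place unless the client explicitly asks for a copy: the location flips to SHARE only when server_files is non-empty and copy_data is False. A minimal, framework-free sketch of that rule:

def stores_on_share(server_files, copy_data):
    # Mirrors the condition added to TaskViewSet: SHARE is chosen only when
    # server_files is non-empty and copy_data evaluates to False.
    return bool(server_files) and copy_data is False

assert stores_on_share(['clip.mp4'], False) is True    # share files, no copy
assert stores_on_share(['clip.mp4'], True) is False    # user asked for a copy
assert stores_on_share([], False) is False             # nothing from the share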
