def save_file(self, content, filename, size, failed=False):
    """Save file with given content in deposit bucket.

    If downloading a content failed, file will be still created,
    with tag `failed`.

    :param content: stream
    :param filename: name that file will be saved with
    :param size: size of content
    :param failed: if failed during downloading the content
    :returns: the created ``ObjectVersion``.
    """
    # Register the object version with an (initially empty) FileInstance so
    # the file exists in the bucket even when the download failed.
    obj = ObjectVersion.create(bucket=self.files.bucket, key=filename)
    obj.file = FileInstance.create()
    # Flush so the new object is visible through self.files[filename] below.
    self.files.flush()
    if not failed:
        # Write the actual bytes into the file instance's storage.
        self.files[filename].file.set_contents(
            content,
            default_location=self.files.bucket.location.uri,
            size=size)
        print('File {} saved ({}b).\n'.format(filename, size))
    else:
        # Mark the empty placeholder so callers can detect the failure.
        ObjectVersionTag.create(object_version=obj, key='status',
                                value='failed')
        print('File {} not saved.\n'.format(filename))
    self.files.flush()
    db.session.commit()
    return obj
def _init_object_version(event):
    """Create, if doesn't exists, the version object.

    :param event: event whose payload may carry either an existing
        ``version_id`` or a ``bucket_id``/``key``/``uri`` to create from.
    :returns: the resolved or newly created ``ObjectVersion``.
    """
    event_id = str(event.id)
    with db.session.begin_nested():
        # create a object version if doesn't exists
        if 'version_id' in event.payload:
            # Reuse the object version referenced by the payload.
            version_id = event.payload['version_id']
            object_version = as_object_version(version_id)
        else:
            # No existing version: create one and record where it came from.
            object_version = ObjectVersion.create(
                bucket=event.payload['bucket_id'],
                key=event.payload['key'])
            ObjectVersionTag.create(object_version, 'uri_origin',
                                    event.payload['uri'])
            version_id = str(object_version.version_id)
        # add tag with corresponding event
        ObjectVersionTag.create_or_update(object_version, '_event_id',
                                          event_id)
        # add tag for preview
        ObjectVersionTag.create_or_update(object_version, 'preview', True)
        # add tags for file type
        ObjectVersionTag.create_or_update(object_version, 'media_type',
                                          'video')
        ObjectVersionTag.create_or_update(object_version, 'context_type',
                                          'master')
    # Expose the version id to the event response for downstream consumers.
    event.response['version_id'] = version_id
    return object_version
def _resolve_file(cls, bucket, file_):
    """Resolve a dumped file into an ``ObjectVersion`` in *bucket*.

    :param bucket: destination bucket.
    :param file_: migration dump dict with at least ``key``, ``filepath``
        and ``tags``; optionally ``tags_to_guess_preset`` and
        ``tags_to_transform``.
    :returns: the new object's ``version_id``, or ``None`` when no
        ``preset_quality`` could be resolved (file is skipped).
    """
    def progress_callback(size, total):
        # BUGFIX: arguments were swapped (format(total, size)), producing
        # "Moving file <total> of <size>"; log moved-of-total instead.
        logging.debug('Moving file {0} of {1}'.format(size, total))

    # resolve preset info
    tags_to_guess_preset = file_.get('tags_to_guess_preset', {})
    if tags_to_guess_preset:
        file_['tags'].update(
            **cls._resolve_preset(obj=None, clues=tags_to_guess_preset))

    # we cannot deal with it now: skip (delete) the file
    if 'preset_quality' not in file_['tags']:
        return None

    # create object
    stream, size = cls._get_migration_file_stream_and_size(file_=file_)
    obj = ObjectVersion.create(bucket=bucket,
                               key=file_['key'],
                               stream=stream,
                               size=size,
                               progress_callback=progress_callback)

    tags_to_transform = file_.get('tags_to_transform', {})
    # resolve timestamp
    if 'timestamp' in tags_to_transform:
        file_['tags']['timestamp'] = tags_to_transform['timestamp']

    # Add DFS path to run ffmpeg without copying the file
    file_['tags']['dfs_path'] = cls._get_full_path(
        filepath=file_['filepath'])

    # create tags
    for key, value in file_.get('tags', {}).items():
        ObjectVersionTag.create(obj, key, value)
    db.session.commit()
    return obj.version_id
def create_slave(key):
    """Create a slave object version linked to the master.

    :param key: key for the new slave object.
    :returns: the created ``ObjectVersion``.
    """
    # Close the source file handle once the stream has been consumed;
    # the original `open(...)` was never closed (handle leak).
    with open(video, 'rb') as stream:
        slave = ObjectVersion.create(bucket=bucket_id, key=key,
                                     stream=stream)
    # Link the slave to its master via the 'master' tag.
    ObjectVersionTag.create(slave, 'master', str(master_obj.version_id))
    return slave
def test_tag_manager_update(api, users, location, es, update_style):
    """``TagManager.update`` overwrites/adds tags and persists them.

    Parametrized by ``update_style``: tags can be updated from a dict of
    ``ObjectTagKey`` enums or from string keyword arguments.
    """
    with api.test_request_context():
        bucket = Bucket.create()
        object_version = ObjectVersion.create(bucket=bucket, key="hello")
        # Seed two tags directly through the model layer.
        ObjectVersionTag.create(
            object_version=object_version,
            key=ObjectTagKey.Packaging.value,
            value="old-packaging",
        )
        ObjectVersionTag.create(
            object_version=object_version,
            key=ObjectTagKey.MetadataFormat.value,
            value="old-metadata",
        )
        tags = TagManager(object_version)
        assert (
            ObjectVersionTag.query.filter_by(object_version=object_version).count()
            == 2
        )
        assert tags == {
            ObjectTagKey.Packaging: "old-packaging",
            ObjectTagKey.MetadataFormat: "old-metadata",
        }
        if update_style == "dict":
            tags.update(
                {
                    ObjectTagKey.MetadataFormat: "new-metadata",
                    ObjectTagKey.DerivedFrom: "new-derived-from",
                }
            )
        elif update_style == "kwargs":
            tags.update(
                **{
                    ObjectTagKey.MetadataFormat.value: "new-metadata",
                    ObjectTagKey.DerivedFrom.value: "new-derived-from",
                }
            )
        # One tag overwritten, one added, one left untouched.
        assert tags == {
            ObjectTagKey.Packaging: "old-packaging",
            ObjectTagKey.MetadataFormat: "new-metadata",
            ObjectTagKey.DerivedFrom: "new-derived-from",
        }
        assert (
            ObjectVersionTag.query.filter_by(object_version=object_version).count()
            == 3
        )
        # Reload from the DB to prove changes were actually persisted.
        db.session.refresh(object_version)
        assert object_version.get_tags() == {
            ObjectTagKey.Packaging.value: "old-packaging",
            ObjectTagKey.MetadataFormat.value: "new-metadata",
            ObjectTagKey.DerivedFrom.value: "new-derived-from",
        }
def test_put_fileset_url(api, users, location, es, task_delay):
    """PUT on the fileset URL replaces the old file with the new one.

    The old object version must end up deleted (delete-marker head) and a
    new object version must be created for the uploaded file.
    """
    with api.test_request_context(), api.test_client() as client:
        client.post(
            url_for_security("login"),
            data={
                "email": users[0]["email"],
                "password": "******"
            },
        )
        record = SWORDDeposit.create({})
        record.commit()
        # Pre-existing file, tagged as part of the fileset.
        object_version = ObjectVersion.create(
            record.bucket,
            key="old-file.txt",
            stream=io.BytesIO(b"hello"),
            mimetype="text/plain",
        )
        ObjectVersionTag.create(
            object_version=object_version,
            key=ObjectTagKey.FileSetFile.value,
            value="true",
        )
        db.session.commit()

        response = client.put(
            url_for("invenio_sword.depid_fileset",
                    pid_value=record.pid.pid_value),
            data=b"hello again",
            headers={
                "Content-Disposition": "attachment; filename=new-file.txt",
                "Content-Type": "text/plain",
            },
        )
        assert response.status_code == HTTPStatus.NO_CONTENT

        # The cleanup work is delegated to a task; run it synchronously.
        assert task_delay.call_count == 1
        task_self = task_delay.call_args[0][0]
        task_self.apply()

        # Check original ObjectVersion is marked deleted
        original_object_versions = list(
            ObjectVersion.query.filter_by(
                bucket=record.bucket,
                key="old-file.txt").order_by("created"))
        assert len(original_object_versions) == 2
        assert not original_object_versions[0].is_head
        # Head with file=None is a delete marker.
        assert original_object_versions[1].is_head
        assert original_object_versions[1].file is None

        # Check new object has been created
        new_object_version = ObjectVersion.query.filter_by(
            bucket=record.bucket, key="new-file.txt").one()
        assert new_object_version.is_head
def _resolve_master_tag(cls, deposit):
    """Create the master tag for dependent files.

    :param deposit: deposit whose dependent objects (frames, subformats,
        …) get a ``master`` tag pointing at the master video.
    """
    # build a partial files dump
    cls._resolve_dumps(record=deposit)
    # get master
    master_video = CDSVideosFilesIterator.get_master_video_file(deposit)
    # get deposit bucket
    bucket = cls._get_bucket(record=deposit)
    # attach the master tag
    for obj in ObjectVersion.get_by_bucket(bucket=bucket):
        # Use .get() so objects lacking a 'context_type' tag are skipped
        # instead of raising KeyError (None is never a dependent type).
        if obj.get_tags().get('context_type') in cls.dependent_objs:
            ObjectVersionTag.create(obj, 'master',
                                    master_video['version_id'])
def test_object_version_tags(app, db, dummy_location):
    """Test object version tags.

    Covers create, duplicate-key integrity error, get/get_value, delete,
    create_or_update, dict dump, copy propagation and cascade delete.
    """
    f = FileInstance(uri="f1", size=1, checksum="mychecksum")
    db.session.add(f)
    db.session.commit()
    b = Bucket.create()
    obj1 = ObjectVersion.create(b, "test").set_file(f)
    ObjectVersionTag.create(obj1, "mykey", "testvalue")
    ObjectVersionTag.create(obj1, "another_key", "another value")
    db.session.commit()
    # Duplicate key: (object_version, key) must be unique.
    pytest.raises(
        IntegrityError, ObjectVersionTag.create, obj1, "mykey", "newvalue")
    # Test get
    assert ObjectVersionTag.query.count() == 2
    assert ObjectVersionTag.get(obj1, "mykey").value == "testvalue"
    # get_value accepts either the object or a raw version_id.
    assert ObjectVersionTag.get_value(obj1.version_id, "another_key") \
        == "another value"
    assert ObjectVersionTag.get_value(obj1, "invalid") is None
    # Test delete
    ObjectVersionTag.delete(obj1, "mykey")
    assert ObjectVersionTag.query.count() == 1
    # Deleting a non-existent tag is a silent no-op.
    ObjectVersionTag.delete(obj1, "invalid")
    assert ObjectVersionTag.query.count() == 1
    # Create or update
    ObjectVersionTag.create_or_update(obj1, "another_key", "newval")
    ObjectVersionTag.create_or_update(obj1.version_id, "newkey", "testval")
    db.session.commit()
    assert ObjectVersionTag.get_value(obj1, "another_key") == "newval"
    assert ObjectVersionTag.get_value(obj1, "newkey") == "testval"
    # Get tags as dictionary
    assert obj1.get_tags() == dict(another_key="newval", newkey="testval")
    obj2 = ObjectVersion.create(b, 'test2')
    assert obj2.get_tags() == dict()
    # Copy object version: tags are copied along with the object.
    obj_copy = obj1.copy()
    db.session.commit()
    assert obj_copy.get_tags() == dict(another_key="newval",
                                       newkey="testval")
    assert ObjectVersionTag.query.count() == 4
    # Cascade delete: removing object versions removes their tags.
    ObjectVersion.query.delete()
    db.session.commit()
    assert ObjectVersionTag.query.count() == 0
def _upload_video_and_publish(video):
    """Attach a fake master file to *video*, publish it and return both
    the published deposit and the fetched published record."""
    deposit_bucket = video['_buckets']['deposit']
    payload = ('fake video file ' + _random_string()).encode('utf-8')
    master = ObjectVersion.create(bucket=deposit_bucket,
                                  key='master.mp4',
                                  stream=BytesIO(payload))
    ObjectVersionTag.create(master, 'context_type', 'master')
    prepare_videos_for_publish([video])
    published = video.publish()
    _, published_record = published.fetch_published()
    return published, published_record
def video_extract_frames(self, object_version, frames_start=5,
                         frames_end=95, frames_gap=1, **kwargs):
    """Extract images from some frames of the video.

    Each of the frame images generates an ``ObjectVersion`` tagged as
    "frame" using ``ObjectVersionTags``.

    :param object_version: master video to extract frames from.
    :param frames_start: start percentage, default 5.
    :param frames_end: end percentage, default 95.
    :param frames_gap: percentage between frames from start to end,
        default 10.
    """
    object_version = as_object_version(object_version)
    self._base_payload = dict()
    output_folder = tempfile.mkdtemp()

    def progress_updater(seconds, duration):
        """Progress reporter."""
        meta = dict(
            payload=dict(
                size=duration,
                # BUGFIX: `seconds or 0.0 / duration * 100` parsed as
                # `seconds or (0.0 / duration * 100)`, i.e. always just
                # `seconds`. Parenthesize to get a real percentage.
                percentage=(seconds or 0.0) / duration * 100,
            ),
            message='Extracting frames {0} of {1} seconds'.format(
                seconds, duration),
        )
        self.update_state(state=STARTED, meta=meta)

    try:
        ff_frames(object_version.file.uri, frames_start, frames_end,
                  frames_gap, os.path.join(output_folder, 'frame-%d.jpg'),
                  progress_callback=progress_updater)

        for filename in os.listdir(output_folder):
            # Close each frame's file handle after the stream is consumed
            # (previously leaked).
            with open(os.path.join(output_folder, filename), 'rb') as frame:
                obj = ObjectVersion.create(bucket=object_version.bucket,
                                           key=filename,
                                           stream=frame)
            ObjectVersionTag.create(obj, 'master',
                                    object_version.version_id)
    finally:
        # Remove the scratch folder even if extraction/upload fails.
        shutil.rmtree(output_folder)
    db.session.commit()
def transcode_task(bucket, filesize, filename, preset_qualities):
    """Get a transcode task.

    Creates a zero-filled master object of ``filesize`` bytes and returns
    its version id together with one transcode signature per preset.
    """
    master = ObjectVersion.create(bucket, key=filename,
                                  stream=BytesIO(b'\x00' * filesize))
    ObjectVersionTag.create(master, 'display_aspect_ratio', '16:9')
    version_id = str(master.version_id)
    db.session.commit()
    signatures = []
    for quality in preset_qualities:
        signatures.append(TranscodeVideoTask().s(version_id=version_id,
                                                 preset_quality=quality,
                                                 sleep_time=0))
    return version_id, signatures
def create_video_tags(obj, context_type, bitrate=None, smil=True):
    """Create video tags.

    :param obj: object version to tag.
    :param context_type: value for the ``context_type`` tag.
    :param bitrate: optional video bitrate; defaults to 123456.
    :param smil: whether to also add the ``smil`` tag.
    """
    tags = [('width', 1000),
            ('height', 1000),
            ('bit_rate', 123456),
            ('video_bitrate', bitrate or 123456),
            ('media_type', 'video'),
            ('context_type', context_type),
            ]
    # Append smil tag
    if smil:
        tags.append(('smil', True))
    # Plain loop: a list comprehension used purely for its side effects
    # builds a throwaway list and is unidiomatic.
    for key, val in tags:
        ObjectVersionTag.create(obj, key, val)
def _resolve_file(cls, deposit, bucket, file_):
    """Resolve file.

    Creates the object version from the migration stream, resolves preset
    and timestamp tags, and persists all tags on the new object.
    """
    # Materialise the migration stream as an object version under the
    # file's original key.
    obj = ObjectVersion.create(
        bucket=bucket,
        key=file_['key'],
        stream=cls._get_migration_file_stream(file_=file_))

    # Guess preset information from the dumped clues, if any.
    preset_clues = file_.get('tags_to_guess_preset', {})
    if preset_clues:
        guessed = cls._resolve_preset(obj=obj, clues=preset_clues)
        file_['tags'].update(**guessed)

    # Carry over a transformed timestamp tag when present.
    transforms = file_.get('tags_to_transform', {})
    if 'timestamp' in transforms:
        file_['tags']['timestamp'] = transforms['timestamp']

    # Persist every tag on the new object version.
    for tag_key, tag_value in file_.get('tags', {}).items():
        ObjectVersionTag.create(obj, tag_key, tag_value)
def test_transcode_and_undo(db, cds_depid, mock_sorenson):
    """Test TranscodeVideoTask task.

    Runs the transcode (adding a slave file) and then its ``clean``
    undo (removing it); bucket keys and sizes are checked after each step.
    """
    def get_bucket_keys():
        # Current object keys in the deposit bucket.
        return [o.key for o in list(ObjectVersion.get_by_bucket(bucket))]

    bucket = deposit_project_resolver(cds_depid).files.bucket
    filesize = 1024
    filename = 'test.mp4'
    preset_quality = '480p'
    new_filename = '{0}.mp4'.format(preset_quality)
    obj = ObjectVersion.create(bucket, key=filename,
                               stream=BytesIO(b'\x00' * filesize))
    ObjectVersionTag.create(obj, 'display_aspect_ratio', '16:9')
    obj_id = str(obj.version_id)
    db.session.commit()
    assert get_bucket_keys() == [filename]
    assert bucket.size == filesize

    task_s = TranscodeVideoTask().s(version_id=obj_id,
                                    preset_quality=preset_quality,
                                    sleep_time=0)
    # Transcode
    task_s.delay(deposit_id=cds_depid)

    db.session.add(bucket)
    keys = get_bucket_keys()
    # Master plus the new transcoded slave.
    assert len(keys) == 2
    assert filename in keys
    assert new_filename in keys
    assert bucket.size == 2 * filesize

    # Undo
    TranscodeVideoTask().clean(version_id=obj_id,
                               preset_quality=preset_quality)

    db.session.add(bucket)
    keys = get_bucket_keys()
    assert len(keys) == 1
    assert filename in keys
    assert new_filename not in keys
    # file size doesn't change
    assert bucket.size == 2 * filesize
def test_legacy_embed(previewer_app, db, api_project, video, users):
    """Test backwards-compatibility with legacy embed URL for videos.

    ``/video/<report_number>`` must redirect to the record embed view.
    """
    project, video_1, _ = api_project
    filename = 'test.mp4'
    bucket_id = video_1['_buckets']['deposit']
    # Attach a master video file, tagged for preview.
    obj = ObjectVersion.create(bucket=bucket_id, key=filename,
                               stream=open(video, 'rb'))
    ObjectVersionTag.create(obj, 'context_type', 'master')
    ObjectVersionTag.create(obj, 'preview', True)
    login_user(User.query.get(users[0]))
    prepare_videos_for_publish([video_1])
    video_1 = video_1.publish()

    with previewer_app.test_client() as client:
        res = client.get('/video/{0}'.format(video_1.report_number))
        # The legacy URL should redirect to the embed page of the record.
        assert res.location.endswith(url_for(
            'invenio_records_ui.recid_embed_default',
            pid_value=video_1['recid'],
        ))
def create_object(key, media_type, context_type, **tags):
    """Create object versions with given type and tags.

    :param key: object key (also the source filename via ``in_output``).
    :param media_type: value for the ``media_type`` tag.
    :param context_type: value for the ``context_type`` tag.
    :param tags: extra tag key/value pairs to attach.
    """
    # Close the source file once the stream has been consumed; the
    # original `open(...)` was never closed (handle leak).
    with open(in_output(key), 'rb') as stream:
        obj = ObjectVersion.create(bucket=self.object.bucket, key=key,
                                   stream=stream)
    ObjectVersionTag.create(obj, 'master', self.obj_id)
    ObjectVersionTag.create(obj, 'media_type', media_type)
    ObjectVersionTag.create(obj, 'context_type', context_type)
    # Plain loop instead of a side-effect list comprehension.
    for k, v in tags.items():
        ObjectVersionTag.create(obj, k, v)
def _create_object(cls, bucket, key, stream, size, media_type,
                   context_type, master_id, **tags):
    """Create object versions with given type and tags.

    :param bucket: destination bucket.
    :param key: object key.
    :param stream: file-like object with the content.
    :param size: content size in bytes.
    :param media_type: value for the ``media_type`` tag.
    :param context_type: value for the ``context_type`` tag.
    :param master_id: version id of the master object this one belongs to.
    :param tags: extra tag key/value pairs to attach.
    """
    obj = ObjectVersion.create(
        bucket=bucket, key=key, stream=stream, size=size)
    ObjectVersionTag.create(obj, 'master', str(master_id))
    ObjectVersionTag.create(obj, 'media_type', media_type)
    ObjectVersionTag.create(obj, 'context_type', context_type)
    # Plain loop instead of a side-effect list comprehension.
    for k, v in tags.items():
        ObjectVersionTag.create(obj, k, v)
def _process_files(record, files_metadata):
    """Attach files to a record with a given metadata.

    Assumptions:

      - The source must be a URL pointing to a tar file.
      - All files listed in the metadata are inside the source tar.
      - Master files are listed before slaves.
      - The reference from the slave to master is done via key.

    :param record: record to attach the files to.
    :param files_metadata: dict with ``source`` (tar URL) and ``metadata``
        (list of per-file dicts with ``key`` and ``tags``).
    """
    if not files_metadata:
        return
    bucket = Bucket.create(location=Location.get_by_name('videos'))
    RecordsBuckets.create(record=record.model, bucket=bucket)
    # NOTE(review): verify=False disables TLS certificate validation —
    # confirm the source host is trusted / internal.
    response = requests.get(
        files_metadata['source'], stream=True, verify=False)
    # Throw an error for bad status codes
    response.raise_for_status()
    # Download the tar to a temporary file (kept after close: delete=False).
    with tempfile.NamedTemporaryFile(suffix='.tar', delete=False) as f:
        for chunk in response:
            f.write(chunk)
    # NOTE(review): extractall() on an untrusted tar permits path
    # traversal via crafted member names — confirm the source is trusted.
    tar = tarfile.open(name=f.name)
    tar.extractall(path=tempfile.gettempdir())
    # Assumes the tar's first member name is its top-level directory.
    files_base_dir = os.path.join(tempfile.gettempdir(), tar.getnames()[0])
    tar.close()
    os.remove(f.name)
    for f in files_metadata['metadata']:
        obj = ObjectVersion.create(bucket, f['key'])
        with open(os.path.join(files_base_dir, f['key']), 'rb') as fp:
            obj.set_contents(fp)
        for k, v in f['tags'].items():
            # 'master' tags reference another object by key: resolve to its
            # version id (masters were ingested first, so it exists).
            if k == 'master':
                v = ObjectVersion.get(bucket, v).version_id
            ObjectVersionTag.create(obj, k, v)
    shutil.rmtree(files_base_dir)
    record['_files'] = record.files.dumps()
def _resolve_file(cls, bucket, file_):
    """Resolve a dumped file into an ``ObjectVersion`` in *bucket*.

    :param bucket: destination bucket.
    :param file_: migration dump dict with at least ``key`` and ``tags``;
        optionally ``tags_to_guess_preset`` and ``tags_to_transform``.
    """
    def progress_callback(size, total):
        # BUGFIX: arguments were swapped (format(total, size)), producing
        # "Moving file <total> of <size>"; log moved-of-total instead.
        logging.debug('Moving file {0} of {1}'.format(size, total))

    # create object
    stream, size = cls._get_migration_file_stream_and_size(file_=file_)
    obj = ObjectVersion.create(
        bucket=bucket, key=file_['key'], stream=stream, size=size,
        progress_callback=progress_callback)

    # resolve preset info
    tags_to_guess_preset = file_.get('tags_to_guess_preset', {})
    if tags_to_guess_preset:
        file_['tags'].update(**cls._resolve_preset(
            obj=obj, clues=tags_to_guess_preset))

    tags_to_transform = file_.get('tags_to_transform', {})
    # resolve timestamp
    if 'timestamp' in tags_to_transform:
        file_['tags']['timestamp'] = tags_to_transform['timestamp']

    # create tags
    for key, value in file_.get('tags', {}).items():
        ObjectVersionTag.create(obj, key, value)
def create_metadata_tags(cls, object_, keys, uri=None):
    """Extract metadata from the video and create corresponding tags.

    :param object_: object version to attach the metadata tags to.
    :param keys: whitelist of metadata keys to store as tags.
    :param uri: optional file URI; defaults to the object's file URI.
    :returns: dict with all extracted metadata (not only whitelisted keys).
    """
    uri = uri or object_.file.uri

    # Extract video's metadata using `ff_probe`
    metadata = ff_probe_all(uri)
    extracted_dict = dict(metadata['format'], **metadata['streams'][0])

    # Add technical information to the ObjectVersion as Tags.
    # Plain loop instead of a side-effect list comprehension.
    for k, v in extracted_dict.items():
        if k in keys:
            ObjectVersionTag.create(object_, k, v)
    db.session.refresh(object_)
    return extracted_dict
def move_file_into_local(obj, delete=True):
    """Move file from XRootD accessed file system into a local path.

    Generator used as a context manager: yields a locally accessible
    path for the object's file, downloading it to a temp folder when
    needed and cleaning up afterwards.

    :param obj: Object version to make locally available.
    :param delete: Whether or not the tmp file should be deleted on exit.
    """
    if os.path.exists(obj.file.uri):
        # Already local: nothing to copy or clean up.
        yield obj.file.uri
    # TODO: remove migration hack
    # Check if we are migrating
    elif obj.get_tags().get('dfs_path', None):
        # This is a special situation!
        yield obj.get_tags().get('dfs_path', None)
    else:
        temp_location = obj.get_tags().get('temp_location', None)
        if not temp_location:
            # First access: copy the remote file into a fresh temp folder
            # and remember the location via a tag.
            temp_folder = tempfile.mkdtemp()
            temp_location = os.path.join(temp_folder, 'data')

            with open(temp_location, 'wb') as dst:
                shutil.copyfileobj(
                    file_opener_xrootd(obj.file.uri, 'rb'), dst)

            ObjectVersionTag.create(obj, 'temp_location', temp_location)
            db.session.commit()
        else:
            temp_folder = os.path.dirname(temp_location)
        try:
            yield temp_location
        # BUGFIX(idiom): bare `except:` replaced by the behaviorally
        # identical `except BaseException:` — we still clean up and
        # re-raise everything, including KeyboardInterrupt/SystemExit.
        except BaseException:
            shutil.rmtree(temp_folder)
            ObjectVersionTag.delete(obj, 'temp_location')
            db.session.commit()
            raise
        else:
            if delete:
                shutil.rmtree(temp_folder)
                ObjectVersionTag.delete(obj, 'temp_location')
                db.session.commit()
def test_put_status_document(api, users, location, es):
    """An empty PUT on the deposit resets it, removing existing files."""
    with api.test_request_context(), api.test_client() as client:
        client.post(
            url_for_security("login"),
            data={
                "email": users[0]["email"],
                "password": "******"
            },
        )
        record = SWORDDeposit.create({})
        record.commit()
        db.session.commit()
        # Pre-existing fileset file that the reset should remove.
        object_version = ObjectVersion.create(
            record.bucket,
            "file.n3",
            mimetype="text/n3",
            stream=io.BytesIO(b"1 _:a 2 ."),
        )
        ObjectVersionTag.create(
            object_version=object_version,
            key=ObjectTagKey.FileSetFile.value,
            value="true",
        )

        response = client.put(
            "/sword/deposit/{}".format(record.pid.pid_value), data=b"")
        assert response.status_code == HTTPStatus.OK

        # This should have removed the previous file, as the empty PUT is
        # a reset.
        object_versions = list(
            ObjectVersion.query.filter_by(
                bucket=record.bucket).order_by("created"))
        assert len(object_versions) == 2
        assert not object_versions[0].is_head
        # Head with file=None is a delete marker.
        assert object_versions[1].is_head
        assert object_versions[1].file is None
def test_tag_manager_delitem(api, users, location, es):
    """Deleting a key from ``TagManager`` removes the underlying DB tag."""
    value = "http://example.org/"
    with api.test_request_context():
        bucket = Bucket.create()
        object_version = ObjectVersion.create(bucket=bucket, key="hello")
        ObjectVersionTag.create(
            object_version=object_version,
            key=ObjectTagKey.Packaging.value,
            value=value
        )
        tags = TagManager(object_version)
        assert tags == {ObjectTagKey.Packaging: value}
        assert tags[ObjectTagKey.Packaging] == value

        del tags[ObjectTagKey.Packaging]

        assert tags == {}
        # Accessing a deleted key must raise, like a normal mapping.
        with pytest.raises(KeyError):
            _ = tags[ObjectTagKey.Packaging]

        # We've deleted the database object
        assert (
            ObjectVersionTag.query.filter_by(object_version=object_version).count()
            == 0
        )
def _init_object_version(event):
    """Create the version object.

    Builds the master object version from the event payload and tags it
    with its origin URI, the event id and the master context type.
    """
    payload = event.payload
    with db.session.begin_nested():
        obj = ObjectVersion.create(bucket=payload['bucket_id'],
                                   key=payload['key'])
        for tag_key, tag_value in (('uri_origin', payload['uri']),
                                   ('_event_id', str(event.id)),
                                   ('context_type', 'master')):
            ObjectVersionTag.create(obj, tag_key, tag_value)
    return obj
def generate_smil_file(record_id, record, bucket, master_object, **kwargs):
    """Generate SMIL file for Video record (on publish).

    Serializes the record to SMIL and stores it as a playlist object
    version linked to the master video.
    """
    master = as_object_version(master_object)

    # Derive the SMIL key from the master's key (swap the extension).
    smil_key = '{0}.smil'.format(master.key.rsplit('.', 1)[0])
    smil_content = SmilSerializer.serialize(record_id, record, **kwargs)

    # Create ObjectVersion for SMIL file
    with db.session.begin_nested():
        smil_obj = ObjectVersion.create(
            bucket=bucket, key=smil_key,
            stream=BytesIO(smil_content.encode()))
        for tag_key, tag_value in (('master', str(master.version_id)),
                                   ('context_type', 'playlist'),
                                   ('media_type', 'text')):
            ObjectVersionTag.create(smil_obj, tag_key, tag_value)
def test_publish_process_files(api_app, db, location):
    """Test _process_files changing master tags on bucket snapshots.

    After snapshotting, each slave's ``master`` tag must point at the
    master object *within the same bucket*, not the original one.
    """
    deposit = CDSDeposit.create(
        dict(
            date='1/2/3',
            category='cat',
            type='type',
            title=dict(title='title'),
            report_number=['1234'],
            videos=[]),
        bucket_location='videos')
    # deposit has no files, so _process_files must yield None
    with deposit._process_files(None, dict()) as data:
        assert data is None
    bucket = deposit.files.bucket
    # One master and several tagged slaves pointing at it.
    master_obj = ObjectVersion.create(
        bucket=bucket,
        key='master',
        _file_id=FileInstance.create())
    number_of_slaves = 10
    for i in range(number_of_slaves):
        slave_obj = ObjectVersion.create(
            bucket=bucket,
            key='slave{}.mp4'.format(i + 1),
            _file_id=FileInstance.create())
        ObjectVersionTag.create(slave_obj, 'master', master_obj.version_id)
        ObjectVersionTag.create(slave_obj, 'media_type', 'video')
        ObjectVersionTag.create(slave_obj, 'context_type', 'subformat')
    assert Bucket.query.count() == 1
    with deposit._process_files(None, dict()):
        # the snapshot bucket must have been created
        assert Bucket.query.count() == 2
        for bucket in Bucket.query.all():
            # The master is the only object without a 'master' tag.
            master_version = [str(obj.version_id) for obj in bucket.objects
                              if 'master' not in obj.get_tags()][0]
            # the master of each slave must be in the same bucket
            for obj in bucket.objects:
                if str(obj.version_id) != master_version:
                    assert obj.get_tags()['master'] == master_version
                    assert obj.get_tags()['media_type'] == 'video'
                    assert obj.get_tags()['context_type'] == 'subformat'
def test_transcode_too_high_resolutions(db, bucket):
    """Test trascoding task when it should discard some high resolutions.

    A 640x360 master asked for a 480p subformat must be ignored, since
    the requested preset exceeds the source resolution.
    """
    filesize = 1024
    filename = 'test.mp4'
    preset_quality = '480p'

    # Master smaller (360p) than the requested preset (480p).
    obj = ObjectVersion.create(bucket, key=filename,
                               stream=BytesIO(b'\x00' * filesize))
    ObjectVersionTag.create(obj, 'display_aspect_ratio', '16:9')
    ObjectVersionTag.create(obj, 'height', 360)
    ObjectVersionTag.create(obj, 'width', 640)
    obj_id = str(obj.version_id)
    db.session.commit()

    task_s = TranscodeVideoTask().s(version_id=obj_id,
                                    preset_quality=preset_quality,
                                    sleep_time=0)

    # Transcode
    result = task_s.delay()
    assert result.status == states.IGNORED
def run(self, preset_quality, sleep_time=5, *args, **kwargs):
    """Launch video transcoding.

    For each of the presets generate a new ``ObjectVersion`` tagged as
    slave with the preset name as key and a link to the master version.

    :param self: reference to instance of task base class
    :param preset_quality: preset quality to use for transcoding.
    :param sleep_time: time interval between requests for the Sorenson
        status.
    """
    self._base_payload.update(preset_quality=preset_quality)

    # Get master file's bucket_id
    bucket_id = self.object.bucket_id
    bucket_location = self.object.bucket.location.uri
    # Get master file's key
    master_key = self.object.key

    tags = self.object.get_tags()
    # Get master file's aspect ratio
    aspect_ratio = tags['display_aspect_ratio']
    # Get master file's width x height (optional tags)
    width = int(tags['width']) if 'width' in tags else None
    height = int(tags['height']) if 'height' in tags else None

    with db.session.begin_nested():
        # Create FileInstance
        file_instance = FileInstance.create()

        # Create ObjectVersion for the slave (content filled in later)
        obj_key = self._build_slave_key(preset_quality=preset_quality,
                                        master_key=master_key)
        obj = ObjectVersion.create(bucket=bucket_id, key=obj_key)

        # Extract new location where Sorenson will write the output
        storage = file_instance.storage(default_location=bucket_location)
        directory, filename = storage._get_fs()

    input_file = self.object.file.uri
    output_file = os.path.join(directory.root_path, filename)
    try:
        # Start Sorenson
        job_id = start_encoding(input_file, output_file, preset_quality,
                                aspect_ratio, max_height=height,
                                max_width=width)
    except (InvalidResolutionError, TooHighResolutionError) as e:
        # Preset not applicable to this master: revoke and ignore.
        exception = self._meta_exception_envelope(exc=e)
        self.update_state(state=REVOKED, meta=exception)
        raise Ignore()

    # Set revoke handler, in case of an abrupt execution halt.
    self.set_revoke_handler(partial(stop_encoding, job_id))

    # Create ObjectVersionTags
    ObjectVersionTag.create(obj, 'master', self.obj_id)
    ObjectVersionTag.create(obj, '_sorenson_job_id', job_id)
    ObjectVersionTag.create(obj, 'preset_quality', preset_quality)
    ObjectVersionTag.create(obj, 'media_type', 'video')
    ObjectVersionTag.create(obj, 'context_type', 'subformat')
    preset_info = get_preset_info(aspect_ratio, preset_quality)
    for key, value in preset_info.items():
        ObjectVersionTag.create(obj, key, value)

    # Information necessary for monitoring
    job_info = dict(
        preset_quality=preset_quality,
        job_id=job_id,
        file_instance=str(file_instance.id),
        uri=output_file,
        version_id=str(obj.version_id),
        key=obj_key,
        tags=obj.get_tags(),
        percentage=0,
    )

    db.session.commit()
    self.update_state(state=STARTED,
                      meta=dict(payload=dict(**job_info),
                                message='Started transcoding.'))

    status = ''
    # Monitor job and report accordingly
    while status != 'Finished':
        # Get job status
        status, percentage = get_encoding_status(job_id)
        if status == 'Error':
            raise RuntimeError('Error transcoding')
        job_info['percentage'] = percentage

        # Update task's state for this preset
        self.update_state(
            state=STARTED,
            meta=dict(payload=dict(**job_info),
                      message='Transcoding {0}'.format(percentage)))
        # Poll Sorenson at the configured interval.
        time.sleep(sleep_time)

    # Set file's location, if job has completed
    self._clean_file_name(output_file)
    with db.session.begin_nested():
        uri = output_file
        with open(uri, 'rb') as transcoded_file:
            digest = hashlib.md5(transcoded_file.read()).hexdigest()
        size = os.path.getsize(uri)
        checksum = '{0}:{1}'.format('md5', digest)
        # Attach the transcoded output to the slave object version.
        file_instance.set_uri(uri, size, checksum)
        as_object_version(job_info['version_id']).set_file(file_instance)
    db.session.commit()
def add_video_tags(video_object):
    """Add standard technical metadata tags to a video."""
    technical_metadata = (
        ('duration', '60.095000'),
        ('width', '640'),
        ('height', '360'),
        ('display_aspect_ratio', '16:9'),
    )
    for name, value in technical_metadata:
        ObjectVersionTag.create(video_object, name, value)
def test_video_dumps(db, api_project, video):
    """Test video dump, in particular file dump.

    Frames must be grouped under the master and sorted by frame number,
    subformats listed separately.
    """
    (project, video_1, video_2) = api_project
    bucket_id = video_1['_buckets']['deposit']

    # One master, one subformat slave and ten frame slaves.
    obj = ObjectVersion.create(bucket=bucket_id, key='master.mp4',
                               stream=open(video, 'rb'))
    slave_1 = ObjectVersion.create(bucket=bucket_id, key='slave_1.mp4',
                                   stream=open(video, 'rb'))
    ObjectVersionTag.create(slave_1, 'master', str(obj.version_id))
    ObjectVersionTag.create(slave_1, 'media_type', 'video')
    ObjectVersionTag.create(slave_1, 'context_type', 'subformat')
    # Frames created in reverse order to verify the dump sorts them.
    for i in reversed(range(10)):
        slave = ObjectVersion.create(bucket=bucket_id,
                                     key='frame-{0}.jpeg'.format(i),
                                     stream=BytesIO(b'\x00' * 1024))
        ObjectVersionTag.create(slave, 'master', str(obj.version_id))
        ObjectVersionTag.create(slave, 'media_type', 'image')
        ObjectVersionTag.create(slave, 'context_type', 'frame')
    db.session.commit()

    files = video_1.files.dumps()
    assert len(files) == 1
    files = files[0]
    # only one master file
    assert 'frame' in files
    # Frames come back ordered by index despite reversed insertion.
    assert [f['key'] for f in files['frame']
            ] == ['frame-{}.jpeg'.format(i) for i in range(10)]
    assert 'subformat' in files
    assert len(files['subformat']) == 1