예제 #1
0
def test_fileinstance_get(app, db, dummy_location):
    """Verify FileInstance.get for both existing and unknown ids."""
    instance = FileInstance.create()
    db.session.commit()

    # A committed instance is retrievable by its id.
    assert FileInstance.get(instance.id) is not None

    # An unknown id yields None instead of raising.
    assert FileInstance.get(uuid.uuid4()) is None
예제 #2
0
def test_fileinstance_get(app, db, dummy_location):
    """Test that FileInstance lookup works and misses return None."""
    created = FileInstance.create()
    db.session.commit()
    # Lookup by the freshly persisted id succeeds.
    found = FileInstance.get(created.id)
    assert found is not None
    # Lookup by a random uuid fails quietly with None.
    missing = FileInstance.get(uuid.uuid4())
    assert missing is None
예제 #3
0
 def _get_frames(cls, master_video):
     """Return the storage URIs of the frames extracted from a master video."""
     frames = CDSVideosFilesIterator.get_video_frames(
         master_file=master_video)
     return [FileInstance.get(frame['file_id']).uri for frame in frames]
예제 #4
0
    def delete_record(self, fileinstance_id, record_uuid):
        """Delete a record together with its file and on-disk data.

        Removes the search-index entry, the record's bucket, the
        ``FileInstance`` row, the persistent identifier, and finally the
        record's data directory on disk.

        :param fileinstance_id: The file instance id.
        :param record_uuid: The record's uuid.
        """
        # Resolve the stored file and remember its uri before the row is
        # deleted below.
        file_instance = FileInstance.get(fileinstance_id)
        uri = file_instance.uri
        # Locate the "data" folder inside the uri; below it the layout is
        # data/<f1>/<f2>/<bucketid>/<filename>.
        i = uri.find('data')

        # Remove the search-index entry, the bucket, the file instance row
        # and the persistent identifier, then persist the changes.
        recind = RecordIndexer()
        recind.delete_by_id(record_uuid=record_uuid)
        self.delete_bucket()
        FileInstance.query.filter_by(id=fileinstance_id).delete()
        PersistentIdentifier.query.filter_by(object_uuid=record_uuid).delete()
        db.session.commit()

        # Remove the file on disk and the folder containing it.
        # The full path is /home/<user>/.local/share/virtualenvs/
        # fare-platform-<code>/var/instance/data/<f1>/<f2>/<bucketid>/<filename>
        # Trimming at i + 8 keeps everything up to and including "data/<f1>/"
        # (len('data/') == 5, plus the 2-character <f1> folder and its '/').
        # Guard against uris without a "data" segment: find() returns -1 and
        # the slice would otherwise point at an unrelated directory, which
        # rmtree would then delete.
        if i != -1:
            shutil.rmtree(uri[:i + 8])
        else:
            current_app.logger.warning(
                "Could not locate data directory in uri= " + uri)

        current_app.logger.info("Deleted file= " + self['title'] +
                                ", by user= " + current_user.email)
    def _verify_file_and_symlink(record, file_id, filename):
        """Assert the uploaded file and its symlink exist and point together."""
        stored = FileInstance.get(file_id)
        stored_path = stored.uri
        # The uploaded file must be present on disk.
        assert os.path.exists(stored_path)

        link_path = _get_symlink_path(record, filename)
        # The symlink must exist ...
        assert os.path.exists(link_path)

        # ... and resolve to the very same file as the upload.
        assert os.path.realpath(link_path) == os.path.realpath(stored_path)
예제 #6
0
def delete_file_instance(obj: ObjectVersion):
    """Delete file on filesystem and mark as not readable."""
    current_app.logger.debug(f"Delete file instance: {str(obj)}")

    if obj.file_id:
        instance = FileInstance.get(str(obj.file_id))  # type: FileInstance

        was_readable = instance.readable
        # Flag the instance as gone before touching the disk.
        instance.readable = False

        # Only a previously readable file has data on disk to remove.
        if was_readable:
            instance.storage().delete()

    db.session.commit()
def upload_to_zenodo(bucket_id, filename):
    """Upload code to zenodo.

    Creates an empty Zenodo deposition and streams the archived file
    ``<filename>.tar.gz`` from local storage into the deposition's bucket.

    :param bucket_id: Bucket holding the archived file.
    :param filename: Base name of the archive (without ``.tar.gz``).
    :returns: JSON response carrying the upload's HTTP status code.
    """
    zenodo_server_url = current_app.config.get('ZENODO_SERVER_URL')
    params = {"access_token": current_app.config.get(
        'ZENODO_ACCESS_TOKEN')}
    filename = filename + '.tar.gz'

    # Create a new, empty deposition to obtain the target bucket url.
    r = requests.post(zenodo_server_url,
                      params=params, json={},
                      )
    # Fail loudly on an HTTP error instead of a confusing KeyError when
    # reading ['links']['bucket'] below.
    r.raise_for_status()

    file_obj = ObjectVersion.get(bucket_id, filename)
    # Renamed from `file` to avoid shadowing a well-known builtin name.
    file_instance = FileInstance.get(file_obj.file_id)

    bucket_url = r.json()['links']['bucket']
    # Stream the archive from disk straight into the Zenodo bucket.
    with open(file_instance.uri, 'rb') as fp:
        response = requests.put(
            bucket_url + '/{}'.format(filename),
            data=fp,
            params=params,
        )

    return jsonify({"status": response.status_code})
def upload_to_zenodo(bucket_id, filename):
    """Upload code to zenodo."""
    zenodo_server_url = current_app.config.get('ZENODO_SERVER_URL')
    params = {"access_token": current_app.config.get('ZENODO_ACCESS_TOKEN')}
    filename = filename + '.tar.gz'

    # Create an empty deposition; its links tell us where to put the file.
    deposit_response = requests.post(
        zenodo_server_url,
        params=params,
        json={},
    )

    file_obj = ObjectVersion.get(bucket_id, filename)
    instance = FileInstance.get(file_obj.file_id)

    # Stream the archive from local storage into the deposition's bucket.
    bucket_url = deposit_response.json()['links']['bucket']
    target_url = bucket_url + '/{}'.format(filename)
    with open(instance.uri, 'rb') as fp:
        upload_response = requests.put(target_url, data=fp, params=params)

    return jsonify({"status": upload_response.status_code})
예제 #9
0
def video_transcode(self,
                    object_version,
                    video_presets=None,
                    sleep_time=5,
                    **kwargs):
    """Launch video transcoding.

    For each of the presets generate a new ``ObjectVersion`` tagged as slave
    with the preset name as key and a link to the master version.

    :param object_version: Master video.
    :param video_presets: List of presets to use for transcoding. If ``None``
        it will use the default values set in ``VIDEO_DEFAULT_PRESETS``.
    :param sleep_time: the time interval between requests for Sorenson status
    """
    object_version = as_object_version(object_version)

    self._base_payload = dict(
        object_version=str(object_version.version_id),
        video_presets=video_presets,
        tags=object_version.get_tags(),
        deposit_id=kwargs.get('deposit_id', None),
        event_id=kwargs.get('event_id', None),
    )

    job_ids = deque()

    # Set handler for canceling all jobs
    def handler(signum, frame):
        # TODO handle better file deleting and ObjectVersion cleaning
        # NOTE: previously this was a lazy ``map`` over a lambda that
        # referenced the wrong name (``info`` instead of its parameter), so
        # no job was ever stopped. Iterate eagerly over a snapshot instead.
        for job in list(job_ids):
            stop_encoding(job['job_id'])
    signal.signal(signal.SIGTERM, handler)

    # Get master file's bucket_id
    bucket_id = object_version.bucket_id
    bucket_location = object_version.bucket.location.uri

    preset_config = current_app.config['CDS_SORENSON_PRESETS']
    for preset in video_presets or preset_config.keys():
        with db.session.begin_nested():
            # Create FileInstance and get generated UUID
            file_instance = FileInstance.create()
            # Create ObjectVersion named after the master plus the preset's
            # extension, e.g. ``video-720p.mp4``.
            base_name = object_version.key.rsplit('.', 1)[0]
            new_extension = preset_config[preset][1]
            obj = ObjectVersion.create(
                bucket=bucket_id,
                key='{0}-{1}{2}'.format(base_name, preset, new_extension)
            )
            obj.set_file(file_instance)
            ObjectVersionTag.create(
                obj, 'master', str(object_version.version_id))
            ObjectVersionTag.create(obj, 'preset', preset)

            # Extract new location
            storage = file_instance.storage(default_location=bucket_location)
            directory, filename = storage._get_fs()

            # Start Sorenson
            input_file = object_version.file.uri
            output_file = os.path.join(directory.root_path, filename)

            job_id = start_encoding(input_file, preset, output_file)
            ObjectVersionTag.create(obj, '_sorenson_job_id', job_id)
            job_info = dict(
                preset=preset,
                job_id=job_id,
                file_instance=str(file_instance.id),
                uri=output_file,
                object_version=str(obj.version_id),
                key=obj.key,
                tags=obj.get_tags(),
            )
        db.session.commit()

        self.update_state(
            state=STARTED,
            meta=dict(
                payload=dict(job_info=job_info),
                message='Started transcoding.'
            )
        )
        job_ids.append(job_info)

    # Monitor jobs and report accordingly
    while job_ids:
        info = job_ids.popleft()

        # Get job status
        status = get_encoding_status(info['job_id'])['Status']
        percentage = 100 if status['TimeFinished'] else status['Progress']
        info['percentage'] = percentage

        # Update task's state for each individual preset
        self.update_state(
            state=STARTED,
            meta=dict(
                # Report the job currently being polled (``info``), not the
                # stale ``job_info`` left over from the creation loop above.
                payload=dict(job_info=info),
                message='Transcoding {0}'.format(percentage),
            )
        )

        # Set file's location for completed jobs
        if percentage == 100:
            with db.session.begin_nested():
                uri = info['uri']
                with open(uri, 'rb') as transcoded_file:
                    digest = hashlib.md5(transcoded_file.read()).hexdigest()
                size = os.path.getsize(uri)
                checksum = '{0}:{1}'.format('md5', digest)
                FileInstance.get(
                    info['file_instance']).set_uri(uri, size, checksum)
            db.session.commit()
        else:
            # Not finished yet: requeue and poll again later.
            job_ids.append(info)

        time.sleep(sleep_time)