def test_get_tasks_status_by_task():
    """Test get tasks status by task."""
    # simulate a "celery cache empty" creating an empty result
    AsyncResult = namedtuple('AsyncResult', ['result', 'status'])
    lost_result = AsyncResult(None, states.PENDING)
    # and: event -> receiver -> lost result
    Event = namedtuple('Event', ['receiver'])
    receiver = mock.MagicMock()
    receiver.has_result = mock.MagicMock(return_value=True)
    receiver._raw_info = mock.MagicMock(
        return_value={'task_transcode': lost_result})
    event = Event(receiver)
    # build the current state when the value previously saved in the DB is SUCCESS
    computed_states = get_tasks_status_by_task(
        events=[event], statuses={'task_transcode': states.SUCCESS})
    assert computed_states == {'task_transcode': states.SUCCESS}
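
These snippets are excerpted from a larger test module, so their imports are omitted. A minimal set for the first test could look like the sketch below; the module path for get_tasks_status_by_task is an assumption, so adjust it to wherever the function is defined in your code base.

from collections import namedtuple

import mock  # on Python 3, `from unittest import mock` works as well
from celery import states

# assumed location of the function under test
from cds.modules.webhooks.status import get_tasks_status_by_task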
Example #2
def test_video_events_on_workflow(webhooks, api_app, db, api_project, bucket,
                                  access_token, json_headers):
    """Test deposit events."""
    (project, video_1, video_2) = api_project
    project_depid = project['_deposit']['id']
    video_1_depid = video_1['_deposit']['id']
    db.session.add(bucket)

    # registering receiver
    receiver_id = 'test_video_events_on_workflow'
    workflow_receiver_video_failing(api_app,
                                    db,
                                    video_1,
                                    receiver_id=receiver_id)

    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_list',
                      receiver_id=receiver_id,
                      access_token=access_token)

    with api_app.test_client() as client:
        # run workflow
        resp = client.post(url, headers=json_headers, data=json.dumps({}))
        assert resp.status_code == 500
        # run the workflow again
        resp = client.post(url, headers=json_headers, data=json.dumps({}))
        assert resp.status_code == 500
        # resolve deposit and events
        deposit = deposit_video_resolver(video_1_depid)
        events = get_deposit_events(deposit['_deposit']['id'])
        # check events
        assert len(events) == 2
        assert events[0].payload['deposit_id'] == video_1_depid
        assert events[1].payload['deposit_id'] == video_1_depid
        # check computed status
        status = get_tasks_status_by_task(events)
        assert status['add'] == states.SUCCESS
        assert status['failing'] == states.FAILURE

        # check every task for every event
        for event in events:
            result = event.receiver._deserialize_result(event)
            assert result.parent.status == states.SUCCESS
            assert result.children[0].status == states.FAILURE
            assert result.children[1].status == states.SUCCESS

        # check if the states are inside the deposit
        res = client.get(url_for('invenio_deposit_rest.video_item',
                                 pid_value=video_1_depid,
                                 access_token=access_token),
                         headers=json_headers)
        assert res.status_code == 200
        data = json.loads(res.data.decode('utf-8'))['metadata']
        assert data['_cds']['state']['add'] == states.SUCCESS
        assert data['_cds']['state']['failing'] == states.FAILURE

        # run indexer
        RecordIndexer().process_bulk_queue()
        sleep(2)
        # check elasticsearch video_1 state
        resp = client.get(url_for('invenio_deposit_rest.video_list',
                                  q='_deposit.id:{0}'.format(video_1_depid),
                                  access_token=access_token),
                          headers=json_headers)
        assert resp.status_code == 200
        data = json.loads(resp.data.decode('utf-8'))
        status = data['hits']['hits'][0]['metadata']['_cds']['state']
        assert status['add'] == states.SUCCESS
        assert status['failing'] == states.FAILURE
        # check elasticsearch project state
        resp = client.get(url_for('invenio_deposit_rest.video_list',
                                  q='_deposit.id:{0}'.format(project_depid),
                                  access_token=access_token),
                          headers=json_headers)
        assert resp.status_code == 200
        data = json.loads(resp.data.decode('utf-8'))
        status = data['hits']['hits'][0]['metadata']['_cds']['state']
        assert status['add'] == states.SUCCESS
        assert status['failing'] == states.FAILURE
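
Taken together, the first two tests pin down the observable contract of get_tasks_status_by_task: it walks each event's receiver results, keeps one Celery state per task name, and falls back to the previously stored status when the result backend has lost the result (the PENDING case in the first test). The following is only an illustrative sketch under those assumptions, with hypothetical has_result/_raw_info signatures, not the real implementation.

from celery import states


def tasks_status_by_task_sketch(events, statuses=None):
    """Illustrative only: merge per-event task states with saved ones."""
    computed = dict(statuses or {})
    for event in events:
        receiver = event.receiver
        if not receiver.has_result(event):  # hypothetical signature
            continue
        # hypothetical shape: mapping of task name -> AsyncResult-like object
        for task_name, result in receiver._raw_info(event).items():
            if result.status == states.PENDING and task_name in computed:
                # result lost in the Celery backend: keep the stored state
                continue
            computed[task_name] = result.status
    return computed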
Example #3
def test_video_events_on_download_create(api_app, webhooks, db, api_project,
                                         access_token, json_headers):
    """Test deposit events."""
    (project, video_1, video_2) = api_project
    video_1_depid = video_1['_deposit']['id']
    project_id = str(project.id)
    video_1_id = str(video_1.id)
    bucket_id = video_1._bucket.id

    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_list',
                      receiver_id='downloader',
                      access_token=access_token)

    with mock.patch('requests.get') as mock_request, \
            mock.patch('invenio_indexer.tasks.index_record.delay') \
            as mock_indexer, \
            api_app.test_client() as client:
        file_size = 1024 * 1024
        mock_request.return_value = type(
            'Response', (object, ), {
                'raw': BytesIO(b'\x00' * file_size),
                'headers': {
                    'Content-Length': file_size
                }
            })

        payload = dict(uri='http://example.com/test.pdf',
                       bucket_id=str(bucket_id),
                       deposit_id=video_1_depid,
                       key='test.pdf')

        resp = client.post(url, headers=json_headers, data=json.dumps(payload))
        assert resp.status_code == 201

        file_size = 1024 * 1024 * 6
        mock_request.return_value = type(
            'Response', (object, ), {
                'raw': BytesIO(b'\x00' * file_size),
                'headers': {
                    'Content-Length': file_size
                }
            })

        resp = client.post(url, headers=json_headers, data=json.dumps(payload))
        assert resp.status_code == 201

        deposit = deposit_video_resolver(video_1_depid)

        events = get_deposit_events(deposit['_deposit']['id'])

        assert len(events) == 2
        assert events[0].payload['deposit_id'] == video_1_depid
        assert events[1].payload['deposit_id'] == video_1_depid

        status = get_tasks_status_by_task(events)
        assert status == {'file_download': states.SUCCESS}

        # check if the states are inside the deposit
        res = client.get(url_for('invenio_deposit_rest.video_item',
                                 pid_value=video_1_depid,
                                 access_token=access_token),
                         headers=json_headers)
        assert res.status_code == 200
        data = json.loads(res.data.decode('utf-8'))['metadata']
        assert data['_cds']['state']['file_download'] == states.SUCCESS
        assert deposit._get_files_dump() == data['_files']

        # check the record is inside the indexer queue
        ids = set(get_indexed_records_from_mock(mock_indexer))
        assert len(ids) == 2
        assert video_1_id in ids
        assert project_id in ids
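
get_indexed_records_from_mock is a helper from the same test suite and is not shown here. Given that it is applied both to a patched index_record.delay (this test) and to a patched RecordIndexer.bulk_index (the tests below), a plausible, purely illustrative guess is that it flattens the positional arguments recorded on the mock into record ids:

def indexed_records_from_mock_sketch(mock_fun):
    """Illustrative only: flatten record ids recorded on a patched indexer call."""
    ids = []
    for args, _kwargs in mock_fun.call_args_list:
        for arg in args:
            if isinstance(arg, (str, bytes)):
                # index_record.delay(...) style: a single record id
                ids.append(arg)
            else:
                # RecordIndexer.bulk_index(...) style: an iterable of ids
                ids.extend(str(record_id) for record_id in arg)
    return ids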
Example #4
def test_avc_workflow_receiver_local_file_pass(api_app, db, api_project,
                                               access_token, json_headers,
                                               mock_sorenson, online_video,
                                               webhooks, local_file):
    """Test AVCWorkflow receiver."""
    project, video_1, video_2 = api_project
    video_1_depid = video_1['_deposit']['id']
    video_1_id = str(video_1.id)
    project_id = str(project.id)

    bucket_id = ObjectVersion.query.filter_by(
        version_id=local_file).one().bucket_id
    video_size = 5510872
    master_key = 'test.mp4'
    slave_keys = [
        'slave_{0}.mp4'.format(quality)
        for quality in get_presets_applied().keys() if quality != '1024p'
    ]
    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_list',
                      receiver_id='avc',
                      access_token=access_token)

    with api_app.test_client() as client, \
            mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \
            mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \
            as mock_indexer:
        sse_channel = 'mychannel'
        payload = dict(
            uri=online_video,
            deposit_id=video_1_depid,
            key=master_key,
            sse_channel=sse_channel,
            sleep_time=0,
            version_id=str(local_file),
        )
        # [[ RUN WORKFLOW ]]
        resp = client.post(url, headers=json_headers, data=json.dumps(payload))

        assert resp.status_code == 201
        data = json.loads(resp.data.decode('utf-8'))

        assert '_tasks' in data
        assert data['key'] == master_key
        assert 'version_id' in data
        assert data.get('presets') == get_available_preset_qualities()
        assert 'links' in data  # TODO decide which links are needed

        assert ObjectVersion.query.count() == get_object_count()

        # Master file
        master = ObjectVersion.get(bucket_id, master_key)
        tags = master.get_tags()
        assert tags['_event_id'] == data['tags']['_event_id']
        assert master.key == master_key
        assert str(master.version_id) == data['version_id']
        assert master.file
        assert master.file.size == video_size

        # Check metadata tags
        metadata_keys = [
            'duration', 'bit_rate', 'size', 'avg_frame_rate', 'codec_name',
            'codec_long_name', 'width', 'height', 'nb_frames',
            'display_aspect_ratio', 'color_range'
        ]
        assert all([key in tags for key in metadata_keys])
        assert ObjectVersion.query.count() == get_object_count()
        assert ObjectVersionTag.query.count() == get_tag_count(is_local=True)

        # Check metadata patch
        recid = PersistentIdentifier.get('depid', video_1_depid).object_uuid
        record = Record.get_record(recid)
        assert 'extracted_metadata' in record['_cds']
        assert all([
            key in str(record['_cds']['extracted_metadata'])
            for key in metadata_keys
        ])

        # Check slaves
        for slave_key in slave_keys:
            slave = ObjectVersion.get(bucket_id, slave_key)
            tags = slave.get_tags()
            assert slave.key == slave_key
            assert '_sorenson_job_id' in tags
            assert tags['_sorenson_job_id'] == '1234'
            assert 'master' in tags
            assert tags['master'] == str(master.version_id)
            assert master.file
            assert master.file.size == video_size

        video = deposit_video_resolver(video_1_depid)
        events = get_deposit_events(video['_deposit']['id'])

        # check deposit tasks status
        tasks_status = get_tasks_status_by_task(events)
        assert len(tasks_status) == 3
        assert 'file_transcode' in tasks_status
        assert 'file_video_extract_frames' in tasks_status
        assert 'file_video_metadata_extraction' in tasks_status

        # check single status
        collector = CollectInfoTasks()
        iterate_events_results(events=events, fun=collector)
        info = list(collector)
        assert len(info) == 11
        assert info[0][0] == 'file_video_metadata_extraction'
        assert info[0][1].status == states.SUCCESS
        assert info[1][0] == 'file_video_extract_frames'
        assert info[1][1].status == states.SUCCESS
        transcode_tasks = info[2:]
        statuses = [task[1].status for task in transcode_tasks]
        assert len(transcode_tasks) == len(statuses)
        assert [
            states.SUCCESS, states.REVOKED, states.REVOKED, states.REVOKED,
            states.SUCCESS, states.REVOKED, states.REVOKED, states.REVOKED,
            states.REVOKED
        ] == statuses

        # check tags (exclude 'uri_origin')
        assert ObjectVersionTag.query.count() == (get_tag_count() - 1)

        # check sse is called
        assert mock_sse.called

        messages = [
            (sse_channel, states.SUCCESS, 'file_video_metadata_extraction'),
            (sse_channel, states.STARTED, 'file_transcode'),
            (sse_channel, states.SUCCESS, 'file_transcode'),
            (sse_channel, states.REVOKED, 'file_transcode'),  # ResolutionError
            (sse_channel, states.STARTED, 'file_video_extract_frames'),
            (sse_channel, states.SUCCESS, 'file_video_extract_frames'),
            (sse_channel, states.SUCCESS, 'update_deposit'),
        ]

        call_args = []
        for (_, kwargs) in mock_sse.call_args_list:
            type_ = kwargs['type_']
            state = kwargs['data']['state']
            channel = kwargs['channel']
            tuple_ = (channel, state, type_)
            if tuple_ not in call_args:
                call_args.append(tuple_)

        assert len(call_args) == len(messages)
        for message in messages:
            assert message in call_args

        deposit = deposit_video_resolver(video_1_depid)

        def filter_events(call_args):
            _, x = call_args
            return x['type_'] == 'update_deposit'

        list_kwargs = list(filter(filter_events, mock_sse.call_args_list))
        assert len(list_kwargs) == 10
        _, kwargs = list_kwargs[8]
        assert kwargs['type_'] == 'update_deposit'
        assert kwargs['channel'] == 'mychannel'
        assert kwargs['data']['state'] == states.SUCCESS
        assert kwargs['data']['meta']['payload'] == {
            'deposit_id': deposit['_deposit']['id'],
            'event_id': data['tags']['_event_id'],
            'deposit': deposit,
        }

        # check ElasticSearch is called
        ids = set(get_indexed_records_from_mock(mock_indexer))
        assert video_1_id in ids
        assert project_id in ids
        assert deposit['_cds']['state'] == {
            'file_video_metadata_extraction': states.SUCCESS,
            'file_video_extract_frames': states.SUCCESS,
            'file_transcode': states.SUCCESS,
        }

    # Test cleaning!
    url = '{0}?access_token={1}'.format(data['links']['cancel'], access_token)

    with mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \
            mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \
            as mock_indexer, \
            api_app.test_client() as client:
        # [[ DELETE WORKFLOW ]]
        resp = client.delete(url, headers=json_headers)

        assert resp.status_code == 201

        # check that object versions and tags are deleted
        # (Create + Delete) * Num Objs - 1 (because the file is local and
        # will not be touched)
        assert ObjectVersion.query.count() == 2 * get_object_count() - 1
        # Tags associated with the old version
        assert ObjectVersionTag.query.count() == get_tag_count(is_local=True)
        bucket = Bucket.query.first()
        # and bucket is empty
        assert bucket.size == 0

        record = RecordMetadata.query.filter_by(id=video_1_id).one()

        # check the metadata patch is deleted
        assert 'extracted_metadata' not in record.json['_cds']

        # check that the corresponding Event is persisted after cleaning
        assert len(get_deposit_events(record.json['_deposit']['id'])) == 0
        assert len(
            get_deposit_events(record.json['_deposit']['id'],
                               _deleted=True)) == 1

        # check that no SSE message is sent and no reindexing is fired
        assert mock_sse.called is False
        assert mock_indexer.called is False
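
CollectInfoTasks and iterate_events_results are used in these tests only through their observable behaviour: the collector is called once per (task name, result) pair while the event results are iterated, and iterating it afterwards yields those pairs in order. A minimal collector consistent with that usage, assuming iterate_events_results invokes fun(task_name, result), could look like this sketch:

class CollectInfoTasksSketch(object):
    """Illustrative only: accumulate (task_name, result) pairs in call order."""

    def __init__(self):
        self._info = []

    def __call__(self, task_name, result):
        self._info.append((task_name, result))

    def __iter__(self):
        return iter(self._info)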
Example #5
def test_avc_workflow_receiver_pass(api_app, db, api_project, access_token,
                                    json_headers, mock_sorenson, online_video,
                                    webhooks, users):
    """Test AVCWorkflow receiver."""
    project, video_1, video_2 = api_project
    video_1_depid = video_1['_deposit']['id']
    video_1_id = str(video_1.id)
    project_id = str(project.id)

    bucket_id = video_1['_buckets']['deposit']
    video_size = 5510872
    master_key = 'test.mp4'
    slave_keys = [
        'slave_{0}.mp4'.format(quality) for quality in get_presets_applied()
        if quality != '1024p'
    ]
    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_list',
                      receiver_id='avc',
                      access_token=access_token)

    with api_app.test_client() as client, \
            mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \
            mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \
            as mock_indexer:
        sse_channel = 'mychannel'
        payload = dict(
            uri=online_video,
            deposit_id=video_1_depid,
            key=master_key,
            sse_channel=sse_channel,
            sleep_time=0,
        )
        resp = client.post(url, headers=json_headers, data=json.dumps(payload))

        assert resp.status_code == 201
        data = json.loads(resp.data.decode('utf-8'))

        assert '_tasks' in data
        assert data['tags']['uri_origin'] == online_video
        assert data['key'] == master_key
        assert 'version_id' in data
        assert data.get('presets') == get_available_preset_qualities()
        assert 'links' in data  # TODO decide which links are needed

        assert ObjectVersion.query.count() == get_object_count()

        # Master file
        master = ObjectVersion.get(bucket_id, master_key)
        tags = master.get_tags()
        assert tags['_event_id'] == data['tags']['_event_id']
        assert master.key == master_key
        assert str(master.version_id) == data['version_id']
        assert master.file
        assert master.file.size == video_size

        # Check metadata tags
        metadata_keys = [
            'duration', 'bit_rate', 'size', 'avg_frame_rate', 'codec_name',
            'codec_long_name', 'width', 'height', 'nb_frames',
            'display_aspect_ratio', 'color_range'
        ]
        assert all([key in tags for key in metadata_keys])

        # Check metadata patch
        recid = PersistentIdentifier.get('depid', video_1_depid).object_uuid
        record = Record.get_record(recid)
        assert 'extracted_metadata' in record['_cds']
        assert all([
            key in str(record['_cds']['extracted_metadata'])
            for key in metadata_keys
        ])

        # Check slaves
        for slave_key in slave_keys:
            slave = ObjectVersion.get(bucket_id, slave_key)
            tags = slave.get_tags()
            assert slave.key == slave_key
            assert '_sorenson_job_id' in tags
            assert tags['_sorenson_job_id'] == '1234'
            assert 'master' in tags
            assert tags['master'] == str(master.version_id)
            assert master.file
            assert master.file.size == video_size

        video = deposit_video_resolver(video_1_depid)
        events = get_deposit_events(video['_deposit']['id'])

        # check deposit tasks status
        tasks_status = get_tasks_status_by_task(events)
        assert len(tasks_status) == 4
        assert 'file_download' in tasks_status
        assert 'file_transcode' in tasks_status
        assert 'file_video_extract_frames' in tasks_status
        assert 'file_video_metadata_extraction' in tasks_status

        # check single status
        collector = CollectInfoTasks()
        iterate_events_results(events=events, fun=collector)
        info = list(collector)
        presets = get_presets_applied().keys()
        assert info[0][0] == 'file_download'
        assert info[0][1].status == states.SUCCESS
        assert info[1][0] == 'file_video_metadata_extraction'
        assert info[1][1].status == states.SUCCESS
        assert info[2][0] == 'file_video_extract_frames'
        assert info[2][1].status == states.SUCCESS
        for i in info[3:]:
            assert i[0] == 'file_transcode'
            if i[1].status == states.SUCCESS:
                assert i[1].result['payload']['preset_quality'] in presets

        # check tags
        assert ObjectVersionTag.query.count() == get_tag_count()

        # check sse is called
        assert mock_sse.called

        messages = [
            (sse_channel, states.STARTED, 'file_download'),
            (sse_channel, states.SUCCESS, 'file_download'),
            (sse_channel, states.SUCCESS, 'file_video_metadata_extraction'),
            (sse_channel, states.STARTED, 'file_transcode'),
            (sse_channel, states.SUCCESS, 'file_transcode'),
            (sse_channel, states.REVOKED, 'file_transcode'),  # ResolutionError
            (sse_channel, states.STARTED, 'file_video_extract_frames'),
            (sse_channel, states.SUCCESS, 'file_video_extract_frames'),
            (sse_channel, states.SUCCESS, 'update_deposit'),
        ]

        call_args = []
        for (_, kwargs) in mock_sse.call_args_list:
            type_ = kwargs['type_']
            state = kwargs['data']['state']
            channel = kwargs['channel']
            tuple_ = (channel, state, type_)
            if tuple_ not in call_args:
                call_args.append(tuple_)

        assert len(call_args) == len(messages)
        for message in messages:
            assert message in call_args

        deposit = deposit_video_resolver(video_1_depid)

        def filter_events(call_args):
            _, x = call_args
            return x['type_'] == 'update_deposit'

        list_kwargs = list(filter(filter_events, mock_sse.call_args_list))
        assert len(list_kwargs) == 12
        _, kwargs = list_kwargs[10]
        assert kwargs['type_'] == 'update_deposit'
        assert kwargs['channel'] == 'mychannel'
        assert kwargs['data']['state'] == states.SUCCESS
        assert kwargs['data']['meta']['payload'] == {
            'deposit_id': deposit['_deposit']['id'],
            'event_id': data['tags']['_event_id'],
            'deposit': deposit,
        }

        # check ElasticSearch is called
        ids = set(get_indexed_records_from_mock(mock_indexer))
        assert video_1_id in ids
        assert project_id in ids
        assert deposit['_cds']['state'] == {
            'file_download': states.SUCCESS,
            'file_video_metadata_extraction': states.SUCCESS,
            'file_video_extract_frames': states.SUCCESS,
            'file_transcode': states.SUCCESS,
        }

    # check feedback from anonymous user
    event_id = data['tags']['_event_id']
    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_feedback_item',
                      event_id=event_id,
                      receiver_id='avc')
    with api_app.test_client() as client:
        resp = client.get(url, headers=json_headers)
        assert resp.status_code == 401
    # check feedback from owner
    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_feedback_item',
                      event_id=event_id,
                      receiver_id='avc')
    with api_app.test_client() as client:
        login_user_via_session(client, email=User.query.get(users[0]).email)
        resp = client.get(url, headers=json_headers)
        assert resp.status_code == 200
    # check feedback from another user without access
    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_feedback_item',
                      event_id=event_id,
                      receiver_id='avc')
    with api_app.test_client() as client:
        login_user_via_session(client, email=User.query.get(users[1]).email)
        resp = client.get(url, headers=json_headers)
        assert resp.status_code == 403
    # check feedback from another user with access
    user_2 = User.query.get(users[1])
    user_2_id = str(user_2.id)
    user_2_email = user_2.email
    project = deposit_project_resolver(project['_deposit']['id'])
    project['_access'] = {'update': [user_2_email]}
    project = project.commit()
    with api_app.test_request_context():
        url = url_for('invenio_webhooks.event_feedback_item',
                      event_id=event_id,
                      receiver_id='avc')
    with api_app.test_client() as client:

        @identity_loaded.connect
        def load_email(sender, identity):
            if current_user.get_id() == user_2_id:
                identity.provides.update([UserNeed(user_2_email)])

        login_user_via_session(client, email=user_2_email)
        resp = client.get(url, headers=json_headers)
        assert resp.status_code == 200

    # Test cleaning!
    url = '{0}?access_token={1}'.format(data['links']['cancel'], access_token)

    with mock.patch('invenio_sse.ext._SSEState.publish') as mock_sse, \
            mock.patch('invenio_indexer.api.RecordIndexer.bulk_index') \
            as mock_indexer, \
            api_app.test_client() as client:
        resp = client.delete(url, headers=json_headers)

        assert resp.status_code == 201

        # check that object versions and tags are deleted
        # (Create + Delete) * Num Objs
        assert ObjectVersion.query.count() == 2 * get_object_count()
        # Tags connected with the old version
        assert ObjectVersionTag.query.count() == get_tag_count()
        bucket = Bucket.query.first()
        # and bucket is empty
        assert bucket.size == 0

        record = RecordMetadata.query.filter_by(id=video_1_id).one()

        # check the metadata patch is deleted
        assert 'extracted_metadata' not in record.json['_cds']

        # check that the corresponding Event is persisted after cleaning
        assert len(get_deposit_events(record.json['_deposit']['id'])) == 0
        assert len(
            get_deposit_events(record.json['_deposit']['id'],
                               _deleted=True)) == 1

        # check that no SSE message is sent and no reindexing is fired
        assert mock_sse.called is False
        assert mock_indexer.called is False