Code example #1
        elif b["target_type"] == "content":
            b["target_url"] = reverse(
                "api-1-content",
                url_args={"q": f'sha1_git:{b["target"]}'},
                request=request,
            )

    for _, b in snapshot_data["branches"].items():
        if b["target_type"] == "alias":
            target = resolve_branch_alias(snapshot_data, b)
            b["target_url"] = target["target_url"]

    assert actual_snapshot == snapshot_data


@given(origin())
def test_enrich_origin(api_request_factory, origin):
    url = reverse("api-1-origin", url_args={"origin_url": origin["url"]})
    request = api_request_factory.get(url)

    origin_data = {"url": origin["url"]}
    actual_origin = utils.enrich_origin(origin_data, request)

    origin_data["origin_visits_url"] = reverse(
        "api-1-origin-visits",
        url_args={"origin_url": origin["url"]},
        request=request)

    assert actual_origin == origin_data
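
The utils.enrich_origin function exercised above is not part of this excerpt. Judging from the assertion, it only attaches an origin_visits_url built with reverse; below is a minimal sketch inferred from the test (an assumption, not the actual swh-web implementation), reusing the reverse helper already imported by these tests:

def enrich_origin(origin_data, request=None):
    # Sketch: mirror the expectation in test_enrich_origin above by adding
    # the API URL listing this origin's visits, then return the dict.
    origin_data["origin_visits_url"] = reverse(
        "api-1-origin-visits",
        url_args={"origin_url": origin_data["url"]},
        request=request,
    )
    return origin_data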

Code example #2
File: test_archive.py  Project: shivam2003sy/swh-web
    assert actual_revision_log == expected_revision_log


def _get_origin_branches(archive_data, origin):
    origin_visit = archive_data.origin_visit_get(origin["url"])[-1]
    snapshot = archive_data.snapshot_get(origin_visit["snapshot"])
    branches = {
        k: v
        for (k, v) in snapshot["branches"].items()
        if v["target_type"] == "revision"
    }
    return branches


@given(origin())
def test_lookup_revision_log_by(archive_data, origin):
    branches = _get_origin_branches(archive_data, origin)
    branch_name = random.choice(list(branches.keys()))

    actual_log = list(
        archive.lookup_revision_log_by(origin["url"],
                                       branch_name,
                                       None,
                                       limit=25))

    expected_log = archive_data.revision_log(branches[branch_name]["target"],
                                             limit=25)

    assert actual_log == expected_log
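
The branch resolution this test exercises can be read off _get_origin_branches and the expected value: pick the branch in the latest snapshot and log its target revision. A hedged sketch of that lookup, inferred from the test-data helpers above and not taken from the actual lookup_revision_log_by implementation:

def lookup_revision_log_by_sketch(archive_data, origin_url, branch_name,
                                  limit=25):
    # Resolve the branch through the latest visit's snapshot, then return
    # the revision log of its target, mirroring expected_log above.
    visit = archive_data.origin_visit_get(origin_url)[-1]
    snapshot = archive_data.snapshot_get(visit["snapshot"])
    target = snapshot["branches"][branch_name]["target"]
    return archive_data.revision_log(target, limit=limit)
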
Code example #3
File: test_origin.py  Project: monperrus/swh-web
class OriginApiTestCase(WebTestCase, APITestCase):

    @patch('swh.web.api.views.origin.get_origin_visits')
    def test_api_lookup_origin_visits_raise_error(
        self, mock_get_origin_visits,
    ):

        err_msg = 'voluntary error to check the bad request middleware.'

        mock_get_origin_visits.side_effect = ValueError(err_msg)

        url = reverse('api-origin-visits', url_args={'origin_id': 2})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 400)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, {
            'exception': 'ValueError',
            'reason': err_msg})

    @patch('swh.web.api.views.origin.get_origin_visits')
    def test_api_lookup_origin_visits_raise_swh_storage_error_db(
            self, mock_get_origin_visits):

        err_msg = 'Storage exploded! Will be back online shortly!'

        mock_get_origin_visits.side_effect = StorageDBError(err_msg)

        url = reverse('api-origin-visits', url_args={'origin_id': 2})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 503)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, {
            'exception': 'StorageDBError',
            'reason':
            'An unexpected error occurred in the backend: %s' % err_msg})

    @patch('swh.web.api.views.origin.get_origin_visits')
    def test_api_lookup_origin_visits_raise_swh_storage_error_api(
            self, mock_get_origin_visits):

        err_msg = 'Storage API dropped dead! Will resurrect asap!'

        mock_get_origin_visits.side_effect = StorageAPIError(err_msg)

        url = reverse('api-origin-visits', url_args={'origin_id': 2})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 503)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, {
            'exception': 'StorageAPIError',
            'reason':
            'An unexpected error occurred in the api backend: %s' % err_msg
        })

    @given(new_origin(), visit_dates(4), new_snapshots(4))
    def test_api_lookup_origin_visits(self, new_origin, visit_dates,
                                      new_snapshots):

        origin_id = self.storage.origin_add_one(new_origin)
        new_origin['id'] = origin_id
        for i, visit_date in enumerate(visit_dates):
            origin_visit = self.storage.origin_visit_add(origin_id, visit_date)
            self.storage.snapshot_add(origin_id, origin_visit['visit'],
                                      new_snapshots[i])

        all_visits = list(reversed(get_origin_visits(new_origin)))

        for last_visit, expected_visits in (
                (None, all_visits[:2]),
                (all_visits[1]['visit'], all_visits[2:4])):

            url = reverse('api-origin-visits',
                          url_args={'origin_id': origin_id},
                          query_params={'per_page': 2,
                                        'last_visit': last_visit})

            rv = self.client.get(url)

            self.assertEqual(rv.status_code, 200)
            self.assertEqual(rv['Content-Type'], 'application/json')

            for expected_visit in expected_visits:
                origin_visit_url = reverse(
                    'api-origin-visit',
                    url_args={'origin_id': origin_id,
                              'visit_id': expected_visit['visit']})
                snapshot_url = reverse(
                    'api-snapshot',
                    url_args={'snapshot_id': expected_visit['snapshot']})
                expected_visit['origin_visit_url'] = origin_visit_url
                expected_visit['snapshot_url'] = snapshot_url

            self.assertEqual(rv.data, expected_visits)

    @given(new_origin(), visit_dates(4), new_snapshots(4))
    def test_api_lookup_origin_visit(self, new_origin, visit_dates,
                                     new_snapshots):

        origin_id = self.storage.origin_add_one(new_origin)
        new_origin['id'] = origin_id
        for i, visit_date in enumerate(visit_dates):
            origin_visit = self.storage.origin_visit_add(origin_id, visit_date)
            visit_id = origin_visit['visit']
            self.storage.snapshot_add(origin_id, origin_visit['visit'],
                                      new_snapshots[i])
            url = reverse('api-origin-visit',
                          url_args={'origin_id': origin_id,
                                    'visit_id': visit_id})

            rv = self.client.get(url)
            self.assertEqual(rv.status_code, 200)
            self.assertEqual(rv['Content-Type'], 'application/json')

            expected_visit = self.origin_visit_get_by(origin_id, visit_id)

            origin_url = reverse('api-origin',
                                 url_args={'origin_id': origin_id})
            snapshot_url = reverse(
                'api-snapshot',
                url_args={'snapshot_id': expected_visit['snapshot']})

            expected_visit['origin_url'] = origin_url
            expected_visit['snapshot_url'] = snapshot_url

            self.assertEqual(rv.data, expected_visit)

    @given(origin())
    def test_api_lookup_origin_visit_not_found(self, origin):

        all_visits = list(reversed(get_origin_visits(origin)))

        max_visit_id = max([v['visit'] for v in all_visits])

        url = reverse('api-origin-visit',
                      url_args={'origin_id': origin['id'],
                                'visit_id': max_visit_id + 1})

        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, {
            'exception': 'NotFoundExc',
            'reason': 'Origin with id %s or its visit with id %s not found!' %
            (origin['id'], max_visit_id+1)
        })

    @given(origin())
    def test_api_origin_by_id(self, origin):

        url = reverse('api-origin', url_args={'origin_id': origin['id']})

        rv = self.client.get(url)

        expected_origin = self.origin_get(origin)

        origin_visits_url = reverse('api-origin-visits',
                                    url_args={'origin_id': origin['id']})

        expected_origin['origin_visits_url'] = origin_visits_url

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_origin)

    @given(origin())
    def test_api_origin_by_type_url(self, origin):

        url = reverse('api-origin',
                      url_args={'origin_type': origin['type'],
                                'origin_url': origin['url']})
        rv = self.client.get(url)

        expected_origin = self.origin_get(origin)

        origin_visits_url = reverse('api-origin-visits',
                                    url_args={'origin_id': origin['id']})

        expected_origin['origin_visits_url'] = origin_visits_url

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_origin)

    @given(new_origin())
    def test_api_origin_not_found(self, new_origin):

        url = reverse('api-origin',
                      url_args={'origin_type': new_origin['type'],
                                'origin_url': new_origin['url']})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, {
            'exception': 'NotFoundExc',
            'reason': 'Origin with type %s and url %s not found!' %
            (new_origin['type'], new_origin['url'])
        })

    @patch('swh.web.common.service.idx_storage')
    @given(origin())
    def test_api_origin_metadata_search(self, mock_idx_storage, origin):

        mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
            .side_effect = lambda conjunction, limit: [{
                'from_revision':
                b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed \xf2U\xfa\x05B8',
                'metadata': {'author': 'Jane Doe'},
                'id': origin['id'],
                'tool': {
                    'configuration': {
                        'context': ['NpmMapping', 'CodemetaMapping'],
                        'type': 'local'
                    },
                    'id': 3,
                    'name': 'swh-metadata-detector',
                    'version': '0.0.1'
                }
            }]

        url = reverse('api-origin-metadata-search',
                      query_params={'fulltext': 'Jane Doe'})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 200, rv.content)
        self.assertEqual(rv['Content-Type'], 'application/json')
        expected_data = [{
            'id': origin['id'],
            'type': origin['type'],
            'url': origin['url'],
            'metadata': {
                'metadata': {'author': 'Jane Doe'},
                'from_revision': '7026b7c1a2af56521e951c01ed20f255fa054238',
                'tool': {
                    'configuration': {
                        'context': ['NpmMapping', 'CodemetaMapping'],
                        'type': 'local'
                    },
                    'id': 3,
                    'name': 'swh-metadata-detector',
                    'version': '0.0.1',
                }
            }
        }]
        self.assertEqual(rv.data, expected_data)
        mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
            .assert_called_with(conjunction=['Jane Doe'], limit=70)

    @patch('swh.web.common.service.idx_storage')
    @given(origin())
    def test_api_origin_metadata_search_limit(self, mock_idx_storage, origin):

        mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
            .side_effect = lambda conjunction, limit: [{
                'from_revision':
                b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed \xf2U\xfa\x05B8',
                'metadata': {'author': 'Jane Doe'},
                'id': origin['id'],
                'tool': {
                    'configuration': {
                        'context': ['NpmMapping', 'CodemetaMapping'],
                        'type': 'local'
                    },
                    'id': 3,
                    'name': 'swh-metadata-detector',
                    'version': '0.0.1'
                }
            }]

        url = reverse('api-origin-metadata-search',
                      query_params={'fulltext': 'Jane Doe'})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 200, rv.content)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(len(rv.data), 1)
        mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
            .assert_called_with(conjunction=['Jane Doe'], limit=70)

        url = reverse('api-origin-metadata-search',
                      query_params={'fulltext': 'Jane Doe',
                                    'limit': 10})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 200, rv.content)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(len(rv.data), 1)
        mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
            .assert_called_with(conjunction=['Jane Doe'], limit=10)

        url = reverse('api-origin-metadata-search',
                      query_params={'fulltext': 'Jane Doe',
                                    'limit': 987})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 200, rv.content)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(len(rv.data), 1)
        mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
            .assert_called_with(conjunction=['Jane Doe'], limit=100)

    @patch('swh.web.common.service.idx_storage')
    def test_api_origin_metadata_search_invalid(self, mock_idx_storage):

        url = reverse('api-origin-metadata-search')
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 400, rv.content)
        mock_idx_storage.assert_not_called()

    @given(new_origins(20))
    def test_api_lookup_origins(self, new_origins):

        nb_origins = len(new_origins)

        expected_origins = self.storage.origin_add(new_origins)

        origin_from_idx = random.randint(1, nb_origins-1) - 1
        origin_from = expected_origins[origin_from_idx]['id']
        max_origin_id = expected_origins[-1]['id']
        origin_count = random.randint(1, max_origin_id - origin_from)

        url = reverse('api-origins',
                      query_params={'origin_from': origin_from,
                                    'origin_count': origin_count})

        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 200)

        start = origin_from_idx
        end = origin_from_idx + origin_count
        expected_origins = expected_origins[start:end]

        for expected_origin in expected_origins:
            expected_origin['origin_visits_url'] = reverse(
                'api-origin-visits',
                url_args={'origin_id': expected_origin['id']})

        self.assertEqual(rv.data, expected_origins)

        next_origin_id = expected_origins[-1]['id']+1
        if self.storage.origin_get({'id': next_origin_id}):
            self.assertIn('Link', rv)
            next_url = reverse('api-origins',
                               query_params={'origin_from': next_origin_id,
                                             'origin_count': origin_count})
            self.assertIn(next_url, rv['Link'])
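
test_api_lookup_origins above checks that the paginated origins listing exposes a Link header pointing at the next page. As a side note, a client consuming such an endpoint can simply follow that header; here is a small hypothetical sketch using the requests library (the public endpoint URL and the page size are assumptions, not part of the tests above):

import requests

def iter_origins(api_root="https://archive.softwareheritage.org/api/1"):
    # Walk the paginated origins listing by following the
    # Link: <...>; rel="next" header until the server stops sending one.
    url = "%s/origins/?origin_count=100" % api_root
    while url:
        response = requests.get(url)
        response.raise_for_status()
        yield from response.json()
        url = response.links.get("next", {}).get("url")
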
Code example #4
File: test_revision.py  Project: monperrus/swh-web
class RevisionApiTestCase(WebTestCase, APITestCase):
    @given(revision())
    def test_api_revision(self, revision):

        url = reverse('api-revision', url_args={'sha1_git': revision})
        rv = self.client.get(url)

        expected_revision = self.revision_get(revision)

        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(unknown_revision())
    def test_api_revision_not_found(self, unknown_revision):

        url = reverse('api-revision', url_args={'sha1_git': unknown_revision})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason':
                'Revision with sha1_git %s not found.' % unknown_revision
            })

    @given(revision())
    def test_api_revision_raw_ok(self, revision):

        url = reverse('api-revision-raw-message',
                      url_args={'sha1_git': revision})
        rv = self.client.get(url)

        expected_message = self.revision_get(revision)['message']

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/octet-stream')
        self.assertEqual(rv.content, expected_message.encode())

    @given(new_revision())
    def test_api_revision_raw_ok_no_msg(self, new_revision):

        del new_revision['message']
        self.storage.revision_add([new_revision])

        new_revision_id = hash_to_hex(new_revision['id'])

        url = reverse('api-revision-raw-message',
                      url_args={'sha1_git': new_revision_id})

        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception':
                'NotFoundExc',
                'reason':
                'No message for revision with sha1_git %s.' % new_revision_id
            })

    @given(unknown_revision())
    def test_api_revision_raw_ko_no_rev(self, unknown_revision):

        url = reverse('api-revision-raw-message',
                      url_args={'sha1_git': unknown_revision})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason':
                'Revision with sha1_git %s not found.' % unknown_revision
            })

    @given(unknown_origin_id())
    def test_api_revision_with_origin_not_found(self, unknown_origin_id):

        url = reverse('api-revision-origin',
                      url_args={'origin_id': unknown_origin_id})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Origin with id %s not found!' % unknown_origin_id
            })

    @given(origin())
    def test_api_revision_with_origin(self, origin):

        url = reverse('api-revision-origin',
                      url_args={'origin_id': origin['id']})
        rv = self.client.get(url)

        snapshot = self.snapshot_get_latest(origin['id'])
        expected_revision = self.revision_get(
            snapshot['branches']['HEAD']['target'])

        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(origin())
    def test_api_revision_with_origin_and_branch_name(self, origin):

        snapshot = self.snapshot_get_latest(origin['id'])

        branch_name = random.choice(
            list(b for b in snapshot['branches'].keys()
                 if snapshot['branches'][b]['target_type'] == 'revision'))

        url = reverse('api-revision-origin',
                      url_args={
                          'origin_id': origin['id'],
                          'branch_name': branch_name
                      })

        rv = self.client.get(url)

        expected_revision = self.revision_get(
            snapshot['branches'][branch_name]['target'])

        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(origin_with_multiple_visits())
    def test_api_revision_with_origin_and_branch_name_and_ts(self, origin):

        visit = random.choice(self.origin_visit_get(origin['id']))

        snapshot = self.snapshot_get(visit['snapshot'])

        branch_name = random.choice(
            list(b for b in snapshot['branches'].keys()
                 if snapshot['branches'][b]['target_type'] == 'revision'))

        url = reverse('api-revision-origin',
                      url_args={
                          'origin_id': origin['id'],
                          'branch_name': branch_name,
                          'ts': visit['date']
                      })

        rv = self.client.get(url)

        expected_revision = self.revision_get(
            snapshot['branches'][branch_name]['target'])

        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(origin_with_multiple_visits())
    def test_api_revision_with_origin_and_branch_name_and_ts_escapes(
            self, origin):
        visit = random.choice(self.origin_visit_get(origin['id']))

        snapshot = self.snapshot_get(visit['snapshot'])

        branch_name = random.choice(
            list(b for b in snapshot['branches'].keys()
                 if snapshot['branches'][b]['target_type'] == 'revision'))

        date = parse_timestamp(visit['date'])

        formatted_date = date.strftime('Today is %B %d, %Y at %X')

        url = reverse('api-revision-origin',
                      url_args={
                          'origin_id': origin['id'],
                          'branch_name': branch_name,
                          'ts': formatted_date
                      })

        rv = self.client.get(url)

        expected_revision = self.revision_get(
            snapshot['branches'][branch_name]['target'])

        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(unknown_origin_id())
    def test_api_directory_through_revision_origin_ko(self, unknown_origin_id):

        url = reverse('api-revision-origin-directory',
                      url_args={'origin_id': unknown_origin_id})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Origin with id %s not found!' % unknown_origin_id
            })

    @given(origin())
    def test_api_directory_through_revision_origin(self, origin):

        url = reverse('api-revision-origin-directory',
                      url_args={'origin_id': origin['id']})
        rv = self.client.get(url)

        snapshot = self.snapshot_get_latest(origin['id'])
        revision_id = snapshot['branches']['HEAD']['target']
        revision = self.revision_get(revision_id)
        directory = self.directory_ls(revision['directory'])

        for entry in directory:
            if entry['type'] == 'dir':
                entry['target_url'] = reverse(
                    'api-directory', url_args={'sha1_git': entry['target']})
                entry['dir_url'] = reverse('api-revision-origin-directory',
                                           url_args={
                                               'origin_id': origin['id'],
                                               'path': entry['name']
                                           })
            elif entry['type'] == 'file':
                entry['target_url'] = reverse(
                    'api-content',
                    url_args={'q': 'sha1_git:%s' % entry['target']})
                entry['file_url'] = reverse('api-revision-origin-directory',
                                            url_args={
                                                'origin_id': origin['id'],
                                                'path': entry['name']
                                            })
            elif entry['type'] == 'rev':
                entry['target_url'] = reverse(
                    'api-revision', url_args={'sha1_git': entry['target']})
                entry['rev_url'] = reverse('api-revision-origin-directory',
                                           url_args={
                                               'origin_id': origin['id'],
                                               'path': entry['name']
                                           })

        expected_result = {
            'content': directory,
            'path': '.',
            'revision': revision_id,
            'type': 'dir'
        }

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_result)

    @given(revision())
    def test_api_revision_log(self, revision):

        per_page = 10

        url = reverse('api-revision-log',
                      url_args={'sha1_git': revision},
                      query_params={'per_page': per_page})

        rv = self.client.get(url)

        expected_log = self.revision_log(revision, limit=per_page + 1)
        expected_log = list(map(self._enrich_revision, expected_log))

        has_next = len(expected_log) > per_page

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data,
                         expected_log[:-1] if has_next else expected_log)

        if has_next:
            self.assertIn('Link', rv)
            next_log_url = reverse(
                'api-revision-log',
                url_args={'sha1_git': expected_log[-1]['id']},
                query_params={'per_page': per_page})
            self.assertIn(next_log_url, rv['Link'])

    @given(unknown_revision())
    def test_api_revision_log_not_found(self, unknown_revision):

        url = reverse('api-revision-log',
                      url_args={'sha1_git': unknown_revision})

        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason':
                'Revision with sha1_git %s not found.' % unknown_revision
            })
        self.assertFalse(rv.has_header('Link'))

    @given(revision())
    def test_api_revision_log_context(self, revision):

        revisions = self.revision_log(revision, limit=4)

        prev_rev = revisions[0]['id']
        rev = revisions[-1]['id']

        per_page = 10

        url = reverse('api-revision-log',
                      url_args={
                          'sha1_git': rev,
                          'prev_sha1s': prev_rev
                      },
                      query_params={'per_page': per_page})

        rv = self.client.get(url)

        expected_log = self.revision_log(rev, limit=per_page)
        prev_revision = self.revision_get(prev_rev)
        expected_log.insert(0, prev_revision)
        expected_log = list(map(self._enrich_revision, expected_log))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_log)

    @given(origin())
    def test_api_revision_log_by(self, origin):

        per_page = 10

        url = reverse('api-revision-origin-log',
                      url_args={'origin_id': origin['id']},
                      query_params={'per_page': per_page})

        rv = self.client.get(url)

        snapshot = self.snapshot_get_latest(origin['id'])

        expected_log = self.revision_log(
            snapshot['branches']['HEAD']['target'], limit=per_page + 1)

        expected_log = list(map(self._enrich_revision, expected_log))

        has_next = len(expected_log) > per_page

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data,
                         expected_log[:-1] if has_next else expected_log)
        if has_next:
            self.assertIn('Link', rv)
            next_log_url = reverse('api-revision-origin-log',
                                   url_args={
                                       'origin_id': origin['id'],
                                       'branch_name': 'HEAD'
                                   },
                                   query_params={
                                       'per_page': per_page,
                                       'sha1_git': expected_log[-1]['id']
                                   })
            self.assertIn(next_log_url, rv['Link'])

    @given(origin())
    def test_api_revision_log_by_ko(self, origin):

        invalid_branch_name = 'foobar'

        url = reverse('api-revision-origin-log',
                      url_args={
                          'origin_id': origin['id'],
                          'branch_name': invalid_branch_name
                      })

        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertFalse(rv.has_header('Link'))
        self.assertEqual(
            rv.data, {
                'exception':
                'NotFoundExc',
                'reason':
                'Revision for origin %s and branch %s not found.' %
                (origin['id'], invalid_branch_name)
            })

    @patch('swh.web.api.views.revision._revision_directory_by')
    def test_api_revision_directory_ko_not_found(self, mock_rev_dir):
        # given
        mock_rev_dir.side_effect = NotFoundExc('Not found')

        # then
        rv = self.client.get('/api/1/revision/999/directory/some/path/to/dir/')

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, {
            'exception': 'NotFoundExc',
            'reason': 'Not found'
        })

        mock_rev_dir.assert_called_once_with(
            {'sha1_git': '999'},
            'some/path/to/dir',
            '/api/1/revision/999/directory/some/path/to/dir/',
            with_data=False)

    @patch('swh.web.api.views.revision._revision_directory_by')
    def test_api_revision_directory_ok_returns_dir_entries(self, mock_rev_dir):
        stub_dir = {
            'type': 'dir',
            'revision': '999',
            'content': [{
                'sha1_git': '789',
                'type': 'file',
                'target': '101',
                'target_url': '/api/1/content/sha1_git:101/',
                'name': 'somefile',
                'file_url': '/api/1/revision/999/directory/some/path/'
                            'somefile/'
            }, {
                'sha1_git': '123',
                'type': 'dir',
                'target': '456',
                'target_url': '/api/1/directory/456/',
                'name': 'to-subdir',
                'dir_url': '/api/1/revision/999/directory/some/path/'
                           'to-subdir/',
            }]
        }

        # given
        mock_rev_dir.return_value = stub_dir

        # then
        rv = self.client.get('/api/1/revision/999/directory/some/path/')

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, stub_dir)

        mock_rev_dir.assert_called_once_with(
            {'sha1_git': '999'},
            'some/path',
            '/api/1/revision/999/directory/some/path/',
            with_data=False)

    @patch('swh.web.api.views.revision._revision_directory_by')
    def test_api_revision_directory_ok_returns_content(self, mock_rev_dir):
        stub_content = {
            'type': 'file',
            'revision': '999',
            'content': {
                'sha1_git': '789',
                'sha1': '101',
                'data_url': '/api/1/content/101/raw/',
            }
        }

        # given
        mock_rev_dir.return_value = stub_content

        # then
        url = '/api/1/revision/666/directory/some/other/path/'
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, stub_content)

        mock_rev_dir.assert_called_once_with({'sha1_git': '666'},
                                             'some/other/path',
                                             url,
                                             with_data=False)

    def _enrich_revision(self, revision):
        author_url = reverse('api-person',
                             url_args={'person_id': revision['author']['id']})

        committer_url = reverse(
            'api-person', url_args={'person_id': revision['committer']['id']})

        directory_url = reverse('api-directory',
                                url_args={'sha1_git': revision['directory']})

        history_url = reverse('api-revision-log',
                              url_args={'sha1_git': revision['id']})

        parents_id_url = []
        for p in revision['parents']:
            parents_id_url.append({
                'id': p,
                'url': reverse('api-revision', url_args={'sha1_git': p}),
            })

        revision_url = reverse('api-revision',
                               url_args={'sha1_git': revision['id']})

        revision['author_url'] = author_url
        revision['committer_url'] = committer_url
        revision['directory_url'] = directory_url
        revision['history_url'] = history_url
        revision['url'] = revision_url
        revision['parents'] = parents_id_url

        return revision

    @given(revision())
    def test_api_revision_uppercase(self, revision):
        url = reverse('api-revision-uppercase-checksum',
                      url_args={'sha1_git': revision.upper()})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)

        redirect_url = reverse('api-revision', url_args={'sha1_git': revision})

        self.assertEqual(resp['location'], redirect_url)
Code example #5
File: test_badges.py  Project: shivam2003sy/swh-web
    unknown_snapshot,
)
from swh.web.tests.utils import check_http_get_response


@given(content())
def test_content_badge(client, content):
    _test_badge_endpoints(client, "content", content["sha1_git"])


@given(directory())
def test_directory_badge(client, directory):
    _test_badge_endpoints(client, "directory", directory)


@given(origin())
def test_origin_badge(client, origin):
    _test_badge_endpoints(client, "origin", origin["url"])


@given(release())
def test_release_badge(client, release):
    _test_badge_endpoints(client, "release", release)


@given(revision())
def test_revision_badge(client, revision):
    _test_badge_endpoints(client, "revision", revision)


@given(snapshot())
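
The _test_badge_endpoints helper these badge tests call is not included in the excerpt. A minimal sketch of what it might check, reusing the check_http_get_response helper imported above; the 'swh-badge' URL name and the exact assertions are assumptions, not the project's actual helper:

def _test_badge_endpoints(client, object_type, object_id):
    # Badge for a known object: expect the endpoint to resolve with HTTP 200.
    # Content-type and error-path checks are omitted in this sketch.
    url = reverse("swh-badge",  # assumed URL name
                  url_args={"object_type": object_type,
                            "object_id": object_id})
    check_http_get_response(client, url, status_code=200)
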
Code example #6
class SwhBrowseOriginTest(WebTestCase):

    @given(origin_with_multiple_visits())
    def test_origin_visits_browse(self, origin):

        url = reverse('browse-origin-visits',
                      url_args={'origin_type': origin['type'],
                                'origin_url': origin['url']})
        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('origin-visits.html')

        url = reverse('browse-origin-visits',
                      url_args={'origin_url': origin['url']})
        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('origin-visits.html')

        visits = self.origin_visit_get(origin['id'])

        for v in visits:
            vdate = format_utc_iso_date(v['date'], '%Y-%m-%dT%H:%M:%SZ')
            browse_dir_url = reverse('browse-origin-directory',
                                     url_args={'origin_url': origin['url'],
                                               'timestamp': vdate})
            self.assertContains(resp, browse_dir_url)

    def origin_content_view_helper(self, origin_info, origin_visits,
                                   origin_branches, origin_releases,
                                   root_dir_sha1, content,
                                   visit_id=None, timestamp=None):

        content_path = '/'.join(content['path'].split('/')[1:])

        url_args = {'origin_type': origin_info['type'],
                    'origin_url': origin_info['url'],
                    'path': content_path}

        if not visit_id:
            visit_id = origin_visits[-1]['visit']

        query_params = {}

        if timestamp:
            url_args['timestamp'] = timestamp

        if visit_id:
            query_params['visit_id'] = visit_id

        url = reverse('browse-origin-content',
                      url_args=url_args,
                      query_params=query_params)

        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('content.html')

        self.assertContains(resp, '<code class="%s">' %
                                  content['hljs_language'])
        self.assertContains(resp, escape(content['data']))

        split_path = content_path.split('/')

        filename = split_path[-1]
        path = content_path.replace(filename, '')[:-1]

        path_info = gen_path_info(path)

        del url_args['path']

        if timestamp:
            url_args['timestamp'] = \
                format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
                                    '%Y-%m-%dT%H:%M:%S')

        root_dir_url = reverse('browse-origin-directory',
                               url_args=url_args,
                               query_params=query_params)

        self.assertContains(resp, '<li class="swh-path">',
                            count=len(path_info)+1)

        self.assertContains(resp, '<a href="%s">%s</a>' %
                            (root_dir_url, root_dir_sha1[:7]))

        for p in path_info:
            url_args['path'] = p['path']
            dir_url = reverse('browse-origin-directory',
                              url_args=url_args,
                              query_params=query_params)
            self.assertContains(resp, '<a href="%s">%s</a>' %
                                (dir_url, p['name']))

        self.assertContains(resp, '<li>%s</li>' % filename)

        query_string = 'sha1_git:' + content['sha1_git']

        url_raw = reverse('browse-content-raw',
                          url_args={'query_string': query_string},
                          query_params={'filename': filename})
        self.assertContains(resp, url_raw)

        if 'path' in url_args:
            del url_args['path']

        origin_branches_url = reverse('browse-origin-branches',
                                      url_args=url_args,
                                      query_params=query_params)

        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                  (origin_branches_url, len(origin_branches)))

        origin_releases_url = reverse('browse-origin-releases',
                                      url_args=url_args,
                                      query_params=query_params)

        self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                  (origin_releases_url, len(origin_releases)))

        self.assertContains(resp, '<li class="swh-branch">',
                            count=len(origin_branches))

        url_args['path'] = content_path

        for branch in origin_branches:
            query_params['branch'] = branch['name']
            root_dir_branch_url = reverse('browse-origin-content',
                                          url_args=url_args,
                                          query_params=query_params)

            self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)

        self.assertContains(resp, '<li class="swh-release">',
                            count=len(origin_releases))

        query_params['branch'] = None
        for release in origin_releases:
            query_params['release'] = release['name']
            root_dir_release_url = reverse('browse-origin-content',
                                           url_args=url_args,
                                           query_params=query_params)

            self.assertContains(resp, '<a href="%s">' % root_dir_release_url)

        del url_args['origin_type']

        url = reverse('browse-origin-content',
                      url_args=url_args,
                      query_params=query_params)

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('content.html')

        swh_cnt_id = get_swh_persistent_id('content', content['sha1_git'])
        swh_cnt_id_url = reverse('browse-swh-id',
                                 url_args={'swh_id': swh_cnt_id})
        self.assertContains(resp, swh_cnt_id)
        self.assertContains(resp, swh_cnt_id_url)

        self.assertContains(resp, 'swh-take-new-snapshot')

    @given(origin_with_multiple_visits())
    def test_origin_content_view(self, origin):

        origin_visits = self.origin_visit_get(origin['id'])

        def _get_test_data(visit_idx):
            snapshot = self.snapshot_get(origin_visits[visit_idx]['snapshot'])
            head_rev_id = snapshot['branches']['HEAD']['target']
            head_rev = self.revision_get(head_rev_id)
            dir_content = self.directory_ls(head_rev['directory'])
            dir_files = [e for e in dir_content if e['type'] == 'file']
            dir_file = random.choice(dir_files)
            branches, releases = process_snapshot_branches(snapshot)
            return {
                'branches': branches,
                'releases': releases,
                'root_dir_sha1': head_rev['directory'],
                'content': get_content(dir_file['checksums']['sha1']),
                'visit': origin_visits[visit_idx]
            }

        test_data = _get_test_data(-1)

        self.origin_content_view_helper(origin,
                                        origin_visits,
                                        test_data['branches'],
                                        test_data['releases'],
                                        test_data['root_dir_sha1'],
                                        test_data['content'])

        self.origin_content_view_helper(origin,
                                        origin_visits,
                                        test_data['branches'],
                                        test_data['releases'],
                                        test_data['root_dir_sha1'],
                                        test_data['content'],
                                        timestamp=test_data['visit']['date'])

        visit_unix_ts = parse_timestamp(test_data['visit']['date']).timestamp()
        visit_unix_ts = int(visit_unix_ts)

        self.origin_content_view_helper(origin,
                                        origin_visits,
                                        test_data['branches'],
                                        test_data['releases'],
                                        test_data['root_dir_sha1'],
                                        test_data['content'],
                                        timestamp=visit_unix_ts)

        test_data = _get_test_data(0)

        self.origin_content_view_helper(origin,
                                        origin_visits,
                                        test_data['branches'],
                                        test_data['releases'],
                                        test_data['root_dir_sha1'],
                                        test_data['content'],
                                        visit_id=test_data['visit']['visit'])

    def origin_directory_view_helper(self, origin_info, origin_visits,
                                     origin_branches, origin_releases,
                                     root_directory_sha1, directory_entries,
                                     visit_id=None, timestamp=None, path=None):

        dirs = [e for e in directory_entries
                if e['type'] in ('dir', 'rev')]
        files = [e for e in directory_entries
                 if e['type'] == 'file']

        if not visit_id:
            visit_id = origin_visits[-1]['visit']

        url_args = {'origin_url': origin_info['url']}

        query_params = {}

        if timestamp:
            url_args['timestamp'] = timestamp
        else:
            query_params['visit_id'] = visit_id

        if path:
            url_args['path'] = path

        url = reverse('browse-origin-directory',
                      url_args=url_args,
                      query_params=query_params)

        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('directory.html')

        self.assertContains(resp, '<td class="swh-directory">',
                            count=len(dirs))
        self.assertContains(resp, '<td class="swh-content">',
                            count=len(files))

        if timestamp:
            url_args['timestamp'] = \
                format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
                                    '%Y-%m-%dT%H:%M:%S')

        for d in dirs:
            if d['type'] == 'rev':
                dir_url = reverse('browse-revision',
                                  url_args={'sha1_git': d['target']})
            else:
                dir_path = d['name']
                if path:
                    dir_path = "%s/%s" % (path, d['name'])
                dir_url_args = dict(url_args)
                dir_url_args['path'] = dir_path
                dir_url = reverse('browse-origin-directory',
                                  url_args=dir_url_args,
                                  query_params=query_params)
            self.assertContains(resp, dir_url)

        for f in files:
            file_path = f['name']
            if path:
                file_path = "%s/%s" % (path, f['name'])
            file_url_args = dict(url_args)
            file_url_args['path'] = file_path
            file_url = reverse('browse-origin-content',
                               url_args=file_url_args,
                               query_params=query_params)
            self.assertContains(resp, file_url)

        if 'path' in url_args:
            del url_args['path']

        root_dir_branch_url = \
            reverse('browse-origin-directory',
                    url_args=url_args,
                    query_params=query_params)

        nb_bc_paths = 1
        if path:
            nb_bc_paths = len(path.split('/')) + 1

        self.assertContains(resp, '<li class="swh-path">', count=nb_bc_paths)
        self.assertContains(resp, '<a href="%s">%s</a>' %
                                  (root_dir_branch_url,
                                   root_directory_sha1[:7]))

        origin_branches_url = reverse('browse-origin-branches',
                                      url_args=url_args,
                                      query_params=query_params)

        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                  (origin_branches_url, len(origin_branches)))

        origin_releases_url = reverse('browse-origin-releases',
                                      url_args=url_args,
                                      query_params=query_params)

        nb_releases = len(origin_releases)
        if nb_releases > 0:
            self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                      (origin_releases_url, nb_releases))

        if path:
            url_args['path'] = path

        self.assertContains(resp, '<li class="swh-branch">',
                            count=len(origin_branches))

        for branch in origin_branches:
            query_params['branch'] = branch['name']
            root_dir_branch_url = \
                reverse('browse-origin-directory',
                        url_args=url_args,
                        query_params=query_params)

            self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)

        self.assertContains(resp, '<li class="swh-release">',
                            count=len(origin_releases))

        query_params['branch'] = None
        for release in origin_releases:
            query_params['release'] = release['name']
            root_dir_release_url = \
                reverse('browse-origin-directory',
                        url_args=url_args,
                        query_params=query_params)

            self.assertContains(resp, '<a href="%s">' % root_dir_release_url)

        self.assertContains(resp, 'vault-cook-directory')
        self.assertContains(resp, 'vault-cook-revision')

        swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
        swh_dir_id_url = reverse('browse-swh-id',
                                 url_args={'swh_id': swh_dir_id})
        self.assertContains(resp, swh_dir_id)
        self.assertContains(resp, swh_dir_id_url)

        self.assertContains(resp, 'swh-take-new-snapshot')

    @given(origin())
    def test_origin_root_directory_view(self, origin):

        origin_visits = self.origin_visit_get(origin['id'])

        visit = origin_visits[-1]
        snapshot = self.snapshot_get(visit['snapshot'])
        head_rev_id = snapshot['branches']['HEAD']['target']
        head_rev = self.revision_get(head_rev_id)
        root_dir_sha1 = head_rev['directory']
        dir_content = self.directory_ls(root_dir_sha1)
        branches, releases = process_snapshot_branches(snapshot)
        visit_unix_ts = parse_timestamp(visit['date']).timestamp()
        visit_unix_ts = int(visit_unix_ts)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content,
                                          visit_id=visit['visit'])

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content,
                                          timestamp=visit_unix_ts)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content,
                                          timestamp=visit['date'])

        origin = dict(origin)
        del origin['type']

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content,
                                          visit_id=visit['visit'])

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content,
                                          timestamp=visit_unix_ts)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          dir_content,
                                          timestamp=visit['date'])

    @given(origin())
    def test_origin_sub_directory_view(self, origin):

        origin_visits = self.origin_visit_get(origin['id'])

        visit = origin_visits[-1]
        snapshot = self.snapshot_get(visit['snapshot'])
        head_rev_id = snapshot['branches']['HEAD']['target']
        head_rev = self.revision_get(head_rev_id)
        root_dir_sha1 = head_rev['directory']
        subdirs = [e for e in self.directory_ls(root_dir_sha1)
                   if e['type'] == 'dir']
        branches, releases = process_snapshot_branches(snapshot)
        visit_unix_ts = parse_timestamp(visit['date']).timestamp()
        visit_unix_ts = int(visit_unix_ts)

        if len(subdirs) == 0:
            return

        subdir = random.choice(subdirs)
        subdir_content = self.directory_ls(subdir['target'])
        subdir_path = subdir['name']

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path,
                                          visit_id=visit['visit'])

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path,
                                          timestamp=visit_unix_ts)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path,
                                          timestamp=visit['date'])

        origin = dict(origin)
        del origin['type']

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path,
                                          visit_id=visit['visit'])

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path,
                                          timestamp=visit_unix_ts)

        self.origin_directory_view_helper(origin, origin_visits,
                                          branches,
                                          releases,
                                          root_dir_sha1,
                                          subdir_content,
                                          path=subdir_path,
                                          timestamp=visit['date'])

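    # Shared assertions for an origin's branches view: the page must list
    # every branch of the snapshot and link each one to its directory browse
    # view and to its target revision.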
    def origin_branches_helper(self, origin_info, origin_snapshot):
        url_args = {'origin_type': origin_info['type'],
                    'origin_url': origin_info['url']}

        url = reverse('browse-origin-branches',
                      url_args=url_args)

        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('branches.html')

        origin_branches = origin_snapshot[0]
        origin_releases = origin_snapshot[1]

        origin_branches_url = reverse('browse-origin-branches',
                                      url_args=url_args)

        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                  (origin_branches_url, len(origin_branches)))

        origin_releases_url = reverse('browse-origin-releases',
                                      url_args=url_args)

        nb_releases = len(origin_releases)
        if nb_releases > 0:
            self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                      (origin_releases_url, nb_releases))

        self.assertContains(resp, '<tr class="swh-branch-entry',
                            count=len(origin_branches))

        for branch in origin_branches:
            browse_branch_url = reverse(
                'browse-origin-directory',
                url_args={'origin_type': origin_info['type'],
                          'origin_url': origin_info['url']},
                query_params={'branch': branch['name']})
            self.assertContains(resp, '<a href="%s">' %
                                      escape(browse_branch_url))

            browse_revision_url = reverse(
                'browse-revision',
                url_args={'sha1_git': branch['revision']},
                query_params={'origin_type': origin_info['type'],
                              'origin': origin_info['url']})
            self.assertContains(resp, '<a href="%s">' %
                                      escape(browse_revision_url))

    @given(origin())
    def test_origin_branches(self, origin):

        origin_visits = self.origin_visit_get(origin['id'])

        visit = origin_visits[-1]
        snapshot = self.snapshot_get(visit['snapshot'])
        snapshot_content = process_snapshot_branches(snapshot)

        self.origin_branches_helper(origin, snapshot_content)

        origin = dict(origin)
        origin['type'] = None

        self.origin_branches_helper(origin, snapshot_content)

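    # Shared assertions for an origin's releases view: every release of the
    # snapshot must be listed with links to its release page and to its
    # target revision.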
    def origin_releases_helper(self, origin_info, origin_snapshot):
        url_args = {'origin_type': origin_info['type'],
                    'origin_url': origin_info['url']}

        url = reverse('browse-origin-releases',
                      url_args=url_args)

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('releases.html')

        origin_branches = origin_snapshot[0]
        origin_releases = origin_snapshot[1]

        origin_branches_url = reverse('browse-origin-branches',
                                      url_args=url_args)

        self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
                                  (origin_branches_url, len(origin_branches)))

        origin_releases_url = reverse('browse-origin-releases',
                                      url_args=url_args)

        nb_releases = len(origin_releases)
        if nb_releases > 0:
            self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
                                      (origin_releases_url, nb_releases))

        self.assertContains(resp, '<tr class="swh-release-entry',
                            count=nb_releases)

        for release in origin_releases:
            browse_release_url = reverse(
                'browse-release',
                url_args={'sha1_git': release['id']},
                query_params={'origin': origin_info['url']})
            browse_revision_url = reverse(
                'browse-revision',
                url_args={'sha1_git': release['target']},
                query_params={'origin': origin_info['url']})

            self.assertContains(resp, '<a href="%s">' %
                                      escape(browse_release_url))
            self.assertContains(resp, '<a href="%s">' %
                                      escape(browse_revision_url))

    @given(origin())
    def test_origin_releases(self, origin):

        origin_visits = self.origin_visit_get(origin['id'])

        visit = origin_visits[-1]
        snapshot = self.snapshot_get(visit['snapshot'])
        snapshot_content = process_snapshot_branches(snapshot)

        self.origin_releases_helper(origin, snapshot_content)

        origin = dict(origin)
        origin['type'] = None

        self.origin_releases_helper(origin, snapshot_content)

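    # Mock the backend to raise the various lookup failures (unknown origin,
    # missing visit, empty branch list, missing directory, snapshot or
    # content) and check that each case renders the error template with a
    # 404 status and a meaningful message.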
    @patch('swh.web.browse.views.utils.snapshot_context.request_content')
    @patch('swh.web.common.origin_visits.get_origin_visits')
    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
    @patch('swh.web.browse.utils.service')
    @patch('swh.web.browse.views.origin.service')
    @patch('swh.web.browse.views.utils.snapshot_context.service')
    @patch('swh.web.browse.views.origin.get_origin_info')
    def test_origin_request_errors(self, mock_get_origin_info,
                                   mock_snapshot_service,
                                   mock_origin_service,
                                   mock_utils_service,
                                   mock_get_origin_visit_snapshot,
                                   mock_get_origin_visits,
                                   mock_request_content):

        mock_get_origin_info.side_effect = \
            NotFoundExc('origin not found')
        url = reverse('browse-origin-visits',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar'})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp, 'origin not found', status_code=404)

        mock_utils_service.lookup_origin.side_effect = None
        mock_utils_service.lookup_origin.return_value = {'type': 'foo',
                                                         'url': 'bar',
                                                         'id': 457}
        mock_get_origin_visits.return_value = []
        url = reverse('browse-origin-directory',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar'})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp, "No visit", status_code=404)

        mock_get_origin_visits.return_value = [{'visit': 1}]
        mock_get_origin_visit_snapshot.side_effect = \
            NotFoundExc('visit not found')
        url = reverse('browse-origin-directory',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar'},
                      query_params={'visit_id': 2})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')

        mock_get_origin_visits.return_value = [{
            'date': '2015-09-26T09:30:52.373449+00:00',
            'metadata': {},
            'origin': 457,
            'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
            'status': 'full',
            'visit': 1
        }]
        mock_get_origin_visit_snapshot.side_effect = None
        mock_get_origin_visit_snapshot.return_value = (
            [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
              'name': 'HEAD',
              'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
              'date': '04 May 2017, 13:27 UTC',
              'message': ''}],
            []
        )
        mock_utils_service.lookup_snapshot_size.return_value = {
            'revision': 1,
            'release': 0
        }
        mock_utils_service.lookup_directory.side_effect = \
            NotFoundExc('Directory not found')
        url = reverse('browse-origin-directory',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar'})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp, 'Directory not found', status_code=404)

        with patch('swh.web.browse.views.utils.snapshot_context.'
                   'get_snapshot_context') as mock_get_snapshot_context:
            mock_get_snapshot_context.side_effect = \
                NotFoundExc('Snapshot not found')
            url = reverse('browse-origin-directory',
                          url_args={'origin_type': 'foo',
                                    'origin_url': 'bar'})
            resp = self.client.get(url)
            self.assertEqual(resp.status_code, 404)
            self.assertTemplateUsed('error.html')
            self.assertContains(resp, 'Snapshot not found', status_code=404)

        mock_origin_service.lookup_origin.side_effect = None
        mock_origin_service.lookup_origin.return_value = {'type': 'foo',
                                                          'url': 'bar',
                                                          'id': 457}
        mock_get_origin_visits.return_value = []
        url = reverse('browse-origin-content',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar',
                                'path': 'foo'})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp, "No visit", status_code=404)

        mock_get_origin_visits.return_value = [{'visit': 1}]
        mock_get_origin_visit_snapshot.side_effect = \
            NotFoundExc('visit not found')
        url = reverse('browse-origin-content',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar',
                                'path': 'foo'},
                      query_params={'visit_id': 2})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')

        mock_get_origin_visits.return_value = [{
            'date': '2015-09-26T09:30:52.373449+00:00',
            'metadata': {},
            'origin': 457,
            'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
            'status': 'full',
            'visit': 1
        }]
        mock_get_origin_visit_snapshot.side_effect = None
        mock_get_origin_visit_snapshot.return_value = ([], [])
        url = reverse('browse-origin-content',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar',
                                'path': 'baz'})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertRegex(resp.content.decode('utf-8'),
                         'Origin.*has an empty list of branches')

        mock_get_origin_visit_snapshot.return_value = (
            [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
              'name': 'HEAD',
              'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
              'date': '04 May 2017, 13:27 UTC',
              'message': ''}],
            []
        )
        mock_snapshot_service.lookup_directory_with_path.return_value = \
            {'target': '5ecd9f37b7a2d2e9980d201acd6286116f2ba1f1'}
        mock_request_content.side_effect = \
            NotFoundExc('Content not found')
        url = reverse('browse-origin-content',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar',
                                'path': 'baz'})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp, 'Content not found', status_code=404)

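    # An origin whose latest snapshot holds no revision nor release should
    # still render a 200 page stating that the snapshot is empty.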
    @patch('swh.web.common.origin_visits.get_origin_visits')
    @patch('swh.web.browse.utils.get_origin_visit_snapshot')
    @patch('swh.web.browse.utils.service')
    def test_origin_empty_snapshot(self, mock_utils_service,
                                   mock_get_origin_visit_snapshot,
                                   mock_get_origin_visits):

        mock_get_origin_visits.return_value = [{
            'date': '2015-09-26T09:30:52.373449+00:00',
            'metadata': {},
            'origin': 457,
            'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
            'status': 'full',
            'visit': 1
        }]
        mock_get_origin_visit_snapshot.return_value = ([], [])
        mock_utils_service.lookup_snapshot_size.return_value = {
            'revision': 0,
            'release': 0
        }
        url = reverse('browse-origin-directory',
                      url_args={'origin_type': 'foo',
                                'origin_url': 'bar'})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('content.html')
        self.assertRegex(resp.content.decode('utf-8'), 'snapshot.*is empty')
Code example #7
0
class SwhBrowseRevisionTest(WebTestCase):

    @given(revision())
    def test_revision_browse(self, revision):

        url = reverse('browse-revision',
                      url_args={'sha1_git': revision})

        revision_data = self.revision_get(revision)

        author_id = revision_data['author']['id']
        author_name = revision_data['author']['name']
        committer_id = revision_data['committer']['id']
        committer_name = revision_data['committer']['name']
        dir_id = revision_data['directory']

        author_url = reverse('browse-person',
                             url_args={'person_id': author_id})
        committer_url = reverse('browse-person',
                                url_args={'person_id': committer_id})

        directory_url = reverse('browse-directory',
                                url_args={'sha1_git': dir_id})

        history_url = reverse('browse-revision-log',
                              url_args={'sha1_git': revision})

        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision.html')
        self.assertContains(resp, '<a href="%s">%s</a>' %
                                  (author_url, author_name))
        self.assertContains(resp, '<a href="%s">%s</a>' %
                                  (committer_url, committer_name))
        self.assertContains(resp, directory_url)
        self.assertContains(resp, history_url)

        for parent in revision_data['parents']:
            parent_url = reverse('browse-revision',
                                 url_args={'sha1_git': parent})
            self.assertContains(resp, '<a href="%s">%s</a>' %
                                (parent_url, parent))

        author_date = revision_data['date']
        committer_date = revision_data['committer_date']

        message_lines = revision_data['message'].split('\n')

        self.assertContains(resp, format_utc_iso_date(author_date))
        self.assertContains(resp, format_utc_iso_date(committer_date))
        self.assertContains(resp, escape(message_lines[0]))
        self.assertContains(resp, escape('\n'.join(message_lines[1:])))

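    # Browse a revision in the context of its origin: the page should link to
    # the origin-scoped directory and log views, expose the vault cooking
    # actions, display the persistent identifiers of the revision and of its
    # root directory, and offer the 'take new snapshot' action.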
    @given(origin())
    def test_revision_origin_browse(self, origin):

        snapshot = self.snapshot_get_latest(origin['id'])
        revision = snapshot['branches']['HEAD']['target']
        revision_data = self.revision_get(revision)
        dir_id = revision_data['directory']

        origin_directory_url = reverse('browse-origin-directory',
                                       url_args={'origin_url': origin['url']},
                                       query_params={'revision': revision})

        origin_revision_log_url = reverse('browse-origin-log',
                                          url_args={'origin_url': origin['url']}, # noqa
                                          query_params={'revision': revision})

        url = reverse('browse-revision',
                      url_args={'sha1_git': revision},
                      query_params={'origin': origin['url']})

        resp = self.client.get(url)

        self.assertContains(resp, origin_directory_url)

        self.assertContains(resp, origin_revision_log_url)

        for parent in revision_data['parents']:
            parent_url = reverse('browse-revision',
                                 url_args={'sha1_git': parent},
                                 query_params={'origin': origin['url']})
            self.assertContains(resp, '<a href="%s">%s</a>' %
                                (parent_url, parent))

        self.assertContains(resp, 'vault-cook-directory')
        self.assertContains(resp, 'vault-cook-revision')

        swh_rev_id = get_swh_persistent_id('revision', revision)
        swh_rev_id_url = reverse('browse-swh-id',
                                 url_args={'swh_id': swh_rev_id})
        self.assertContains(resp, swh_rev_id)
        self.assertContains(resp, swh_rev_id_url)

        swh_dir_id = get_swh_persistent_id('directory', dir_id)
        swh_dir_id_url = reverse('browse-swh-id',
                                 url_args={'swh_id': swh_dir_id})
        self.assertContains(resp, swh_dir_id)
        self.assertContains(resp, swh_dir_id_url)

        self.assertContains(resp, 'swh-take-new-snapshot')

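    # Walk the paginated revision log (10 entries per page) and check the
    # rendered log entries plus the Newer/Older pagination links on up to
    # three consecutive pages.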
    @given(revision())
    def test_revision_log_browse(self, revision):
        per_page = 10

        revision_log = self.revision_log(revision)

        revision_log_sorted = \
            sorted(revision_log,
                   key=lambda rev: -parse_timestamp(
                       rev['committer_date']).timestamp())

        url = reverse('browse-revision-log',
                      url_args={'sha1_git': revision},
                      query_params={'per_page': per_page})

        resp = self.client.get(url)

        next_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': per_page,
                                              'per_page': per_page})

        nb_log_entries = per_page
        if len(revision_log_sorted) < per_page:
            nb_log_entries = len(revision_log_sorted)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision-log.html')
        self.assertContains(resp, '<tr class="swh-revision-log-entry',
                            count=nb_log_entries)
        self.assertContains(resp, '<a class="page-link">Newer</a>')

        if len(revision_log_sorted) > per_page:
            self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
                                escape(next_page_url))

        for log in revision_log_sorted[:per_page]:
            revision_url = reverse('browse-revision',
                                   url_args={'sha1_git': log['id']})
            self.assertContains(resp, log['id'][:7])
            self.assertContains(resp, log['author']['name'])
            self.assertContains(resp, format_utc_iso_date(log['date']))
            self.assertContains(resp, escape(log['message']))
            self.assertContains(resp, format_utc_iso_date(log['committer_date'])) # noqa
            self.assertContains(resp, revision_url)

        if len(revision_log_sorted) <= per_page:
            return

        resp = self.client.get(next_page_url)

        prev_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'per_page': per_page})
        next_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': 2 * per_page,
                                              'per_page': per_page})

        nb_log_entries = len(revision_log_sorted) - per_page
        if nb_log_entries > per_page:
            nb_log_entries = per_page

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision-log.html')
        self.assertContains(resp, '<tr class="swh-revision-log-entry',
                            count=nb_log_entries)

        self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
                            escape(prev_page_url))

        if len(revision_log_sorted) > 2 * per_page:
            self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
                                escape(next_page_url))

        if len(revision_log_sorted) <= 2 * per_page:
            return

        resp = self.client.get(next_page_url)

        prev_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': per_page,
                                              'per_page': per_page})
        next_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': 3 * per_page,
                                              'per_page': per_page})

        nb_log_entries = len(revision_log_sorted) - 2 * per_page
        if nb_log_entries > per_page:
            nb_log_entries = per_page

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision-log.html')
        self.assertContains(resp, '<tr class="swh-revision-log-entry',
                            count=nb_log_entries)
        self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
                            escape(prev_page_url))

        if len(revision_log_sorted) > 3 * per_page:
            self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
                                escape(next_page_url))

    @given(revision(), unknown_revision(), new_origin())
    def test_revision_request_errors(self, revision, unknown_revision,
                                     new_origin):

        url = reverse('browse-revision',
                      url_args={'sha1_git': unknown_revision})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp,
                            'Revision with sha1_git %s not found' %
                            unknown_revision, status_code=404)

        url = reverse('browse-revision',
                      url_args={'sha1_git': revision},
                      query_params={'origin_type': new_origin['type'],
                                    'origin': new_origin['url']})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp, 'the origin mentioned in your request'
                                  ' appears broken', status_code=404)

    @given(revision())
    def test_revision_uppercase(self, revision):
        url = reverse('browse-revision-uppercase-checksum',
                      url_args={'sha1_git': revision.upper()})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)

        redirect_url = reverse('browse-revision',
                               url_args={'sha1_git': revision})

        self.assertEqual(resp['location'], redirect_url)
Code example #8
0
            if "origin_url" in snp_ctx_params:
                expected_cnt_context["origin"] = origin["url"]
                expected_dir_context["origin"] = origin["url"]
                expected_rev_context["origin"] = origin["url"]
                expected_snp_context["origin"] = origin["url"]

            assert swhid_cnt_parsed.qualifiers() == expected_cnt_context
            assert swhid_dir_parsed.qualifiers() == expected_dir_context
            assert swhid_rev_parsed.qualifiers() == expected_rev_context
            assert swhid_snp_parsed.qualifiers() == expected_snp_context

            if "release_name" in snp_ctx_params:
                assert swhid_rel_parsed.qualifiers() == expected_rev_context


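# Characters such as ';', '%' and '=' appearing in the origin URL or in the
# path qualifier must be percent-escaped when building the SWHIDs.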
@given(origin(), directory())
def test_get_swhids_info_characters_and_url_escaping(archive_data, origin,
                                                     directory):
    snapshot_context = get_snapshot_context(origin_url=origin["url"])
    snapshot_context["origin_info"][
        "url"] = "http://example.org/?project=abc;def%"
    path = "/foo;/bar%"

    swhid_info = get_swhids_info(
        [SWHObjectInfo(object_type=DIRECTORY, object_id=directory)],
        snapshot_context=snapshot_context,
        extra_context={"path": path},
    )[0]

    # check special characters in SWHID have been escaped
    assert (swhid_info["context"]["origin"] ==
Code example #9
0
class SwhBrowseOriginSaveTest(WebTestCase):
    @given(origin())
    def test_recaptcha_activation_in_gui(self, origin):

        swh_web_config = get_config()

        for captcha_activated in (True, False):

            swh_web_config.update({
                'grecaptcha': {
                    'activated': captcha_activated,
                    'site_key': ''
                }
            })

            url = reverse('browse-origin-save')
            resp = self.client.get(url)

            captcha_script_url = 'https://www.google.com/recaptcha/api.js'
            captcha_dom_elt = '<div class="g-recaptcha"'

            if captcha_activated:
                self.assertContains(resp, captcha_script_url)
                self.assertContains(resp, captcha_dom_elt)
            else:
                self.assertNotContains(resp, captcha_script_url)
                self.assertNotContains(resp, captcha_dom_elt)

            url = reverse('browse-origin-directory',
                          url_args={
                              'origin_type': origin['type'],
                              'origin_url': origin['url']
                          })

            resp = self.client.get(url)

            if captcha_activated:
                self.assertContains(resp, captcha_script_url)
                self.assertContains(resp, captcha_dom_elt)
            else:
                self.assertNotContains(resp, captcha_script_url)
                self.assertNotContains(resp, captcha_dom_elt)

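    # With reCAPTCHA disabled in the configuration, a save origin request
    # posted without any captcha payload must be accepted and return the
    # save request data.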
    @patch('swh.web.browse.views.origin_save.create_save_origin_request')
    def test_recaptcha_not_activated_server_side(
            self, mock_create_save_origin_request):

        swh_web_config = get_config()

        swh_web_config.update(
            {'grecaptcha': {
                'activated': False,
                'site_key': ''
            }})

        origin_type = 'git'
        origin_url = 'https://github.com/python/cpython'

        expected_data = {
            'origin_type': origin_type,
            'origin_url': origin_url,
            'save_request_date': datetime.now().isoformat(),
            'save_request_status': SAVE_REQUEST_ACCEPTED,
            'save_task_status': SAVE_TASK_NOT_YET_SCHEDULED,
            'visit_date': None
        }

        mock_create_save_origin_request.return_value = expected_data

        url = reverse('browse-origin-save-request',
                      url_args={
                          'origin_type': origin_type,
                          'origin_url': origin_url
                      })
        resp = self.client.post(url, data={}, content_type='application/json')

        save_request_data = json.loads(resp.content.decode('utf-8'))

        self.assertEqual(save_request_data, expected_data)

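    # With reCAPTCHA enabled, an invalid captcha must be rejected with a 403
    # while a valid one lets the save request go through.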
    @patch('swh.web.browse.views.origin_save.is_recaptcha_valid')
    @patch('swh.web.browse.views.origin_save.create_save_origin_request')
    def test_recaptcha_activated_server_side(self,
                                             mock_create_save_origin_request,
                                             mock_is_recaptcha_valid):

        swh_web_config = get_config()

        swh_web_config.update(
            {'grecaptcha': {
                'activated': True,
                'site_key': ''
            }})

        origin_type = 'git'
        origin_url = 'https://github.com/python/cpython'

        expected_data = {
            'origin_type': origin_type,
            'origin_url': origin_url,
            'save_request_date': datetime.now().isoformat(),
            'save_request_status': SAVE_REQUEST_ACCEPTED,
            'save_task_status': SAVE_TASK_NOT_YET_SCHEDULED,
            'visit_date': None
        }

        mock_create_save_origin_request.return_value = expected_data

        for captcha_valid in (False, True):

            mock_is_recaptcha_valid.return_value = captcha_valid

            url = reverse('browse-origin-save-request',
                          url_args={
                              'origin_type': origin_type,
                              'origin_url': origin_url
                          })
            resp = self.client.post(url,
                                    data={},
                                    content_type='application/json')

            if captcha_valid is False:
                self.assertEqual(resp.status_code, 403)
            else:
                save_request_data = json.loads(resp.content.decode('utf-8'))
                self.assertEqual(save_request_data, expected_data)
Code example #10
0
class SwhIdsApiTestCase(WebTestCase, APITestCase):
    @given(origin(), content(), directory(), release(), revision(), snapshot())
    def test_swh_id_resolve_success(self, origin, content, directory, release,
                                    revision, snapshot):

        for obj_type_short, obj_type, obj_id in (('cnt', CONTENT,
                                                  content['sha1_git']),
                                                 ('dir', DIRECTORY, directory),
                                                 ('rel', RELEASE, release),
                                                 ('rev', REVISION, revision),
                                                 ('snp', SNAPSHOT, snapshot)):

            swh_id = 'swh:1:%s:%s;origin=%s' % (obj_type_short, obj_id,
                                                origin['url'])
            url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id})

            resp = self.client.get(url)

            if obj_type == CONTENT:
                url_args = {'query_string': 'sha1_git:%s' % obj_id}
            elif obj_type == SNAPSHOT:
                url_args = {'snapshot_id': obj_id}
            else:
                url_args = {'sha1_git': obj_id}

            browse_rev_url = reverse('browse-%s' % obj_type,
                                     url_args=url_args,
                                     query_params={'origin': origin['url']})

            expected_result = {
                'browse_url': browse_rev_url,
                'metadata': {
                    'origin': origin['url']
                },
                'namespace': 'swh',
                'object_id': obj_id,
                'object_type': obj_type,
                'scheme_version': 1
            }

            self.assertEqual(resp.status_code, 200)
            self.assertEqual(resp.data, expected_result)

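    # A syntactically invalid persistent identifier must be rejected with a
    # 400 response.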
    def test_swh_id_resolve_invalid(self):
        rev_id_invalid = '96db9023b8_foo_50d6c108e9a3'
        swh_id = 'swh:1:rev:%s' % rev_id_invalid
        url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id})

        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 400)

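    # Resolving well-formed identifiers that target unknown objects must
    # return a 404 for every object type.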
    @given(unknown_content(), unknown_directory(), unknown_release(),
           unknown_revision(), unknown_snapshot())
    def test_swh_id_resolve_not_found(self, unknown_content, unknown_directory,
                                      unknown_release, unknown_revision,
                                      unknown_snapshot):

        for obj_type_short, obj_id in (('cnt', unknown_content['sha1_git']),
                                       ('dir', unknown_directory),
                                       ('rel', unknown_release),
                                       ('rev', unknown_revision),
                                       ('snp', unknown_snapshot)):

            swh_id = 'swh:1:%s:%s' % (obj_type_short, obj_id)

            url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id})

            resp = self.client.get(url)

            self.assertEqual(resp.status_code, 404)
Code example #11
0
File: test_service.py Project: monperrus/swh-web
class ServiceTestCase(WebTestCase):
    @given(contents())
    def test_lookup_multiple_hashes_all_present(self, contents):
        input_data = []
        expected_output = []
        for cnt in contents:
            input_data.append({'sha1': cnt['sha1']})
            expected_output.append({'sha1': cnt['sha1'], 'found': True})

        self.assertEqual(service.lookup_multiple_hashes(input_data),
                         expected_output)

    @given(contents(), unknown_contents())
    def test_lookup_multiple_hashes_some_missing(self, contents,
                                                 unknown_contents):
        input_contents = list(itertools.chain(contents, unknown_contents))
        random.shuffle(input_contents)

        input_data = []
        expected_output = []
        for cnt in input_contents:
            input_data.append({'sha1': cnt['sha1']})
            expected_output.append({
                'sha1': cnt['sha1'],
                'found': cnt in contents
            })

        self.assertEqual(service.lookup_multiple_hashes(input_data),
                         expected_output)

    @given(unknown_content())
    def test_lookup_hash_does_not_exist(self, unknown_content):

        actual_lookup = service.lookup_hash('sha1_git:%s' %
                                            unknown_content['sha1_git'])

        self.assertEqual(actual_lookup, {'found': None, 'algo': 'sha1_git'})

    @given(content())
    def test_lookup_hash_exist(self, content):

        actual_lookup = service.lookup_hash('sha1:%s' % content['sha1'])

        content_metadata = self.content_get_metadata(content['sha1'])

        self.assertEqual({
            'found': content_metadata,
            'algo': 'sha1'
        }, actual_lookup)

    @given(unknown_content())
    def test_search_hash_does_not_exist(self, content):

        actual_lookup = service.search_hash('sha1_git:%s' %
                                            content['sha1_git'])

        self.assertEqual({'found': False}, actual_lookup)

    @given(content())
    def test_search_hash_exist(self, content):

        actual_lookup = service.search_hash('sha1:%s' % content['sha1'])

        self.assertEqual({'found': True}, actual_lookup)

    @pytest.mark.skipif(ctags_json_missing,
                        reason="requires ctags with json output support")
    @given(contents_with_ctags())
    def test_lookup_content_ctags(self, contents_with_ctags):

        content_sha1 = random.choice(contents_with_ctags['sha1s'])
        self.content_add_ctags(content_sha1)
        actual_ctags = \
            list(service.lookup_content_ctags('sha1:%s' % content_sha1))

        expected_data = list(self.content_get_ctags(content_sha1))
        for ctag in expected_data:
            ctag['id'] = content_sha1

        self.assertEqual(actual_ctags, expected_data)

    @given(unknown_content())
    def test_lookup_content_ctags_no_hash(self, unknown_content):

        actual_ctags = \
            list(service.lookup_content_ctags('sha1:%s' %
                                              unknown_content['sha1']))

        self.assertEqual(actual_ctags, [])

    @given(content())
    def test_lookup_content_filetype(self, content):

        self.content_add_mimetype(content['sha1'])
        actual_filetype = service.lookup_content_filetype(content['sha1'])

        expected_filetype = self.content_get_mimetype(content['sha1'])
        self.assertEqual(actual_filetype, expected_filetype)

    @given(content())
    def test_lookup_content_language(self, content):

        self.content_add_language(content['sha1'])
        actual_language = service.lookup_content_language(content['sha1'])

        expected_language = self.content_get_language(content['sha1'])
        self.assertEqual(actual_language, expected_language)

    @given(contents_with_ctags())
    def test_lookup_expression(self, contents_with_ctags):

        per_page = 10
        expected_ctags = []

        for content_sha1 in contents_with_ctags['sha1s']:
            if len(expected_ctags) == per_page:
                break
            self.content_add_ctags(content_sha1)
            for ctag in self.content_get_ctags(content_sha1):
                if len(expected_ctags) == per_page:
                    break
                if ctag['name'] == contents_with_ctags['symbol_name']:
                    del ctag['id']
                    ctag['sha1'] = content_sha1
                    expected_ctags.append(ctag)

        actual_ctags = \
            list(service.lookup_expression(contents_with_ctags['symbol_name'],
                                           last_sha1=None, per_page=10))

        self.assertEqual(actual_ctags, expected_ctags)

    def test_lookup_expression_no_result(self):

        expected_ctags = []

        actual_ctags = \
            list(service.lookup_expression('barfoo', last_sha1=None,
                                           per_page=10))
        self.assertEqual(actual_ctags, expected_ctags)

    @pytest.mark.skipif(fossology_missing,
                        reason="requires fossology-nomossa installed")
    @given(content())
    def test_lookup_content_license(self, content):

        self.content_add_license(content['sha1'])
        actual_license = service.lookup_content_license(content['sha1'])

        expected_license = self.content_get_license(content['sha1'])
        self.assertEqual(actual_license, expected_license)

    def test_stat_counters(self):
        actual_stats = service.stat_counters()
        self.assertEqual(actual_stats, self.storage.stat_counters())

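    # Add a new origin together with a few visits and check that
    # lookup_origin_visits returns all of them.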
    @given(new_origin(), visit_dates())
    def test_lookup_origin_visits(self, new_origin, visit_dates):

        origin_id = self.storage.origin_add_one(new_origin)
        for ts in visit_dates:
            self.storage.origin_visit_add(origin_id, ts)

        actual_origin_visits = list(
            service.lookup_origin_visits(origin_id, per_page=100))

        expected_visits = self.origin_visit_get(origin_id)

        self.assertEqual(actual_origin_visits, expected_visits)

    @given(new_origin(), visit_dates())
    def test_lookup_origin_visit(self, new_origin, visit_dates):
        origin_id = self.storage.origin_add_one(new_origin)
        visits = []
        for ts in visit_dates:
            visits.append(self.storage.origin_visit_add(origin_id, ts))

        visit = random.choice(visits)['visit']
        actual_origin_visit = service.lookup_origin_visit(origin_id, visit)

        expected_visit = dict(
            self.storage.origin_visit_get_by(origin_id, visit))
        expected_visit['date'] = expected_visit['date'].isoformat()
        expected_visit['metadata'] = {}

        self.assertEqual(actual_origin_visit, expected_visit)

    @given(new_origin())
    def test_lookup_origin(self, new_origin):
        origin_id = self.storage.origin_add_one(new_origin)

        actual_origin = service.lookup_origin({'id': origin_id})
        expected_origin = self.storage.origin_get({'id': origin_id})
        self.assertEqual(actual_origin, expected_origin)

        actual_origin = service.lookup_origin({
            'type': new_origin['type'],
            'url': new_origin['url']
        })
        expected_origin = self.storage.origin_get({
            'type': new_origin['type'],
            'url': new_origin['url']
        })
        self.assertEqual(actual_origin, expected_origin)

    @given(invalid_sha1())
    def test_lookup_release_ko_id_checksum_not_a_sha1(self, invalid_sha1):
        with self.assertRaises(BadInputExc) as cm:
            service.lookup_release(invalid_sha1)
        self.assertIn('invalid checksum', cm.exception.args[0].lower())

    @given(sha256())
    def test_lookup_release_ko_id_checksum_too_long(self, sha256):
        with self.assertRaises(BadInputExc) as cm:
            service.lookup_release(sha256)
        self.assertEqual('Only sha1_git is supported.', cm.exception.args[0])

    @given(directory())
    def test_lookup_directory_with_path_not_found(self, directory):
        path = 'some/invalid/path/here'
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory_with_path(directory, path)
        self.assertEqual(
            'Directory entry with path %s from %s '
            'not found' % (path, directory), cm.exception.args[0])

    @given(directory())
    def test_lookup_directory_with_path_found(self, directory):
        directory_content = self.directory_ls(directory)
        directory_entry = random.choice(directory_content)
        path = directory_entry['name']
        actual_result = service.lookup_directory_with_path(directory, path)
        self.assertEqual(actual_result, directory_entry)

    @given(release())
    def test_lookup_release(self, release):
        actual_release = service.lookup_release(release)

        self.assertEqual(actual_release, self.release_get(release))

    @given(revision(), invalid_sha1(), sha256())
    def test_lookup_revision_with_context_ko_not_a_sha1(
            self, revision, invalid_sha1, sha256):
        sha1_git_root = revision
        sha1_git = invalid_sha1

        with self.assertRaises(BadInputExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Invalid checksum query string', cm.exception.args[0])

        sha1_git = sha256

        with self.assertRaises(BadInputExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Only sha1_git is supported', cm.exception.args[0])

    @given(revision(), unknown_revision())
    def test_lookup_revision_with_context_ko_sha1_git_does_not_exist(
            self, revision, unknown_revision):
        sha1_git_root = revision
        sha1_git = unknown_revision

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Revision %s not found' % sha1_git, cm.exception.args[0])

    @given(revision(), unknown_revision())
    def test_lookup_revision_with_context_ko_root_sha1_git_does_not_exist(
            self, revision, unknown_revision):
        sha1_git_root = unknown_revision
        sha1_git = revision

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Revision root %s not found' % sha1_git_root,
                      cm.exception.args[0])

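    # For a revision known to be an ancestor of a root revision, the lookup
    # must return that revision enriched with the ids of its children found
    # along the root's history.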
    @given(ancestor_revisions())
    def test_lookup_revision_with_context(self, ancestor_revisions):
        sha1_git = ancestor_revisions['sha1_git']
        root_sha1_git = ancestor_revisions['sha1_git_root']
        for sha1_git_root in (root_sha1_git, {
                'id': hash_to_bytes(root_sha1_git)
        }):
            actual_revision = \
                service.lookup_revision_with_context(sha1_git_root,
                                                     sha1_git)

            children = []
            for rev in self.revision_log(root_sha1_git):
                for p_rev in rev['parents']:
                    p_rev_hex = hash_to_hex(p_rev)
                    if p_rev_hex == sha1_git:
                        children.append(rev['id'])

            expected_revision = self.revision_get(sha1_git)
            expected_revision['children'] = children
            self.assertEqual(actual_revision, expected_revision)

    @given(non_ancestor_revisions())
    def test_lookup_revision_with_context_ko(self, non_ancestor_revisions):
        sha1_git = non_ancestor_revisions['sha1_git']
        root_sha1_git = non_ancestor_revisions['sha1_git_root']

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_with_context(root_sha1_git, sha1_git)
        self.assertIn(
            'Revision %s is not an ancestor of %s' % (sha1_git, root_sha1_git),
            cm.exception.args[0])

    @given(unknown_revision())
    def test_lookup_directory_with_revision_not_found(self, unknown_revision):

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory_with_revision(unknown_revision)
        self.assertIn('Revision %s not found' % unknown_revision,
                      cm.exception.args[0])

    @given(revision())
    def test_lookup_directory_with_revision_ko_path_to_nowhere(self, revision):

        invalid_path = 'path/to/something/unknown'
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory_with_revision(revision, invalid_path)
        exception_text = cm.exception.args[0].lower()
        self.assertIn('directory or file', exception_text)
        self.assertIn(invalid_path, exception_text)
        self.assertIn('revision %s' % revision, exception_text)
        self.assertIn('not found', exception_text)

    @given(revision_with_submodules())
    def test_lookup_directory_with_revision_submodules(
            self, revision_with_submodules):

        rev_sha1_git = revision_with_submodules['rev_sha1_git']
        rev_dir_path = revision_with_submodules['rev_dir_rev_path']

        actual_data = service.lookup_directory_with_revision(
            rev_sha1_git, rev_dir_path)

        revision = self.revision_get(revision_with_submodules['rev_sha1_git'])
        directory = self.directory_ls(revision['directory'])
        rev_entry = next(e for e in directory if e['name'] == rev_dir_path)

        expected_data = {
            'content': self.revision_get(rev_entry['target']),
            'path': rev_dir_path,
            'revision': rev_sha1_git,
            'type': 'rev'
        }

        self.assertEqual(actual_data, expected_data)

    @given(revision())
    def test_lookup_directory_with_revision_without_path(self, revision):

        actual_directory_entries = \
            service.lookup_directory_with_revision(revision)

        revision_data = self.revision_get(revision)
        expected_directory_entries = \
            self.directory_ls(revision_data['directory'])

        self.assertEqual(actual_directory_entries['type'], 'dir')
        self.assertEqual(actual_directory_entries['content'],
                         expected_directory_entries)

    @given(revision())
    def test_lookup_directory_with_revision_with_path(self, revision):

        revision_data = self.revision_get(revision)
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] in ('file', 'dir')
        ]
        expected_dir_entry = random.choice(dir_entries)

        actual_dir_entry = \
            service.lookup_directory_with_revision(revision,
                                                   expected_dir_entry['name'])

        self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
        self.assertEqual(actual_dir_entry['revision'], revision)
        self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
        if actual_dir_entry['type'] == 'file':
            del actual_dir_entry['content']['checksums']['blake2s256']
            for key in ('checksums', 'status', 'length'):
                self.assertEqual(actual_dir_entry['content'][key],
                                 expected_dir_entry[key])
        else:
            sub_dir_entries = self.directory_ls(expected_dir_entry['target'])
            self.assertEqual(actual_dir_entry['content'], sub_dir_entries)

    @given(revision())
    def test_lookup_directory_with_revision_with_path_to_file_and_data(
            self, revision):

        revision_data = self.revision_get(revision)
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] == 'file'
        ]
        expected_dir_entry = random.choice(dir_entries)
        expected_data = \
            self.content_get(expected_dir_entry['checksums']['sha1'])

        actual_dir_entry = \
            service.lookup_directory_with_revision(revision,
                                                   expected_dir_entry['name'],
                                                   with_data=True)

        self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
        self.assertEqual(actual_dir_entry['revision'], revision)
        self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
        del actual_dir_entry['content']['checksums']['blake2s256']
        for key in ('checksums', 'status', 'length'):
            self.assertEqual(actual_dir_entry['content'][key],
                             expected_dir_entry[key])
        self.assertEqual(actual_dir_entry['content']['data'],
                         expected_data['data'])

    @given(revision())
    def test_lookup_revision(self, revision):
        actual_revision = service.lookup_revision(revision)
        self.assertEqual(actual_revision, self.revision_get(revision))

    @given(new_revision())
    def test_lookup_revision_invalid_msg(self, new_revision):

        new_revision['message'] = b'elegant fix for bug \xff'
        self.storage.revision_add([new_revision])

        revision = service.lookup_revision(hash_to_hex(new_revision['id']))
        self.assertEqual(revision['message'], None)
        self.assertEqual(revision['message_decoding_failed'], True)

    @given(new_revision())
    def test_lookup_revision_msg_ok(self, new_revision):

        self.storage.revision_add([new_revision])

        revision_message = service.lookup_revision_message(
            hash_to_hex(new_revision['id']))

        self.assertEqual(revision_message,
                         {'message': new_revision['message']})

    @given(new_revision())
    def test_lookup_revision_msg_absent(self, new_revision):

        del new_revision['message']
        self.storage.revision_add([new_revision])

        new_revision_id = hash_to_hex(new_revision['id'])

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_message(new_revision_id)

        self.assertEqual(
            cm.exception.args[0],
            'No message for revision with sha1_git %s.' % new_revision_id)

    @given(unknown_revision())
    def test_lookup_revision_msg_no_rev(self, unknown_revision):

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_message(unknown_revision)

        self.assertEqual(
            cm.exception.args[0],
            'Revision with sha1_git %s not found.' % unknown_revision)

    @given(revisions())
    def test_lookup_revision_multiple(self, revisions):

        actual_revisions = list(service.lookup_revision_multiple(revisions))

        expected_revisions = []
        for rev in revisions:
            expected_revisions.append(self.revision_get(rev))

        self.assertEqual(actual_revisions, expected_revisions)

    @given(unknown_revisions())
    def test_lookup_revision_multiple_none_found(self, unknown_revisions):

        actual_revisions = \
            list(service.lookup_revision_multiple(unknown_revisions))

        self.assertEqual(actual_revisions, [None] * len(unknown_revisions))

    @given(revision())
    def test_lookup_revision_log(self, revision):

        actual_revision_log = \
            list(service.lookup_revision_log(revision, limit=25))
        expected_revision_log = self.revision_log(revision, limit=25)

        self.assertEqual(actual_revision_log, expected_revision_log)

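    # Collect the branches of the latest visit's snapshot that point to a
    # revision; used by the branch-based lookup tests below.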
    def _get_origin_branches(self, origin):
        origin_visit = self.origin_visit_get(origin['id'])[-1]
        snapshot = self.snapshot_get(origin_visit['snapshot'])
        branches = {
            k: v
            for (k, v) in snapshot['branches'].items()
            if v['target_type'] == 'revision'
        }
        return branches

    @given(origin())
    def test_lookup_revision_log_by(self, origin):

        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        actual_log = \
            list(service.lookup_revision_log_by(origin['id'], branch_name,
                                                None, limit=25))

        expected_log = \
            self.revision_log(branches[branch_name]['target'], limit=25)

        self.assertEqual(actual_log, expected_log)

    @given(origin())
    def test_lookup_revision_log_by_notfound(self, origin):

        with self.assertRaises(NotFoundExc):
            service.lookup_revision_log_by(origin['id'],
                                           'unknown_branch_name',
                                           None,
                                           limit=100)

    @given(unknown_content())
    def test_lookup_content_raw_not_found(self, unknown_content):

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_content_raw('sha1:' + unknown_content['sha1'])

        self.assertIn(
            cm.exception.args[0],
            'Content with %s checksum equals to %s not found!' %
            ('sha1', unknown_content['sha1']))

    @given(content())
    def test_lookup_content_raw(self, content):

        actual_content = service.lookup_content_raw('sha256:%s' %
                                                    content['sha256'])

        expected_content = self.content_get(content['sha1'])

        self.assertEqual(actual_content, expected_content)

    @given(unknown_content())
    def test_lookup_content_not_found(self, unknown_content):

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_content('sha1:%s' % unknown_content['sha1'])

        self.assertIn(
            cm.exception.args[0],
            'Content with %s checksum equals to %s not found!' %
            ('sha1', unknown_content['sha1']))

    @given(content())
    def test_lookup_content_with_sha1(self, content):

        actual_content = service.lookup_content('sha1:%s' % content['sha1'])

        expected_content = self.content_get_metadata(content['sha1'])

        self.assertEqual(actual_content, expected_content)

    @given(content())
    def test_lookup_content_with_sha256(self, content):

        actual_content = service.lookup_content('sha256:%s' %
                                                content['sha256'])

        expected_content = self.content_get_metadata(content['sha1'])

        self.assertEqual(actual_content, expected_content)

    @given(revision())
    def test_lookup_person(self, revision):

        rev_data = self.revision_get(revision)

        actual_person = service.lookup_person(rev_data['author']['id'])

        self.assertEqual(actual_person, rev_data['author'])

    def test_lookup_directory_bad_checksum(self):

        with self.assertRaises(BadInputExc):
            service.lookup_directory('directory_id')

    @given(unknown_directory())
    def test_lookup_directory_not_found(self, unknown_directory):

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory(unknown_directory)

        self.assertIn(
            'Directory with sha1_git %s not found' % unknown_directory,
            cm.exception.args[0])

    @given(directory())
    def test_lookup_directory(self, directory):

        actual_directory_ls = list(service.lookup_directory(directory))

        expected_directory_ls = self.directory_ls(directory)

        self.assertEqual(actual_directory_ls, expected_directory_ls)

    @given(empty_directory())
    def test_lookup_directory_empty(self, empty_directory):

        actual_directory_ls = list(service.lookup_directory(empty_directory))

        self.assertEqual(actual_directory_ls, [])

    @given(origin())
    def test_lookup_revision_by_nothing_found(self, origin):

        with self.assertRaises(NotFoundExc):
            service.lookup_revision_by(origin['id'], 'invalid-branch-name')

    @given(origin())
    def test_lookup_revision_by(self, origin):

        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        actual_revision = \
            service.lookup_revision_by(origin['id'], branch_name, None)

        expected_revision = \
            self.revision_get(branches[branch_name]['target'])

        self.assertEqual(actual_revision, expected_revision)

    @given(origin(), revision())
    def test_lookup_revision_with_context_by_ko(self, origin, revision):

        with self.assertRaises(NotFoundExc):
            service.lookup_revision_with_context_by(origin['id'],
                                                    'invalid-branch-name',
                                                    None, revision)

    @given(origin())
    def test_lookup_revision_with_context_by(self, origin):

        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        root_rev = branches[branch_name]['target']
        root_rev_log = self.revision_log(root_rev)

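        # build a parent -> children mapping from the revision log so the
        # expected revision can be compared with its 'children' field filled in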
        children = defaultdict(list)

        for rev in root_rev_log:
            for rev_p in rev['parents']:
                children[rev_p].append(rev['id'])

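        # use the last revision of the log (an ancestor of the branch head) as
        # the revision to look up within the branch context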
        rev = root_rev_log[-1]['id']

        actual_root_rev, actual_rev = service.lookup_revision_with_context_by(
            origin['id'], branch_name, None, rev)

        expected_root_rev = self.revision_get(root_rev)
        expected_rev = self.revision_get(rev)
        expected_rev['children'] = children[rev]

        self.assertEqual(actual_root_rev, expected_root_rev)
        self.assertEqual(actual_rev, expected_rev)

    def test_lookup_revision_through_ko_not_implemented(self):

        with self.assertRaises(NotImplementedError):
            service.lookup_revision_through({
                'something-unknown': 10,
            })

    @given(origin())
    def test_lookup_revision_through_with_context_by(self, origin):

        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        root_rev = branches[branch_name]['target']
        root_rev_log = self.revision_log(root_rev)
        rev = root_rev_log[-1]['id']

        self.assertEqual(
            service.lookup_revision_through({
                'origin_id': origin['id'],
                'branch_name': branch_name,
                'ts': None,
                'sha1_git': rev
            }),
            service.lookup_revision_with_context_by(origin['id'], branch_name,
                                                    None, rev))

    @given(origin())
    def test_lookup_revision_through_with_revision_by(self, origin):

        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        self.assertEqual(
            service.lookup_revision_through({
                'origin_id': origin['id'],
                'branch_name': branch_name,
                'ts': None,
            }), service.lookup_revision_by(origin['id'], branch_name, None))

    @given(ancestor_revisions())
    def test_lookup_revision_through_with_context(self, ancestor_revisions):

        sha1_git = ancestor_revisions['sha1_git']
        sha1_git_root = ancestor_revisions['sha1_git_root']

        self.assertEqual(
            service.lookup_revision_through({
                'sha1_git_root': sha1_git_root,
                'sha1_git': sha1_git,
            }), service.lookup_revision_with_context(sha1_git_root, sha1_git))

    @given(revision())
    def test_lookup_revision_through_with_revision(self, revision):

        self.assertEqual(
            service.lookup_revision_through({'sha1_git': revision}),
            service.lookup_revision(revision))

    @given(revision())
    def test_lookup_directory_through_revision_ko_not_found(self, revision):

        with self.assertRaises(NotFoundExc):
            service.lookup_directory_through_revision({'sha1_git': revision},
                                                      'some/invalid/path')

    @given(revision())
    def test_lookup_directory_through_revision_ok(self, revision):

        revision_data = self.revision_get(revision)
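        # pick a random file entry from the revision's root directory to use
        # as the path for the lookup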
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] == 'file'
        ]
        dir_entry = random.choice(dir_entries)

        self.assertEqual(
            service.lookup_directory_through_revision({'sha1_git': revision},
                                                      dir_entry['name']),
            (revision,
             service.lookup_directory_with_revision(revision,
                                                    dir_entry['name'])))

    @given(revision())
    def test_lookup_directory_through_revision_ok_with_data(self, revision):

        revision_data = self.revision_get(revision)
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] == 'file'
        ]
        dir_entry = random.choice(dir_entries)

        self.assertEqual(
            service.lookup_directory_through_revision({'sha1_git': revision},
                                                      dir_entry['name'],
                                                      with_data=True),
            (revision,
             service.lookup_directory_with_revision(
                 revision, dir_entry['name'], with_data=True)))

    @given(new_origins(20))
    def test_lookup_origins(self, new_origins):

        nb_origins = len(new_origins)
        expected_origins = self.storage.origin_add(new_origins)

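        # pick a random starting origin (excluding the last one) and a random
        # page size that stays within the range of origin ids just added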
        origin_from_idx = random.randint(1, nb_origins - 1) - 1
        origin_from = expected_origins[origin_from_idx]['id']
        max_origin_idx = expected_origins[-1]['id']
        origin_count = random.randint(1, max_origin_idx - origin_from)

        actual_origins = list(service.lookup_origins(origin_from,
                                                     origin_count))
        expected_origins = list(
            self.storage.origin_get_range(origin_from, origin_count))

        self.assertEqual(actual_origins, expected_origins)
コード例 #12
from swh.web.tests.strategies import (
    content,
    directory,
    origin,
    release,
    revision,
    snapshot,
    unknown_content,
    unknown_directory,
    unknown_release,
    unknown_revision,
    unknown_snapshot,
)
from swh.web.tests.utils import check_api_get_responses, check_api_post_responses


@given(origin(), content(), directory(), release(), revision(), snapshot())
def test_swhid_resolve_success(
    api_client, client, origin, content, directory, release, revision, snapshot
):

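    # build, for each supported object type, a SWHID qualified with the origin
    # URL and check that the resolve endpoint handles it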
    for obj_type, obj_id in (
        (CONTENT, content["sha1_git"]),
        (DIRECTORY, directory),
        (RELEASE, release),
        (REVISION, revision),
        (SNAPSHOT, snapshot),
    ):

        swhid = gen_swhid(obj_type, obj_id, metadata={"origin": origin["url"]})
        url = reverse("api-1-resolve-swhid", url_args={"swhid": swhid})