def test_lookup_directory_with_path_found(archive_data, directory):
    directory_content = archive_data.directory_ls(directory)
    directory_entry = random.choice(directory_content)
    path = directory_entry["name"]
    actual_result = archive.lookup_directory_with_path(directory, path)
    assert actual_result == directory_entry


@given(release())
def test_lookup_release(archive_data, release):
    actual_release = archive.lookup_release(release)
    assert actual_release == archive_data.release_get(release)


@given(revision(), invalid_sha1(), sha256())
def test_lookup_revision_with_context_ko_not_a_sha1(revision, invalid_sha1, sha256):
    sha1_git_root = revision
    sha1_git = invalid_sha1

    with pytest.raises(BadInputExc) as e:
        archive.lookup_revision_with_context(sha1_git_root, sha1_git)
    assert e.match("Invalid checksum query string")

    sha1_git = sha256

    with pytest.raises(BadInputExc) as e:
        archive.lookup_revision_with_context(sha1_git_root, sha1_git)
    assert e.match("Only sha1_git is supported")
    content_data["filetype_url"] = reverse(
        "api-1-content-filetype", url_args={"q": query_string}, request=request
    )
    content_data["language_url"] = reverse(
        "api-1-content-language", url_args={"q": query_string}, request=request
    )
    content_data["license_url"] = reverse(
        "api-1-content-license", url_args={"q": query_string}, request=request
    )

    assert enriched_content == content_data


@given(revision())
def test_enrich_revision_without_children_or_parent(
    api_request_factory, archive_data, revision
):
    revision_data = archive_data.revision_get(revision)
    del revision_data["parents"]

    url = reverse("api-1-revision", url_args={"sha1_git": revision})
    request = api_request_factory.get(url)
    actual_revision = utils.enrich_revision(revision_data, request)

    revision_data["url"] = reverse(
        "api-1-revision", url_args={"sha1_git": revision}, request=request
    )
class SwhBrowseIdTest(WebTestCase):

    @given(content())
    def test_content_id_browse(self, content):
        cnt_sha1_git = content['sha1_git']
        swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git
        url = reverse('browse-swh-id', url_args={'swh_id': swh_id})

        query_string = 'sha1_git:' + cnt_sha1_git
        content_browse_url = reverse('browse-content',
                                     url_args={'query_string': query_string})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], content_browse_url)

    @given(directory())
    def test_directory_id_browse(self, directory):
        swh_id = swh_id_prefix + 'dir:' + directory
        url = reverse('browse-swh-id', url_args={'swh_id': swh_id})

        directory_browse_url = reverse('browse-directory',
                                       url_args={'sha1_git': directory})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], directory_browse_url)

    @given(revision())
    def test_revision_id_browse(self, revision):
        swh_id = swh_id_prefix + 'rev:' + revision
        url = reverse('browse-swh-id', url_args={'swh_id': swh_id})

        revision_browse_url = reverse('browse-revision',
                                      url_args={'sha1_git': revision})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], revision_browse_url)

        query_params = {'origin_type': 'git',
                        'origin': 'https://github.com/user/repo'}

        url = reverse('browse-swh-id', url_args={'swh_id': swh_id},
                      query_params=query_params)

        revision_browse_url = reverse('browse-revision',
                                      url_args={'sha1_git': revision},
                                      query_params=query_params)

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], revision_browse_url)

    @given(release())
    def test_release_id_browse(self, release):
        swh_id = swh_id_prefix + 'rel:' + release
        url = reverse('browse-swh-id', url_args={'swh_id': swh_id})

        release_browse_url = reverse('browse-release',
                                     url_args={'sha1_git': release})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], release_browse_url)

        query_params = {'origin_type': 'git',
                        'origin': 'https://github.com/user/repo'}

        url = reverse('browse-swh-id', url_args={'swh_id': swh_id},
                      query_params=query_params)

        release_browse_url = reverse('browse-release',
                                     url_args={'sha1_git': release},
                                     query_params=query_params)

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], release_browse_url)

    @given(snapshot())
    def test_snapshot_id_browse(self, snapshot):
        swh_id = swh_id_prefix + 'snp:' + snapshot
        url = reverse('browse-swh-id', url_args={'swh_id': swh_id})

        snapshot_browse_url = reverse('browse-snapshot',
                                      url_args={'snapshot_id': snapshot})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], snapshot_browse_url)

        query_params = {'origin_type': 'git',
                        'origin': 'https://github.com/user/repo'}

        url = reverse('browse-swh-id', url_args={'swh_id': swh_id},
                      query_params=query_params)

        release_browse_url = reverse('browse-snapshot',
                                     url_args={'snapshot_id': snapshot},
                                     query_params=query_params)

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], release_browse_url)

    @given(release())
    def test_bad_id_browse(self, release):
        swh_id = swh_id_prefix + 'foo:' + release
        url = reverse('browse-swh-id', url_args={'swh_id': swh_id})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 400)

    @given(content())
    def test_content_id_optional_parts_browse(self, content):
        cnt_sha1_git = content['sha1_git']
        optional_parts = ';lines=4-20;origin=https://github.com/user/repo'
        swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git + optional_parts
        url = reverse('browse-swh-id', url_args={'swh_id': swh_id})

        query_string = 'sha1_git:' + cnt_sha1_git
        content_browse_url = reverse(
            'browse-content', url_args={'query_string': query_string},
            query_params={'origin': 'https://github.com/user/repo'})
        content_browse_url += '#L4-L20'

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)
        self.assertEqual(resp['location'], content_browse_url)
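# For reference, the persistent identifiers exercised by the tests above follow
# the SWHID core syntax "swh:1:<object type>:<hex sha1_git>", optionally
# extended with qualifiers such as ";origin=<url>" or ";lines=<first>-<last>".
# An illustrative sketch of the shape (placeholder values, not a real hash):
#
#   swh:1:cnt:<40 hex digit sha1_git>;lines=4-20;origin=<origin url>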
@given(directory())
def test_directory_badge(client, directory):
    _test_badge_endpoints(client, "directory", directory)


@given(origin())
def test_origin_badge(client, origin):
    _test_badge_endpoints(client, "origin", origin["url"])


@given(release())
def test_release_badge(client, release):
    _test_badge_endpoints(client, "release", release)


@given(revision())
def test_revision_badge(client, revision):
    _test_badge_endpoints(client, "revision", revision)


@given(snapshot())
def test_snapshot_badge(client, snapshot):
    _test_badge_endpoints(client, "snapshot", snapshot)


@given(
    unknown_content(),
    unknown_directory(),
    new_origin(),
    unknown_release(),
    unknown_revision(),
from swh.web.common.identifiers import gen_swhid
from swh.web.common.utils import format_utc_iso_date, parse_iso8601_date_to_utc, reverse
from swh.web.tests.django_asserts import assert_contains, assert_not_contains
from swh.web.tests.strategies import (
    directory,
    new_origin,
    new_person,
    new_swh_date,
    origin,
    revision,
    unknown_revision,
)
from swh.web.tests.utils import check_html_get_response


@given(revision())
def test_revision_browse(client, archive_data, revision):
    _revision_browse_checks(client, archive_data, revision)


@given(origin())
def test_revision_origin_snapshot_browse(client, archive_data, origin):
    snapshot = archive_data.snapshot_get_latest(origin["url"])
    revision = archive_data.snapshot_get_head(snapshot)

    _revision_browse_checks(client, archive_data, revision, origin_url=origin["url"])
    _revision_browse_checks(client, archive_data, revision, snapshot=snapshot)
    _revision_browse_checks(
class RevisionApiTestCase(WebTestCase, APITestCase):

    @given(revision())
    def test_api_revision(self, revision):
        url = reverse('api-revision', url_args={'sha1_git': revision})
        rv = self.client.get(url)

        expected_revision = self.revision_get(revision)
        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(unknown_revision())
    def test_api_revision_not_found(self, unknown_revision):
        url = reverse('api-revision',
                      url_args={'sha1_git': unknown_revision})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Revision with sha1_git %s not found.'
                          % unknown_revision
            })

    @given(revision())
    def test_api_revision_raw_ok(self, revision):
        url = reverse('api-revision-raw-message',
                      url_args={'sha1_git': revision})
        rv = self.client.get(url)

        expected_message = self.revision_get(revision)['message']

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/octet-stream')
        self.assertEqual(rv.content, expected_message.encode())

    @given(new_revision())
    def test_api_revision_raw_ok_no_msg(self, new_revision):
        del new_revision['message']
        self.storage.revision_add([new_revision])

        new_revision_id = hash_to_hex(new_revision['id'])
        url = reverse('api-revision-raw-message',
                      url_args={'sha1_git': new_revision_id})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'No message for revision with sha1_git %s.'
                          % new_revision_id
            })

    @given(unknown_revision())
    def test_api_revision_raw_ko_no_rev(self, unknown_revision):
        url = reverse('api-revision-raw-message',
                      url_args={'sha1_git': unknown_revision})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Revision with sha1_git %s not found.'
                          % unknown_revision
            })

    @given(unknown_origin_id())
    def test_api_revision_with_origin_not_found(self, unknown_origin_id):
        url = reverse('api-revision-origin',
                      url_args={'origin_id': unknown_origin_id})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Origin with id %s not found!'
                          % unknown_origin_id
            })

    @given(origin())
    def test_api_revision_with_origin(self, origin):
        url = reverse('api-revision-origin',
                      url_args={'origin_id': origin['id']})
        rv = self.client.get(url)

        snapshot = self.snapshot_get_latest(origin['id'])
        expected_revision = self.revision_get(
            snapshot['branches']['HEAD']['target'])
        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(origin())
    def test_api_revision_with_origin_and_branch_name(self, origin):
        snapshot = self.snapshot_get_latest(origin['id'])

        branch_name = random.choice(
            list(b for b in snapshot['branches'].keys()
                 if snapshot['branches'][b]['target_type'] == 'revision'))

        url = reverse('api-revision-origin',
                      url_args={'origin_id': origin['id'],
                                'branch_name': branch_name})
        rv = self.client.get(url)

        expected_revision = self.revision_get(
            snapshot['branches'][branch_name]['target'])
        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(origin_with_multiple_visits())
    def test_api_revision_with_origin_and_branch_name_and_ts(self, origin):
        visit = random.choice(self.origin_visit_get(origin['id']))
        snapshot = self.snapshot_get(visit['snapshot'])

        branch_name = random.choice(
            list(b for b in snapshot['branches'].keys()
                 if snapshot['branches'][b]['target_type'] == 'revision'))

        url = reverse('api-revision-origin',
                      url_args={'origin_id': origin['id'],
                                'branch_name': branch_name,
                                'ts': visit['date']})
        rv = self.client.get(url)

        expected_revision = self.revision_get(
            snapshot['branches'][branch_name]['target'])
        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(origin_with_multiple_visits())
    def test_api_revision_with_origin_and_branch_name_and_ts_escapes(
            self, origin):
        visit = random.choice(self.origin_visit_get(origin['id']))
        snapshot = self.snapshot_get(visit['snapshot'])

        branch_name = random.choice(
            list(b for b in snapshot['branches'].keys()
                 if snapshot['branches'][b]['target_type'] == 'revision'))

        date = parse_timestamp(visit['date'])
        formatted_date = date.strftime('Today is %B %d, %Y at %X')

        url = reverse('api-revision-origin',
                      url_args={'origin_id': origin['id'],
                                'branch_name': branch_name,
                                'ts': formatted_date})
        rv = self.client.get(url)

        expected_revision = self.revision_get(
            snapshot['branches'][branch_name]['target'])
        self._enrich_revision(expected_revision)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_revision)

    @given(unknown_origin_id())
    def test_api_directory_through_revision_origin_ko(self,
                                                      unknown_origin_id):
        url = reverse('api-revision-origin-directory',
                      url_args={'origin_id': unknown_origin_id})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Origin with id %s not found!'
                          % unknown_origin_id
            })

    @given(origin())
    def test_api_directory_through_revision_origin(self, origin):
        url = reverse('api-revision-origin-directory',
                      url_args={'origin_id': origin['id']})
        rv = self.client.get(url)

        snapshot = self.snapshot_get_latest(origin['id'])
        revision_id = snapshot['branches']['HEAD']['target']
        revision = self.revision_get(revision_id)
        directory = self.directory_ls(revision['directory'])

        for entry in directory:
            if entry['type'] == 'dir':
                entry['target_url'] = reverse(
                    'api-directory',
                    url_args={'sha1_git': entry['target']})
                entry['dir_url'] = reverse(
                    'api-revision-origin-directory',
                    url_args={'origin_id': origin['id'],
                              'path': entry['name']})
            elif entry['type'] == 'file':
                entry['target_url'] = reverse(
                    'api-content',
                    url_args={'q': 'sha1_git:%s' % entry['target']})
                entry['file_url'] = reverse(
                    'api-revision-origin-directory',
                    url_args={'origin_id': origin['id'],
                              'path': entry['name']})
            elif entry['type'] == 'rev':
                entry['target_url'] = reverse(
                    'api-revision',
                    url_args={'sha1_git': entry['target']})
                entry['rev_url'] = reverse(
                    'api-revision-origin-directory',
                    url_args={'origin_id': origin['id'],
                              'path': entry['name']})

        expected_result = {
            'content': directory,
            'path': '.',
            'revision': revision_id,
            'type': 'dir'
        }

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_result)

    @given(revision())
    def test_api_revision_log(self, revision):
        per_page = 10
        url = reverse('api-revision-log',
                      url_args={'sha1_git': revision},
                      query_params={'per_page': per_page})
        rv = self.client.get(url)

        expected_log = self.revision_log(revision, limit=per_page + 1)
        expected_log = list(map(self._enrich_revision, expected_log))
        has_next = len(expected_log) > per_page

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data,
                         expected_log[:-1] if has_next else expected_log)

        if has_next:
            self.assertIn('Link', rv)
            next_log_url = reverse(
                'api-revision-log',
                url_args={'sha1_git': expected_log[-1]['id']},
                query_params={'per_page': per_page})
            self.assertIn(next_log_url, rv['Link'])

    @given(unknown_revision())
    def test_api_revision_log_not_found(self, unknown_revision):
        url = reverse('api-revision-log',
                      url_args={'sha1_git': unknown_revision})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Revision with sha1_git %s not found.'
                          % unknown_revision
            })
        self.assertFalse(rv.has_header('Link'))

    @given(revision())
    def test_api_revision_log_context(self, revision):
        revisions = self.revision_log(revision, limit=4)

        prev_rev = revisions[0]['id']
        rev = revisions[-1]['id']
        per_page = 10

        url = reverse('api-revision-log',
                      url_args={'sha1_git': rev,
                                'prev_sha1s': prev_rev},
                      query_params={'per_page': per_page})
        rv = self.client.get(url)

        expected_log = self.revision_log(rev, limit=per_page)
        prev_revision = self.revision_get(prev_rev)
        expected_log.insert(0, prev_revision)
        expected_log = list(map(self._enrich_revision, expected_log))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, expected_log)

    @given(origin())
    def test_api_revision_log_by(self, origin):
        per_page = 10
        url = reverse('api-revision-origin-log',
                      url_args={'origin_id': origin['id']},
                      query_params={'per_page': per_page})
        rv = self.client.get(url)

        snapshot = self.snapshot_get_latest(origin['id'])
        expected_log = self.revision_log(
            snapshot['branches']['HEAD']['target'], limit=per_page + 1)
        expected_log = list(map(self._enrich_revision, expected_log))
        has_next = len(expected_log) > per_page

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data,
                         expected_log[:-1] if has_next else expected_log)

        if has_next:
            self.assertIn('Link', rv)
            next_log_url = reverse(
                'api-revision-origin-log',
                url_args={'origin_id': origin['id'],
                          'branch_name': 'HEAD'},
                query_params={'per_page': per_page,
                              'sha1_git': expected_log[-1]['id']})
            self.assertIn(next_log_url, rv['Link'])

    @given(origin())
    def test_api_revision_log_by_ko(self, origin):
        invalid_branch_name = 'foobar'
        url = reverse('api-revision-origin-log',
                      url_args={'origin_id': origin['id'],
                                'branch_name': invalid_branch_name})
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertFalse(rv.has_header('Link'))
        self.assertEqual(
            rv.data, {
                'exception': 'NotFoundExc',
                'reason': 'Revision for origin %s and branch %s not found.'
                          % (origin['id'], invalid_branch_name)
            })

    @patch('swh.web.api.views.revision._revision_directory_by')
    def test_api_revision_directory_ko_not_found(self, mock_rev_dir):
        # given
        mock_rev_dir.side_effect = NotFoundExc('Not found')

        # then
        rv = self.client.get(
            '/api/1/revision/999/directory/some/path/to/dir/')

        self.assertEqual(rv.status_code, 404)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, {
            'exception': 'NotFoundExc',
            'reason': 'Not found'})

        mock_rev_dir.assert_called_once_with(
            {'sha1_git': '999'}, 'some/path/to/dir',
            '/api/1/revision/999/directory/some/path/to/dir/',
            with_data=False)

    @patch('swh.web.api.views.revision._revision_directory_by')
    def test_api_revision_directory_ok_returns_dir_entries(self,
                                                           mock_rev_dir):
        stub_dir = {
            'type': 'dir',
            'revision': '999',
            'content': [{
                'sha1_git': '789',
                'type': 'file',
                'target': '101',
                'target_url': '/api/1/content/sha1_git:101/',
                'name': 'somefile',
                'file_url': '/api/1/revision/999/directory/some/path/'
                            'somefile/'
            }, {
                'sha1_git': '123',
                'type': 'dir',
                'target': '456',
                'target_url': '/api/1/directory/456/',
                'name': 'to-subdir',
                'dir_url': '/api/1/revision/999/directory/some/path/'
                           'to-subdir/',
            }]
        }

        # given
        mock_rev_dir.return_value = stub_dir

        # then
        rv = self.client.get('/api/1/revision/999/directory/some/path/')

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, stub_dir)

        mock_rev_dir.assert_called_once_with(
            {'sha1_git': '999'}, 'some/path',
            '/api/1/revision/999/directory/some/path/',
            with_data=False)

    @patch('swh.web.api.views.revision._revision_directory_by')
    def test_api_revision_directory_ok_returns_content(self, mock_rev_dir):
        stub_content = {
            'type': 'file',
            'revision': '999',
            'content': {
                'sha1_git': '789',
                'sha1': '101',
                'data_url': '/api/1/content/101/raw/',
            }
        }

        # given
        mock_rev_dir.return_value = stub_content

        # then
        url = '/api/1/revision/666/directory/some/other/path/'
        rv = self.client.get(url)

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        self.assertEqual(rv.data, stub_content)

        mock_rev_dir.assert_called_once_with({'sha1_git': '666'},
                                             'some/other/path', url,
                                             with_data=False)

    def _enrich_revision(self, revision):
        author_url = reverse(
            'api-person',
            url_args={'person_id': revision['author']['id']})

        committer_url = reverse(
            'api-person',
            url_args={'person_id': revision['committer']['id']})

        directory_url = reverse(
            'api-directory',
            url_args={'sha1_git': revision['directory']})

        history_url = reverse('api-revision-log',
                              url_args={'sha1_git': revision['id']})

        parents_id_url = []
        for p in revision['parents']:
            parents_id_url.append({
                'id': p,
                'url': reverse('api-revision', url_args={'sha1_git': p})
            })

        revision_url = reverse('api-revision',
                               url_args={'sha1_git': revision['id']})

        revision['author_url'] = author_url
        revision['committer_url'] = committer_url
        revision['directory_url'] = directory_url
        revision['history_url'] = history_url
        revision['url'] = revision_url
        revision['parents'] = parents_id_url

        return revision

    @given(revision())
    def test_api_revision_uppercase(self, revision):
        url = reverse('api-revision-uppercase-checksum',
                      url_args={'sha1_git': revision.upper()})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)

        redirect_url = reverse('api-revision',
                               url_args={'sha1_git': revision})

        self.assertEqual(resp['location'], redirect_url)
from swh.model.hashutil import hash_to_bytes, hash_to_hex
from swh.model.model import (
    Directory,
    DirectoryEntry,
    Revision,
    RevisionType,
    TimestampWithTimezone,
)
from swh.web.api.utils import enrich_content, enrich_directory_entry, enrich_revision
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
from swh.web.tests.strategies import content, new_person, new_swh_date, revision
from swh.web.tests.utils import check_api_get_responses, check_http_get_response


@given(revision())
def test_api_revision(api_client, archive_data, revision):
    url = reverse("api-1-revision", url_args={"sha1_git": revision})
    rv = check_api_get_responses(api_client, url, status_code=200)

    expected_revision = archive_data.revision_get(revision)
    enrich_revision(expected_revision, rv.wsgi_request)
    assert rv.data == expected_revision


def test_api_revision_not_found(api_client):
    unknown_revision_ = random_sha1()

    url = reverse("api-1-revision", url_args={"sha1_git": unknown_revision_})
class SwhBrowseRevisionTest(WebTestCase):

    @given(revision())
    def test_revision_browse(self, revision):
        url = reverse('browse-revision', url_args={'sha1_git': revision})

        revision_data = self.revision_get(revision)

        author_id = revision_data['author']['id']
        author_name = revision_data['author']['name']
        committer_id = revision_data['committer']['id']
        committer_name = revision_data['committer']['name']
        dir_id = revision_data['directory']

        author_url = reverse('browse-person',
                             url_args={'person_id': author_id})
        committer_url = reverse('browse-person',
                                url_args={'person_id': committer_id})
        directory_url = reverse('browse-directory',
                                url_args={'sha1_git': dir_id})
        history_url = reverse('browse-revision-log',
                              url_args={'sha1_git': revision})

        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision.html')
        self.assertContains(resp, '<a href="%s">%s</a>' %
                                  (author_url, author_name))
        self.assertContains(resp, '<a href="%s">%s</a>' %
                                  (committer_url, committer_name))
        self.assertContains(resp, directory_url)
        self.assertContains(resp, history_url)

        for parent in revision_data['parents']:
            parent_url = reverse('browse-revision',
                                 url_args={'sha1_git': parent})
            self.assertContains(resp, '<a href="%s">%s</a>' %
                                      (parent_url, parent))

        author_date = revision_data['date']
        committer_date = revision_data['committer_date']

        message_lines = revision_data['message'].split('\n')

        self.assertContains(resp, format_utc_iso_date(author_date))
        self.assertContains(resp, format_utc_iso_date(committer_date))
        self.assertContains(resp, escape(message_lines[0]))
        self.assertContains(resp, escape('\n'.join(message_lines[1:])))

    @given(origin())
    def test_revision_origin_browse(self, origin):
        snapshot = self.snapshot_get_latest(origin['id'])
        revision = snapshot['branches']['HEAD']['target']
        revision_data = self.revision_get(revision)
        dir_id = revision_data['directory']

        origin_directory_url = reverse('browse-origin-directory',
                                       url_args={'origin_url': origin['url']},
                                       query_params={'revision': revision})

        origin_revision_log_url = reverse('browse-origin-log',
                                          url_args={'origin_url': origin['url']},  # noqa
                                          query_params={'revision': revision})

        url = reverse('browse-revision',
                      url_args={'sha1_git': revision},
                      query_params={'origin': origin['url']})

        resp = self.client.get(url)

        self.assertContains(resp, origin_directory_url)
        self.assertContains(resp, origin_revision_log_url)

        for parent in revision_data['parents']:
            parent_url = reverse('browse-revision',
                                 url_args={'sha1_git': parent},
                                 query_params={'origin': origin['url']})
            self.assertContains(resp, '<a href="%s">%s</a>' %
                                      (parent_url, parent))

        self.assertContains(resp, 'vault-cook-directory')
        self.assertContains(resp, 'vault-cook-revision')

        swh_rev_id = get_swh_persistent_id('revision', revision)
        swh_rev_id_url = reverse('browse-swh-id',
                                 url_args={'swh_id': swh_rev_id})
        self.assertContains(resp, swh_rev_id)
        self.assertContains(resp, swh_rev_id_url)

        swh_dir_id = get_swh_persistent_id('directory', dir_id)
        swh_dir_id_url = reverse('browse-swh-id',
                                 url_args={'swh_id': swh_dir_id})
        self.assertContains(resp, swh_dir_id)
        self.assertContains(resp, swh_dir_id_url)

        self.assertContains(resp, 'swh-take-new-snapshot')

    @given(revision())
    def test_revision_log_browse(self, revision):
        per_page = 10

        revision_log = self.revision_log(revision)

        revision_log_sorted = \
            sorted(revision_log,
                   key=lambda rev: -parse_timestamp(
                       rev['committer_date']).timestamp())

        url = reverse('browse-revision-log',
                      url_args={'sha1_git': revision},
                      query_params={'per_page': per_page})

        resp = self.client.get(url)

        next_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': per_page,
                                              'per_page': per_page})

        nb_log_entries = per_page
        if len(revision_log_sorted) < per_page:
            nb_log_entries = len(revision_log_sorted)

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision-log.html')
        self.assertContains(resp, '<tr class="swh-revision-log-entry',
                            count=nb_log_entries)
        self.assertContains(resp, '<a class="page-link">Newer</a>')

        if len(revision_log_sorted) > per_page:
            self.assertContains(resp,
                                '<a class="page-link" href="%s">Older</a>' %  # noqa
                                escape(next_page_url))

        for log in revision_log_sorted[:per_page]:
            revision_url = reverse('browse-revision',
                                   url_args={'sha1_git': log['id']})
            self.assertContains(resp, log['id'][:7])
            self.assertContains(resp, log['author']['name'])
            self.assertContains(resp, format_utc_iso_date(log['date']))
            self.assertContains(resp, escape(log['message']))
            self.assertContains(resp, format_utc_iso_date(log['committer_date']))  # noqa
            self.assertContains(resp, revision_url)

        if len(revision_log_sorted) <= per_page:
            return

        resp = self.client.get(next_page_url)

        prev_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'per_page': per_page})
        next_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': 2 * per_page,
                                              'per_page': per_page})

        nb_log_entries = len(revision_log_sorted) - per_page
        if nb_log_entries > per_page:
            nb_log_entries = per_page

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision-log.html')
        self.assertContains(resp, '<tr class="swh-revision-log-entry',
                            count=nb_log_entries)

        self.assertContains(resp,
                            '<a class="page-link" href="%s">Newer</a>' %
                            escape(prev_page_url))

        if len(revision_log_sorted) > 2 * per_page:
            self.assertContains(resp,
                                '<a class="page-link" href="%s">Older</a>' %  # noqa
                                escape(next_page_url))

        if len(revision_log_sorted) <= 2 * per_page:
            return

        resp = self.client.get(next_page_url)

        prev_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': per_page,
                                              'per_page': per_page})
        next_page_url = reverse('browse-revision-log',
                                url_args={'sha1_git': revision},
                                query_params={'offset': 3 * per_page,
                                              'per_page': per_page})

        nb_log_entries = len(revision_log_sorted) - 2 * per_page
        if nb_log_entries > per_page:
            nb_log_entries = per_page

        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed('browse/revision-log.html')
        self.assertContains(resp, '<tr class="swh-revision-log-entry',
                            count=nb_log_entries)

        self.assertContains(resp,
                            '<a class="page-link" href="%s">Newer</a>' %
                            escape(prev_page_url))

        if len(revision_log_sorted) > 3 * per_page:
            self.assertContains(resp,
                                '<a class="page-link" href="%s">Older</a>' %  # noqa
                                escape(next_page_url))

    @given(revision(), unknown_revision(), new_origin())
    def test_revision_request_errors(self, revision, unknown_revision,
                                     new_origin):
        url = reverse('browse-revision',
                      url_args={'sha1_git': unknown_revision})
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp,
                            'Revision with sha1_git %s not found' %
                            unknown_revision, status_code=404)

        url = reverse('browse-revision',
                      url_args={'sha1_git': revision},
                      query_params={'origin_type': new_origin['type'],
                                    'origin': new_origin['url']})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 404)
        self.assertTemplateUsed('error.html')
        self.assertContains(resp, 'the origin mentioned in your request'
                                  ' appears broken',
                            status_code=404)

    @given(revision())
    def test_revision_uppercase(self, revision):
        url = reverse('browse-revision-uppercase-checksum',
                      url_args={'sha1_git': revision.upper()})

        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 302)

        redirect_url = reverse('browse-revision',
                               url_args={'sha1_git': revision})

        self.assertEqual(resp['location'], redirect_url)
    assert actual_data == {"a": "some-data"}
    mock_ffk.assert_called_once_with(input_data, {"a", "c"})


def test_error_response_handler(mocker, api_client):
    mock_archive = mocker.patch("swh.web.api.views.stat.archive")
    mock_archive.stat_counters.side_effect = Exception("Something went wrong")

    url = reverse("api-1-stat-counters")
    resp = api_client.get(url)

    assert resp.status_code == 500
    assert "traceback" in resp.data
    assert "Traceback" in resp.data["traceback"]


@given(content(), directory(), revision())
def test_api_endpoints_have_cors_headers(client, content, directory, revision):
    url = reverse("api-1-stat-counters")

    resp = check_http_get_response(
        client, url, status_code=200, http_origin="https://example.org"
    )
    assert ACCESS_CONTROL_ALLOW_ORIGIN in resp

    swhids = [
        gen_swhid(CONTENT, content["sha1_git"]),
        gen_swhid(DIRECTORY, directory),
        gen_swhid(REVISION, revision),
    ]
    url = reverse("api-1-known")
from swh.web.common.utils import reverse
from swh.web.tests.strategies import (
    directory,
    revision,
    unknown_directory,
    unknown_revision,
)
from swh.web.tests.utils import (
    check_api_get_responses,
    check_api_post_responses,
    check_http_get_response,
    check_http_post_response,
)


@given(directory(), revision())
def test_api_vault_cook(api_client, mocker, directory, revision):
    mock_archive = mocker.patch("swh.web.api.views.vault.archive")

    for obj_type, obj_id in (
        ("directory", directory),
        ("revision_gitfast", revision),
    ):
        fetch_url = reverse(
            f"api-1-vault-fetch-{obj_type}",
            url_args={f"{obj_type[:3]}_id": obj_id},
        )
        stub_cook = {
            "type": obj_type,
            "progress_msg": None,
            metadata={"origin": "test"})
        == expected_swhid + ";origin=test"
    )
    assert (
        gen_swhid(swh_object_type, sha1_git, metadata={"origin": None})
        == expected_swhid
    )

    with pytest.raises(BadInputExc) as e:
        gen_swhid("foo", sha1_git)
    assert e.match("Invalid object")

    with pytest.raises(BadInputExc) as e:
        gen_swhid(swh_object_type, "not a valid id")
    assert e.match("Invalid object")


@given(content(), directory(), release(), revision(), snapshot())
def test_resolve_swhid_legacy(content, directory, release, revision, snapshot):
    for obj_type, obj_id in (
        (CONTENT, content["sha1_git"]),
        (DIRECTORY, directory),
        (RELEASE, release),
        (REVISION, revision),
        (SNAPSHOT, snapshot),
    ):
        swhid = gen_swhid(obj_type, obj_id)
        url_args = {}
        if obj_type == CONTENT:
            url_args["query_string"] = f"sha1_git:{obj_id}"
        elif obj_type == SNAPSHOT:
class SwhIdsApiTestCase(WebTestCase, APITestCase):

    @given(origin(), content(), directory(), release(), revision(),
           snapshot())
    def test_swh_id_resolve_success(self, origin, content, directory,
                                    release, revision, snapshot):
        for obj_type_short, obj_type, obj_id in (
                ('cnt', CONTENT, content['sha1_git']),
                ('dir', DIRECTORY, directory),
                ('rel', RELEASE, release),
                ('rev', REVISION, revision),
                ('snp', SNAPSHOT, snapshot)):

            swh_id = 'swh:1:%s:%s;origin=%s' % (obj_type_short, obj_id,
                                                origin['url'])
            url = reverse('api-resolve-swh-pid',
                          url_args={'swh_id': swh_id})

            resp = self.client.get(url)

            if obj_type == CONTENT:
                url_args = {'query_string': 'sha1_git:%s' % obj_id}
            elif obj_type == SNAPSHOT:
                url_args = {'snapshot_id': obj_id}
            else:
                url_args = {'sha1_git': obj_id}

            browse_rev_url = reverse('browse-%s' % obj_type,
                                     url_args=url_args,
                                     query_params={'origin': origin['url']})

            expected_result = {
                'browse_url': browse_rev_url,
                'metadata': {'origin': origin['url']},
                'namespace': 'swh',
                'object_id': obj_id,
                'object_type': obj_type,
                'scheme_version': 1
            }

            self.assertEqual(resp.status_code, 200)
            self.assertEqual(resp.data, expected_result)

    def test_swh_id_resolve_invalid(self):
        rev_id_invalid = '96db9023b8_foo_50d6c108e9a3'
        swh_id = 'swh:1:rev:%s' % rev_id_invalid
        url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id})

        resp = self.client.get(url)

        self.assertEqual(resp.status_code, 400)

    @given(unknown_content(), unknown_directory(), unknown_release(),
           unknown_revision(), unknown_snapshot())
    def test_swh_id_resolve_not_found(self, unknown_content,
                                      unknown_directory, unknown_release,
                                      unknown_revision, unknown_snapshot):
        for obj_type_short, obj_id in (('cnt', unknown_content['sha1_git']),
                                       ('dir', unknown_directory),
                                       ('rel', unknown_release),
                                       ('rev', unknown_revision),
                                       ('snp', unknown_snapshot)):

            swh_id = 'swh:1:%s:%s' % (obj_type_short, obj_id)
            url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id})

            resp = self.client.get(url)

            self.assertEqual(resp.status_code, 404)
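# For reference, a successful resolution asserted above yields a JSON object of
# the following shape (field values are illustrative placeholders):
#
#   {
#       'browse_url': '<browse URL for the object, with ?origin=... if given>',
#       'metadata': {'origin': '<origin url>'},
#       'namespace': 'swh',
#       'object_id': '<40 hex digit hash>',
#       'object_type': '<content|directory|release|revision|snapshot>',
#       'scheme_version': 1
#   }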
class ServiceTestCase(WebTestCase):

    @given(contents())
    def test_lookup_multiple_hashes_all_present(self, contents):
        input_data = []
        expected_output = []
        for cnt in contents:
            input_data.append({'sha1': cnt['sha1']})
            expected_output.append({'sha1': cnt['sha1'], 'found': True})

        self.assertEqual(service.lookup_multiple_hashes(input_data),
                         expected_output)

    @given(contents(), unknown_contents())
    def test_lookup_multiple_hashes_some_missing(self, contents,
                                                 unknown_contents):
        input_contents = list(itertools.chain(contents, unknown_contents))
        random.shuffle(input_contents)

        input_data = []
        expected_output = []
        for cnt in input_contents:
            input_data.append({'sha1': cnt['sha1']})
            expected_output.append({'sha1': cnt['sha1'],
                                    'found': cnt in contents})

        self.assertEqual(service.lookup_multiple_hashes(input_data),
                         expected_output)

    @given(unknown_content())
    def test_lookup_hash_does_not_exist(self, unknown_content):
        actual_lookup = service.lookup_hash('sha1_git:%s' %
                                            unknown_content['sha1_git'])

        self.assertEqual(actual_lookup, {'found': None, 'algo': 'sha1_git'})

    @given(content())
    def test_lookup_hash_exist(self, content):
        actual_lookup = service.lookup_hash('sha1:%s' % content['sha1'])

        content_metadata = self.content_get_metadata(content['sha1'])

        self.assertEqual({'found': content_metadata, 'algo': 'sha1'},
                         actual_lookup)

    @given(unknown_content())
    def test_search_hash_does_not_exist(self, content):
        actual_lookup = service.search_hash('sha1_git:%s' %
                                            content['sha1_git'])

        self.assertEqual({'found': False}, actual_lookup)

    @given(content())
    def test_search_hash_exist(self, content):
        actual_lookup = service.search_hash('sha1:%s' % content['sha1'])

        self.assertEqual({'found': True}, actual_lookup)

    @pytest.mark.skipif(ctags_json_missing,
                        reason="requires ctags with json output support")
    @given(contents_with_ctags())
    def test_lookup_content_ctags(self, contents_with_ctags):
        content_sha1 = random.choice(contents_with_ctags['sha1s'])
        self.content_add_ctags(content_sha1)
        actual_ctags = \
            list(service.lookup_content_ctags('sha1:%s' % content_sha1))

        expected_data = list(self.content_get_ctags(content_sha1))

        for ctag in expected_data:
            ctag['id'] = content_sha1

        self.assertEqual(actual_ctags, expected_data)

    @given(unknown_content())
    def test_lookup_content_ctags_no_hash(self, unknown_content):
        actual_ctags = \
            list(service.lookup_content_ctags('sha1:%s' %
                                              unknown_content['sha1']))

        self.assertEqual(actual_ctags, [])

    @given(content())
    def test_lookup_content_filetype(self, content):
        self.content_add_mimetype(content['sha1'])
        actual_filetype = service.lookup_content_filetype(content['sha1'])

        expected_filetype = self.content_get_mimetype(content['sha1'])
        self.assertEqual(actual_filetype, expected_filetype)

    @given(content())
    def test_lookup_content_language(self, content):
        self.content_add_language(content['sha1'])
        actual_language = service.lookup_content_language(content['sha1'])

        expected_language = self.content_get_language(content['sha1'])
        self.assertEqual(actual_language, expected_language)

    @given(contents_with_ctags())
    def test_lookup_expression(self, contents_with_ctags):
        per_page = 10
        expected_ctags = []
        for content_sha1 in contents_with_ctags['sha1s']:
            if len(expected_ctags) == per_page:
                break
            self.content_add_ctags(content_sha1)
            for ctag in self.content_get_ctags(content_sha1):
                if len(expected_ctags) == per_page:
                    break
                if ctag['name'] == contents_with_ctags['symbol_name']:
                    del ctag['id']
                    ctag['sha1'] = content_sha1
                    expected_ctags.append(ctag)

        actual_ctags = \
            list(service.lookup_expression(contents_with_ctags['symbol_name'],
                                           last_sha1=None, per_page=10))

        self.assertEqual(actual_ctags, expected_ctags)

    def test_lookup_expression_no_result(self):
        expected_ctags = []

        actual_ctags = \
            list(service.lookup_expression('barfoo', last_sha1=None,
                                           per_page=10))
        self.assertEqual(actual_ctags, expected_ctags)

    @pytest.mark.skipif(fossology_missing,
                        reason="requires fossology-nomossa installed")
    @given(content())
    def test_lookup_content_license(self, content):
        self.content_add_license(content['sha1'])
        actual_license = service.lookup_content_license(content['sha1'])

        expected_license = self.content_get_license(content['sha1'])
        self.assertEqual(actual_license, expected_license)

    def test_stat_counters(self):
        actual_stats = service.stat_counters()
        self.assertEqual(actual_stats, self.storage.stat_counters())

    @given(new_origin(), visit_dates())
    def test_lookup_origin_visits(self, new_origin, visit_dates):
        origin_id = self.storage.origin_add_one(new_origin)
        for ts in visit_dates:
            self.storage.origin_visit_add(origin_id, ts)

        actual_origin_visits = list(
            service.lookup_origin_visits(origin_id, per_page=100))

        expected_visits = self.origin_visit_get(origin_id)

        self.assertEqual(actual_origin_visits, expected_visits)

    @given(new_origin(), visit_dates())
    def test_lookup_origin_visit(self, new_origin, visit_dates):
        origin_id = self.storage.origin_add_one(new_origin)
        visits = []
        for ts in visit_dates:
            visits.append(self.storage.origin_visit_add(origin_id, ts))

        visit = random.choice(visits)['visit']
        actual_origin_visit = service.lookup_origin_visit(origin_id, visit)

        expected_visit = dict(
            self.storage.origin_visit_get_by(origin_id, visit))
        expected_visit['date'] = expected_visit['date'].isoformat()
        expected_visit['metadata'] = {}

        self.assertEqual(actual_origin_visit, expected_visit)

    @given(new_origin())
    def test_lookup_origin(self, new_origin):
        origin_id = self.storage.origin_add_one(new_origin)

        actual_origin = service.lookup_origin({'id': origin_id})
        expected_origin = self.storage.origin_get({'id': origin_id})
        self.assertEqual(actual_origin, expected_origin)

        actual_origin = service.lookup_origin({'type': new_origin['type'],
                                               'url': new_origin['url']})
        expected_origin = self.storage.origin_get({'type': new_origin['type'],
                                                   'url': new_origin['url']})
        self.assertEqual(actual_origin, expected_origin)

    @given(invalid_sha1())
    def test_lookup_release_ko_id_checksum_not_a_sha1(self, invalid_sha1):
        with self.assertRaises(BadInputExc) as cm:
            service.lookup_release(invalid_sha1)
        self.assertIn('invalid checksum', cm.exception.args[0].lower())

    @given(sha256())
    def test_lookup_release_ko_id_checksum_too_long(self, sha256):
        with self.assertRaises(BadInputExc) as cm:
            service.lookup_release(sha256)
        self.assertEqual('Only sha1_git is supported.',
                         cm.exception.args[0])

    @given(directory())
    def test_lookup_directory_with_path_not_found(self, directory):
        path = 'some/invalid/path/here'
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory_with_path(directory, path)
        self.assertEqual('Directory entry with path %s from %s '
                         'not found' % (path, directory),
                         cm.exception.args[0])

    @given(directory())
    def test_lookup_directory_with_path_found(self, directory):
        directory_content = self.directory_ls(directory)
        directory_entry = random.choice(directory_content)
        path = directory_entry['name']
        actual_result = service.lookup_directory_with_path(directory, path)
        self.assertEqual(actual_result, directory_entry)

    @given(release())
    def test_lookup_release(self, release):
        actual_release = service.lookup_release(release)
        self.assertEqual(actual_release, self.release_get(release))

    @given(revision(), invalid_sha1(), sha256())
    def test_lookup_revision_with_context_ko_not_a_sha1(
            self, revision, invalid_sha1, sha256):
        sha1_git_root = revision
        sha1_git = invalid_sha1

        with self.assertRaises(BadInputExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Invalid checksum query string',
                      cm.exception.args[0])

        sha1_git = sha256

        with self.assertRaises(BadInputExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Only sha1_git is supported',
                      cm.exception.args[0])

    @given(revision(), unknown_revision())
    def test_lookup_revision_with_context_ko_sha1_git_does_not_exist(
            self, revision, unknown_revision):
        sha1_git_root = revision
        sha1_git = unknown_revision

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Revision %s not found' % sha1_git,
                      cm.exception.args[0])

    @given(revision(), unknown_revision())
    def test_lookup_revision_with_context_ko_root_sha1_git_does_not_exist(
            self, revision, unknown_revision):
        sha1_git_root = unknown_revision
        sha1_git = revision

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_with_context(sha1_git_root, sha1_git)
        self.assertIn('Revision root %s not found' % sha1_git_root,
                      cm.exception.args[0])

    @given(ancestor_revisions())
    def test_lookup_revision_with_context(self, ancestor_revisions):
        sha1_git = ancestor_revisions['sha1_git']
        root_sha1_git = ancestor_revisions['sha1_git_root']
        for sha1_git_root in (root_sha1_git,
                              {'id': hash_to_bytes(root_sha1_git)}):
            actual_revision = \
                service.lookup_revision_with_context(sha1_git_root, sha1_git)

            children = []
            for rev in self.revision_log(root_sha1_git):
                for p_rev in rev['parents']:
                    p_rev_hex = hash_to_hex(p_rev)
                    if p_rev_hex == sha1_git:
                        children.append(rev['id'])

            expected_revision = self.revision_get(sha1_git)
            expected_revision['children'] = children

            self.assertEqual(actual_revision, expected_revision)

    @given(non_ancestor_revisions())
    def test_lookup_revision_with_context_ko(self, non_ancestor_revisions):
        sha1_git = non_ancestor_revisions['sha1_git']
        root_sha1_git = non_ancestor_revisions['sha1_git_root']

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_with_context(root_sha1_git, sha1_git)
        self.assertIn('Revision %s is not an ancestor of %s' %
                      (sha1_git, root_sha1_git),
                      cm.exception.args[0])

    @given(unknown_revision())
    def test_lookup_directory_with_revision_not_found(self,
                                                      unknown_revision):
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory_with_revision(unknown_revision)
        self.assertIn('Revision %s not found' % unknown_revision,
                      cm.exception.args[0])

    @given(revision())
    def test_lookup_directory_with_revision_ko_path_to_nowhere(self,
                                                               revision):
        invalid_path = 'path/to/something/unknown'
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory_with_revision(revision, invalid_path)
        exception_text = cm.exception.args[0].lower()
        self.assertIn('directory or file', exception_text)
        self.assertIn(invalid_path, exception_text)
        self.assertIn('revision %s' % revision, exception_text)
        self.assertIn('not found', exception_text)

    @given(revision_with_submodules())
    def test_lookup_directory_with_revision_submodules(
            self, revision_with_submodules):
        rev_sha1_git = revision_with_submodules['rev_sha1_git']
        rev_dir_path = revision_with_submodules['rev_dir_rev_path']

        actual_data = service.lookup_directory_with_revision(rev_sha1_git,
                                                             rev_dir_path)

        revision = self.revision_get(revision_with_submodules['rev_sha1_git'])
        directory = self.directory_ls(revision['directory'])
        rev_entry = next(e for e in directory if e['name'] == rev_dir_path)

        expected_data = {
            'content': self.revision_get(rev_entry['target']),
            'path': rev_dir_path,
            'revision': rev_sha1_git,
            'type': 'rev'
        }

        self.assertEqual(actual_data, expected_data)

    @given(revision())
    def test_lookup_directory_with_revision_without_path(self, revision):
        actual_directory_entries = \
            service.lookup_directory_with_revision(revision)

        revision_data = self.revision_get(revision)
        expected_directory_entries = \
            self.directory_ls(revision_data['directory'])

        self.assertEqual(actual_directory_entries['type'], 'dir')
        self.assertEqual(actual_directory_entries['content'],
                         expected_directory_entries)

    @given(revision())
    def test_lookup_directory_with_revision_with_path(self, revision):
        revision_data = self.revision_get(revision)
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] in ('file', 'dir')
        ]
        expected_dir_entry = random.choice(dir_entries)

        actual_dir_entry = \
            service.lookup_directory_with_revision(revision,
                                                   expected_dir_entry['name'])

        self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
        self.assertEqual(actual_dir_entry['revision'], revision)
        self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
        if actual_dir_entry['type'] == 'file':
            del actual_dir_entry['content']['checksums']['blake2s256']
            for key in ('checksums', 'status', 'length'):
                self.assertEqual(actual_dir_entry['content'][key],
                                 expected_dir_entry[key])
        else:
            sub_dir_entries = self.directory_ls(expected_dir_entry['target'])
            self.assertEqual(actual_dir_entry['content'], sub_dir_entries)

    @given(revision())
    def test_lookup_directory_with_revision_with_path_to_file_and_data(
            self, revision):
        revision_data = self.revision_get(revision)
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] == 'file'
        ]
        expected_dir_entry = random.choice(dir_entries)
        expected_data = \
            self.content_get(expected_dir_entry['checksums']['sha1'])

        actual_dir_entry = \
            service.lookup_directory_with_revision(revision,
                                                   expected_dir_entry['name'],
                                                   with_data=True)

        self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
        self.assertEqual(actual_dir_entry['revision'], revision)
        self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
        del actual_dir_entry['content']['checksums']['blake2s256']
        for key in ('checksums', 'status', 'length'):
            self.assertEqual(actual_dir_entry['content'][key],
                             expected_dir_entry[key])
        self.assertEqual(actual_dir_entry['content']['data'],
                         expected_data['data'])

    @given(revision())
    def test_lookup_revision(self, revision):
        actual_revision = service.lookup_revision(revision)
        self.assertEqual(actual_revision, self.revision_get(revision))

    @given(new_revision())
    def test_lookup_revision_invalid_msg(self, new_revision):
        new_revision['message'] = b'elegant fix for bug \xff'
        self.storage.revision_add([new_revision])

        revision = service.lookup_revision(hash_to_hex(new_revision['id']))
        self.assertEqual(revision['message'], None)
        self.assertEqual(revision['message_decoding_failed'], True)

    @given(new_revision())
    def test_lookup_revision_msg_ok(self, new_revision):
        self.storage.revision_add([new_revision])

        revision_message = service.lookup_revision_message(
            hash_to_hex(new_revision['id']))

        self.assertEqual(revision_message,
                         {'message': new_revision['message']})

    @given(new_revision())
    def test_lookup_revision_msg_absent(self, new_revision):
        del new_revision['message']
        self.storage.revision_add([new_revision])

        new_revision_id = hash_to_hex(new_revision['id'])

        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_message(new_revision_id)
        self.assertEqual(
            cm.exception.args[0],
            'No message for revision with sha1_git %s.' % new_revision_id)

    @given(unknown_revision())
    def test_lookup_revision_msg_no_rev(self, unknown_revision):
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_revision_message(unknown_revision)
        self.assertEqual(
            cm.exception.args[0],
            'Revision with sha1_git %s not found.' % unknown_revision)

    @given(revisions())
    def test_lookup_revision_multiple(self, revisions):
        actual_revisions = list(service.lookup_revision_multiple(revisions))

        expected_revisions = []
        for rev in revisions:
            expected_revisions.append(self.revision_get(rev))

        self.assertEqual(actual_revisions, expected_revisions)

    @given(unknown_revisions())
    def test_lookup_revision_multiple_none_found(self, unknown_revisions):
        actual_revisions = \
            list(service.lookup_revision_multiple(unknown_revisions))

        self.assertEqual(actual_revisions, [None] * len(unknown_revisions))

    @given(revision())
    def test_lookup_revision_log(self, revision):
        actual_revision_log = \
            list(service.lookup_revision_log(revision, limit=25))
        expected_revision_log = self.revision_log(revision, limit=25)

        self.assertEqual(actual_revision_log, expected_revision_log)

    def _get_origin_branches(self, origin):
        origin_visit = self.origin_visit_get(origin['id'])[-1]
        snapshot = self.snapshot_get(origin_visit['snapshot'])
        branches = {
            k: v
            for (k, v) in snapshot['branches'].items()
            if v['target_type'] == 'revision'
        }
        return branches

    @given(origin())
    def test_lookup_revision_log_by(self, origin):
        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        actual_log = \
            list(service.lookup_revision_log_by(origin['id'], branch_name,
                                                None, limit=25))
        expected_log = \
            self.revision_log(branches[branch_name]['target'], limit=25)

        self.assertEqual(actual_log, expected_log)

    @given(origin())
    def test_lookup_revision_log_by_notfound(self, origin):
        with self.assertRaises(NotFoundExc):
            service.lookup_revision_log_by(origin['id'],
                                           'unknown_branch_name',
                                           None, limit=100)

    @given(unknown_content())
    def test_lookup_content_raw_not_found(self, unknown_content):
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_content_raw('sha1:' + unknown_content['sha1'])
        self.assertIn(
            cm.exception.args[0],
            'Content with %s checksum equals to %s not found!' %
            ('sha1', unknown_content['sha1']))

    @given(content())
    def test_lookup_content_raw(self, content):
        actual_content = service.lookup_content_raw('sha256:%s' %
                                                    content['sha256'])
        expected_content = self.content_get(content['sha1'])

        self.assertEqual(actual_content, expected_content)

    @given(unknown_content())
    def test_lookup_content_not_found(self, unknown_content):
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_content('sha1:%s' % unknown_content['sha1'])
        self.assertIn(
            cm.exception.args[0],
            'Content with %s checksum equals to %s not found!'
            % ('sha1', unknown_content['sha1']))

    @given(content())
    def test_lookup_content_with_sha1(self, content):
        actual_content = service.lookup_content('sha1:%s' % content['sha1'])

        expected_content = self.content_get_metadata(content['sha1'])

        self.assertEqual(actual_content, expected_content)

    @given(content())
    def test_lookup_content_with_sha256(self, content):
        actual_content = service.lookup_content('sha256:%s' %
                                                content['sha256'])

        expected_content = self.content_get_metadata(content['sha1'])

        self.assertEqual(actual_content, expected_content)

    @given(revision())
    def test_lookup_person(self, revision):
        rev_data = self.revision_get(revision)
        actual_person = service.lookup_person(rev_data['author']['id'])

        self.assertEqual(actual_person, rev_data['author'])

    def test_lookup_directory_bad_checksum(self):
        with self.assertRaises(BadInputExc):
            service.lookup_directory('directory_id')

    @given(unknown_directory())
    def test_lookup_directory_not_found(self, unknown_directory):
        with self.assertRaises(NotFoundExc) as cm:
            service.lookup_directory(unknown_directory)
        self.assertIn('Directory with sha1_git %s not found'
                      % unknown_directory,
                      cm.exception.args[0])

    @given(directory())
    def test_lookup_directory(self, directory):
        actual_directory_ls = list(service.lookup_directory(directory))

        expected_directory_ls = self.directory_ls(directory)

        self.assertEqual(actual_directory_ls, expected_directory_ls)

    @given(empty_directory())
    def test_lookup_directory_empty(self, empty_directory):
        actual_directory_ls = list(service.lookup_directory(empty_directory))

        self.assertEqual(actual_directory_ls, [])

    @given(origin())
    def test_lookup_revision_by_nothing_found(self, origin):
        with self.assertRaises(NotFoundExc):
            service.lookup_revision_by(origin['id'], 'invalid-branch-name')

    @given(origin())
    def test_lookup_revision_by(self, origin):
        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        actual_revision = \
            service.lookup_revision_by(origin['id'], branch_name, None)

        expected_revision = \
            self.revision_get(branches[branch_name]['target'])

        self.assertEqual(actual_revision, expected_revision)

    @given(origin(), revision())
    def test_lookup_revision_with_context_by_ko(self, origin, revision):
        with self.assertRaises(NotFoundExc):
            service.lookup_revision_with_context_by(origin['id'],
                                                    'invalid-branch-name',
                                                    None, revision)

    @given(origin())
    def test_lookup_revision_with_context_by(self, origin):
        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        root_rev = branches[branch_name]['target']
        root_rev_log = self.revision_log(root_rev)

        children = defaultdict(list)
        for rev in root_rev_log:
            for rev_p in rev['parents']:
                children[rev_p].append(rev['id'])

        rev = root_rev_log[-1]['id']

        actual_root_rev, actual_rev = service.lookup_revision_with_context_by(
            origin['id'], branch_name, None, rev)

        expected_root_rev = self.revision_get(root_rev)
        expected_rev = self.revision_get(rev)
        expected_rev['children'] = children[rev]

        self.assertEqual(actual_root_rev, expected_root_rev)
        self.assertEqual(actual_rev, expected_rev)

    def test_lookup_revision_through_ko_not_implemented(self):
        with self.assertRaises(NotImplementedError):
            service.lookup_revision_through({
                'something-unknown': 10,
            })

    @given(origin())
    def test_lookup_revision_through_with_context_by(self, origin):
        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        root_rev = branches[branch_name]['target']
        root_rev_log = self.revision_log(root_rev)
        rev = root_rev_log[-1]['id']
        self.assertEqual(
            service.lookup_revision_through({
                'origin_id': origin['id'],
                'branch_name': branch_name,
                'ts': None,
                'sha1_git': rev
            }),
            service.lookup_revision_with_context_by(origin['id'], branch_name,
                                                    None, rev))

    @given(origin())
    def test_lookup_revision_through_with_revision_by(self, origin):
        branches = self._get_origin_branches(origin)
        branch_name = random.choice(list(branches.keys()))

        self.assertEqual(
            service.lookup_revision_through({
                'origin_id': origin['id'],
                'branch_name': branch_name,
                'ts': None,
            }),
            service.lookup_revision_by(origin['id'], branch_name, None))

    @given(ancestor_revisions())
    def test_lookup_revision_through_with_context(self, ancestor_revisions):
        sha1_git = ancestor_revisions['sha1_git']
        sha1_git_root = ancestor_revisions['sha1_git_root']

        self.assertEqual(
            service.lookup_revision_through({
                'sha1_git_root': sha1_git_root,
                'sha1_git': sha1_git,
            }),
            service.lookup_revision_with_context(sha1_git_root, sha1_git))

    @given(revision())
    def test_lookup_revision_through_with_revision(self, revision):
        self.assertEqual(
            service.lookup_revision_through({'sha1_git': revision}),
            service.lookup_revision(revision))

    @given(revision())
    def test_lookup_directory_through_revision_ko_not_found(self, revision):
        with self.assertRaises(NotFoundExc):
            service.lookup_directory_through_revision(
                {'sha1_git': revision}, 'some/invalid/path')

    @given(revision())
    def test_lookup_directory_through_revision_ok(self, revision):
        revision_data = self.revision_get(revision)
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] == 'file'
        ]
        dir_entry = random.choice(dir_entries)

        self.assertEqual(
            service.lookup_directory_through_revision({'sha1_git': revision},
                                                      dir_entry['name']),
            (revision,
             service.lookup_directory_with_revision(revision,
                                                    dir_entry['name'])))

    @given(revision())
    def test_lookup_directory_through_revision_ok_with_data(self, revision):
        revision_data = self.revision_get(revision)
        dir_entries = [
            e for e in self.directory_ls(revision_data['directory'])
            if e['type'] == 'file'
        ]
        dir_entry = random.choice(dir_entries)

        self.assertEqual(
            service.lookup_directory_through_revision({'sha1_git': revision},
                                                      dir_entry['name'],
                                                      with_data=True),
            (revision,
             service.lookup_directory_with_revision(revision,
                                                    dir_entry['name'],
                                                    with_data=True)))

    @given(new_origins(20))
    def test_lookup_origins(self, new_origins):
        nb_origins = len(new_origins)
        expected_origins = self.storage.origin_add(new_origins)

        origin_from_idx = random.randint(1, nb_origins - 1) - 1
        origin_from = expected_origins[origin_from_idx]['id']
        max_origin_idx = expected_origins[-1]['id']
        origin_count = random.randint(1, max_origin_idx - origin_from)

        actual_origins = list(service.lookup_origins(origin_from,
                                                     origin_count))
        expected_origins = list(
            self.storage.origin_get_range(origin_from, origin_count))

        self.assertEqual(actual_origins, expected_origins)
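    # Note: as exercised by the tests above, service.lookup_revision_through()
    # dispatches on the keys of its argument: {'origin_id', 'branch_name',
    # 'ts', 'sha1_git'} goes through lookup_revision_with_context_by(),
    # {'origin_id', 'branch_name', 'ts'} through lookup_revision_by(),
    # {'sha1_git_root', 'sha1_git'} through lookup_revision_with_context(),
    # a bare {'sha1_git'} through lookup_revision(), and any other combination
    # raises NotImplementedError.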