def testcase_01_upload_and_download_using_dnf_rpm(self):
    """Upload an rpm via URL, publish the repo, then fetch the package back with dnf.

    Asserts both that the served rpm carries the uploaded package name and
    that dnf can install/resolve "bear" from the published repo.
    """
    # create yum-repo, -importer, -distributor; all deleted on scope exit
    with deleting(self.pulp, *create_yum_repo(
            self.pulp, 'test_22_rpm_repo_for_dnf')) as (repo, (importer, (distributor))):
        # create and perform an rpm url upload
        # FIX: deleting() yields a tuple of the tracked objects (sibling
        # blocks unpack `as (upload,)`); a bare `as upload` would bind the
        # tuple itself and `upload.import_to` would fail
        with deleting(self.pulp, upload_url_rpm(self.pulp, self.rpm_url)) as (upload,):
            # assign the upload to the repo
            response = upload.import_to(self.pulp, repo)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, response)
            # publish so the content becomes accessible over http
            response = repo.publish(self.pulp, distributor.id)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, response)
            # fetch the package through the published repo
            pulp_rpm_url = distributor.content_url(
                self.pulp, url_basename(self.rpm_url))
            pulp_repo = distributor.content_url(self.pulp)
            with closing(temp_url(pulp_rpm_url)) as tmpfile:
                # the fetched rpm has the same package name as the uploaded one
                assert url_basename(self.rpm_url).startswith(
                    rpm_metadata(tmpfile)['unit_key']['name'])
            assert "bear" == download_package_with_dnf(
                self.pulp, pulp_repo, "bear")
def testcase_03_parallel_upload_rpms(self):
    """Upload two rpms into the same repo concurrently via gevent greenlets."""
    import gevent
    with deleting(self.pulp, *create_yum_repo(
            self.pulp, 'upload_test_rpm_repo')) as (repo, (importer, (distributor))):
        # FIX: pass the uploader and its arguments to spawn() directly.
        # The original `gevent.spawn(lambda: self.rpm_uploader(..., url, ...))`
        # late-binds `url`: the greenlets only run after the comprehension has
        # finished, so both would have uploaded the last url in the list.
        jobs = [gevent.spawn(self.rpm_uploader, self.pulp, url, repo, distributor)
                for url in [self.rpm_url_bear, self.rpm_url_mouse]]
        gevent.joinall(jobs, raise_error=True)
def _test_02_repo_publish_finish(self):
    """Publish with an invalid distributor id and verify the event listener
    bin received exactly one POST carrying the failed task."""
    # NOTE(review): sibling tests update the listener with the 'event_types'
    # key; 'event_listener' here looks like a typo -- confirm against the
    # EventListener update API before changing
    self.el.update(self.pulp, {'event_listener': ['repo.publish.finish']})
    self.el.reload(self.pulp)
    repo_role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    repo, importer, [distributor] = YumRepo(
        id='publish_error_repo',
        importer=Importer.from_role(repo_role),
        distributors=[
            YumDistributor(distributor_type_id='invalid_distributor_id',
                           relative_url='xyz')
        ]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.publish(self.pulp, 'invalid_distributor_id')
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the publish task failed
            Task.wait_for_report(self.pulp, response)
        # FIX: was bound to `task` while the last assertion iterates `tasks`,
        # which raised NameError before the check could run
        tasks = Task.from_report(self.pulp, response)
        # assert the bin contains a request with a failed task in body
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [
            task.id for task in tasks
        ], 'invalid request.body:Task.id: %s' % el_task.id
def test_01_package_category_create(self):
    """Upload a package_category referencing the repo's package groups and
    verify the category unit landed with the expected group ids."""
    response = self.repo1.within_repo_search(
        self.pulp,
        data={"criteria": {"type_ids": ["package_group"], "limit": 1}})
    self.assertPulp(code=200)
    result = Association.from_response(response)
    # make a list of group names (comprehension instead of the index loop)
    group_list = [unit.data['metadata']['name'] for unit in result]
    # create metadata for package category import
    data = package_category_metadata(
        self.repo1.id + "_category1", self.repo1.id, group_list)
    # actually upload category
    with deleting(self.pulp, Upload.create(self.pulp, data=data)) as (upload,):
        Task.wait_for_report(self.pulp, upload.import_to(self.pulp, self.repo1))
        self.assertPulp(code=200)
        # check that the category is there and references the groups
        response = self.repo1.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["package_category"],
                               "filters": {"unit": {"id": data["unit_key"]["id"]}}}})
        self.assertPulp(code=200)
        result = Association.from_response(response)
        self.assertEqual(result[0].data["metadata"]["packagegroupids"],
                         data["unit_metadata"]["packagegroupids"])
def test_01_repo_sync_finish(self):
    """Sync from a dead feed; the listener bin must get one POST with the failed task."""
    self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
    self.el.reload(self.pulp)
    with deleting(
            self.pulp,
            *create_yum_repo(self.pulp, 'sync_error_repo',
                             feed='http://example.com/repos/none')
    ) as (repo, (importer, distributor)):
        report = repo.sync(self.pulp)
        self.assertPulpOK()
        # the feed does not exist, so the sync task has to fail
        with self.assertRaises(TaskFailure):
            Task.wait_for_report(self.pulp, report)
        tasks = Task.from_report(self.pulp, report)
        # exactly one notification must have landed in the bin
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        # the request body is a call report for one of the failed sync tasks
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [t.id for t in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def test_01_repo_sync_finish(self):
    """Sync from a dead feed and verify the event listener POSTed the failed task."""
    self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
    self.el.reload(self.pulp)
    # FIX: dropped an unused `repo_role = [...][0]` lookup the original
    # computed from ROLES but never referenced
    repo, importer, [distributor] = YumRepo(
        id='sync_error_repo',
        importer=YumImporter(feed='http://example.com/repos/none'),
        distributors=[YumDistributor(relative_url='/repos/none')
                      ]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.sync(self.pulp)
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the sync did not succeed
            Task.wait_for_report(self.pulp, response)
        tasks = Task.from_report(self.pulp, response)
        # assert the bin contains request with a failed task in body
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        # doesn't work and won't get fixed --- disabling
        # assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [
            task.id for task in tasks
        ], 'invalid request.body:Task.id: %s' % el_task.id
def testcase_02_upload_rpm(self):
    """Create a feedless yum repo and push a single rpm into it by url."""
    # build repo, importer and distributor explicitly
    repo, importer, [distributor] = YumRepo(
        id='upload_test_rpm_repo',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')],
    ).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor) as (repo, importer, distributor):
        # upload the bear rpm and verify it through the distributor
        self.rpm_uploader(self.pulp, self.rpm_url_bear, repo, distributor)
def test_02_repo_publish_finish(self):
    """Publish with an invalid distributor id and verify the listener bin
    received exactly one POST carrying the failed task."""
    # NOTE(review): sibling tests pass the 'event_types' key here;
    # 'event_listener' looks like a typo -- confirm against the
    # EventListener update API before changing
    self.el.update(self.pulp, {'event_listener': ['repo.publish.finish']})
    self.el.reload(self.pulp)
    with deleting(
            self.pulp,
            *create_yum_repo(
                self.pulp, 'publish_error_repo',
                feed='https://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/zoo/')
    ) as (repo, (importer, distributor)):
        response = repo.publish(self.pulp, 'invalid_distributor_id')
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the publish task failed
            Task.wait_for_report(self.pulp, response)
        # FIX: was bound to `task` while the last assertion iterates `tasks`,
        # which raised NameError before the check could run
        tasks = Task.from_report(self.pulp, response)
        # assert the bin contains a request with a failed task in body
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [
            task.id for task in tasks
        ], 'invalid request.body:Task.id: %s' % el_task.id
def testcase_02_upload_rpm(self):
    """Create a fresh yum repo and upload a single rpm into it."""
    # repo/importer/distributor are cleaned up by the context manager
    with deleting(self.pulp, *create_yum_repo(
            self.pulp, 'upload_test_rpm_repo')) as (repo, (importer, (distributor))):
        # upload the bear rpm by url, publish and verify via the distributor
        self.rpm_uploader(self.pulp, self.rpm_url_bear, repo, distributor)
def testcase_03_parallel_upload_rpms(self):
    """Upload two rpms concurrently into one feedless repo."""
    import gevent
    repo, importer, [distributor] = YumRepo(
        id='upload_test_rpm_repo',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor) as (repo, importer, distributor):
        # FIX: spawn with explicit arguments; the original
        # `gevent.spawn(lambda: ... url ...)` late-binds `url`, so every
        # greenlet would have uploaded the last url in the list
        jobs = [gevent.spawn(self.rpm_uploader, self.pulp, url, repo, distributor)
                for url in [self.rpm_url_bear, self.rpm_url_mouse]]
        gevent.joinall(jobs, raise_error=True)
def testcase_01_upload_and_download_using_dnf_rpm(self):
    """Upload an rpm by url, publish the repo and fetch the package back via dnf."""
    # create yum-repo, -importer, -distributor; deleted when the block exits
    with deleting(self.pulp, *create_yum_repo(self.pulp, 'test_22_rpm_repo_for_dnf')) as (repo, (importer, (distributor))):
        # create and perform an rpm url upload
        # FIX: deleting() yields a tuple of the tracked objects (the sibling
        # variant of this test unpacks `as (upload,)`); a bare `as upload`
        # would bind the tuple, not the Upload object
        with deleting(self.pulp, upload_url_rpm(self.pulp, self.rpm_url)) as (upload,):
            # assign the upload to the repo
            response = upload.import_to(self.pulp, repo)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, response)
            # publish so the content is accessible
            response = repo.publish(self.pulp, distributor.id)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, response)
            # fetch the package through the repo
            pulp_rpm_url = distributor.content_url(self.pulp, url_basename(self.rpm_url))
            pulp_repo = distributor.content_url(self.pulp)
            with closing(temp_url(pulp_rpm_url)) as tmpfile:
                # the fetched rpm name matches the uploaded package
                assert url_basename(self.rpm_url).startswith(rpm_metadata(tmpfile)['unit_key']['name'])
            assert "bear" == download_package_with_dnf(self.pulp, pulp_repo, "bear")
def testcase_03_parallel_upload_rpms(self):
    """Run two rpm uploads against one repo in parallel greenlets."""
    import gevent
    with deleting(self.pulp, *create_yum_repo(
            self.pulp, 'upload_test_rpm_repo')) as (repo, (importer, (distributor))):
        # FIX: hand the callable and args to spawn() directly; wrapping in a
        # zero-arg lambda late-binds `url` and both greenlets would run with
        # the final value of the loop variable
        jobs = [gevent.spawn(self.rpm_uploader, self.pulp, url, repo, distributor)
                for url in [self.rpm_url_bear, self.rpm_url_mouse]]
        gevent.joinall(jobs, raise_error=True)
def testcase_01_upload_and_download_using_dnf_rpm(self):
    """Upload an rpm by url, publish, then pull the package back through dnf."""
    # build the repo triple explicitly: feedless importer, plain distributor
    repo, importer, [distributor] = YumRepo(
        id='test_22_rpm_repo_for_dnf',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        # url-upload the rpm; the upload object is deleted on scope exit
        with deleting(self.pulp, upload_url_rpm(self.pulp, self.rpm_url)) as (upload,):
            # attach the uploaded unit to the repo
            report = upload.import_to(self.pulp, repo)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, report)
            # publish to make the content reachable
            report = repo.publish(self.pulp, distributor.id)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, report)
            # fetch the package through the published repo
            rpm_link = distributor.content_url(self.pulp, url_basename(self.rpm_url))
            repo_link = distributor.content_url(self.pulp)
            with closing(temp_url(rpm_link)) as tmpfile:
                # the fetched rpm name matches the uploaded package
                assert url_basename(self.rpm_url).startswith(
                    rpm_metadata(tmpfile)['unit_key']['name'])
            assert "bear" == download_package_with_dnf(self.pulp, repo_link, "bear")
def rpm_uploader(pulp, url, repo, distributor):
    '''Upload an rpm from `url` into `repo`, publish, and verify the download.'''
    # create an already fed upload object
    # FIX: unpack the tuple deleting() yields -- the sibling implementation
    # of this helper uses `as (upload,)`; a bare `as upload` binds the tuple
    # itself and `upload.import_to` would fail with AttributeError
    with deleting(pulp, upload_url_rpm(pulp, url)) as (upload,):
        # assign upload to repo
        Task.wait_for_report(pulp, upload.import_to(pulp, repo))
        # publish the content
        Task.wait_for_report(pulp, repo.publish(pulp, distributor.id))
        # download the rpm from pulp now
        pulp_rpm_url = distributor.content_url(pulp, url_basename(url))
        with closing(temp_url(pulp_rpm_url)) as tmpfile:
            # make sure the rpm fetched has the same name as the one uploaded
            assert url_basename(url).startswith(rpm_metadata(tmpfile)['unit_key']['name'])
def iso_uploader(pulp, url, repo, distributor):
    '''Upload an iso from `url` into `repo`, publish, and verify its checksum.'''
    # the already-fed upload object is deleted once the block exits
    with deleting(pulp, upload_url_iso(pulp, url)) as (upload,):
        # associate the upload with the repo
        Task.wait_for_report(pulp, upload.import_to(pulp, repo))
        # publish so the iso is served
        Task.wait_for_report(pulp, repo.publish(pulp, distributor.id))
        # fetch the iso back from pulp
        iso_link = distributor.content_url(pulp, url_basename(url))
        with closing(temp_url(iso_link)) as tmpfile:
            # checksum of the served file equals that of the uploaded one
            assert upload.data['unit_key']['checksum'] == iso_metadata(tmpfile)['unit_key']['checksum']
def rpm_uploader(pulp, url, repo, distributor):
    '''Upload an rpm from `url` into `repo`, publish, and verify its name.'''
    # the already-fed upload object is deleted once the block exits
    with deleting(pulp, upload_url_rpm(pulp, url)) as (upload,):
        # attach the uploaded unit to the repo
        Task.wait_for_report(pulp, upload.import_to(pulp, repo))
        # publish the repo content
        Task.wait_for_report(pulp, repo.publish(pulp, distributor.id))
        # fetch the rpm back from pulp
        rpm_link = distributor.content_url(pulp, url_basename(url))
        with closing(temp_url(rpm_link)) as tmpfile:
            # the served rpm carries the uploaded package name
            assert url_basename(url).startswith(
                rpm_metadata(tmpfile)['unit_key']['name'])
def iso_uploader(pulp, url, repo, distributor):
    '''Upload an iso from `url` into `repo`, publish, and verify its checksum.'''
    # create an already fed upload object
    # FIX: unpack the tuple deleting() yields -- the sibling implementation
    # of this helper uses `as (upload,)`; a bare `as upload` binds the tuple
    # itself and `upload.import_to` would fail with AttributeError
    with deleting(pulp, upload_url_iso(pulp, url)) as (upload,):
        # assign upload to repo
        Task.wait_for_report(pulp, upload.import_to(pulp, repo))
        # publish the content
        Task.wait_for_report(pulp, repo.publish(pulp, distributor.id))
        # download the iso from pulp now
        pulp_iso_url = distributor.content_url(pulp, url_basename(url))
        with closing(temp_url(pulp_iso_url)) as tmpfile:
            # make sure the iso fetched has the same checksum as the one uploaded
            assert upload.data['unit_key']['checksum'] == iso_metadata(
                tmpfile)['unit_key']['checksum']
def test_01_repo_sync_finish(self):
    """Sync a repo with a nonexistent feed; the bin must receive the failed task."""
    self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
    self.el.reload(self.pulp)
    with deleting(self.pulp,
                  *create_yum_repo(self.pulp, 'sync_error_repo',
                                   feed='http://example.com/repos/none')
                  ) as (repo, (importer, distributor)):
        report = repo.sync(self.pulp)
        self.assertPulpOK()
        # the sync must fail -- the feed is unreachable
        with self.assertRaises(TaskFailure):
            Task.wait_for_report(self.pulp, report)
        tasks = Task.from_report(self.pulp, report)
        # the bin has to hold a single POSTed call report for a failed task
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [t.id for t in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def _test_02_repo_publish_finish(self):
    """Publish with an invalid distributor id and verify the listener bin
    received one POST carrying the failed task."""
    # NOTE(review): sibling tests use the 'event_types' key here;
    # 'event_listener' looks like a typo -- confirm against the
    # EventListener update API before changing
    self.el.update(self.pulp, {'event_listener': ['repo.publish.finish']})
    self.el.reload(self.pulp)
    with deleting(self.pulp,
                  *create_yum_repo(self.pulp, 'publish_error_repo',
                                   feed='https://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/zoo/')
                  ) as (repo, (importer, distributor)):
        response = repo.publish(self.pulp, 'invalid_distributor_id')
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the publish task failed
            Task.wait_for_report(self.pulp, response)
        # FIX: was bound to `task` while the last assertion iterates `tasks`,
        # which raised NameError before the check could run
        tasks = Task.from_report(self.pulp, response)
        # assert the bin contains a request with a failed task in body
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [task.id for task in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def test_03_package_group_copy(self):
    """Upload a package_group built from repo1's rpms and copy it to repo2,
    then verify the copied group's mandatory package names."""
    response = self.repo1.within_repo_search(
        self.pulp, data={"criteria": {"type_ids": ["rpm"], "limit": 5}})
    self.assertPulp(code=200)
    result = Association.from_response(response)
    # make a list of rpm names (comprehension instead of the index loop)
    rpm_list = [unit.data['metadata']['name'] for unit in result]
    # create metadata for package group import
    data = package_group_metadata(
        self.repo1.id + "_group3", self.repo1.id, rpm_list)
    # actually upload group
    with deleting(self.pulp, Upload.create(self.pulp, data=data)) as (upload,):
        Task.wait_for_report(self.pulp, upload.import_to(self.pulp, self.repo1))
        self.assertPulp(code=200)
        # copy group to other repo
        response = self.repo2.copy(
            self.pulp, self.repo1.id,
            data={
                'criteria': {
                    'type_ids': ['package_group'],
                    'filters': {"unit": {"name": data["unit_metadata"]["name"]}},
                },
            })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)
        # check that group is there and contains specified packages
        response = self.repo2.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["package_group"],
                               "filters": {"unit": {"id": data["unit_key"]["id"]}}}})
        self.assertPulp(code=200)
        result = Association.from_response(response)
        self.assertEqual(result[0].data["metadata"]["mandatory_package_names"],
                         data["unit_metadata"]["mandatory_package_names"])
def test_01_cud(self):
    """Create, fetch, list and update an http event listener."""
    # instantiate default http listener pointing at the request bin
    listener = EventListener.http(self.bin.url)
    response = listener.create(self.pulp)
    self.assertPulpOK()
    with deleting(self.pulp, EventListener.from_response(response)) as el1:
        # fetching by id returns an equal listener
        assert EventListener.get(self.pulp, el1.id) == el1, 'failed fetching %s' % el1
        # the listener shows up in the listing
        assert el1 in EventListener.list(self.pulp), 'failed listing %s' % el1
        # updating the event types sticks after a reload
        el1.update(self.pulp,
                   {'event_types': ['repo.sync.finish', 'repo.sync.start']})
        self.assertPulpOK()
        el1.reload(self.pulp)
        assert sorted(el1.data['event_types']) == \
            sorted(['repo.sync.finish', 'repo.sync.start']), 'update failed: %s' % el1
def _test_02_repo_publish_finish(self):
    """Publish with an invalid distributor id and verify the listener bin
    received one POST carrying the failed task."""
    # NOTE(review): sibling tests use the 'event_types' key here;
    # 'event_listener' looks like a typo -- confirm against the
    # EventListener update API before changing
    self.el.update(self.pulp, {'event_listener': ['repo.publish.finish']})
    self.el.reload(self.pulp)
    repo_role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    repo, importer, [distributor] = YumRepo(
        id='publish_error_repo',
        importer=Importer.from_role(repo_role),
        distributors=[YumDistributor(distributor_type_id='invalid_distributor_id',
                                     relative_url='xyz')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.publish(self.pulp, 'invalid_distributor_id')
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the publish task failed
            Task.wait_for_report(self.pulp, response)
        # FIX: was bound to `task` while the last assertion iterates `tasks`,
        # which raised NameError before the check could run
        tasks = Task.from_report(self.pulp, response)
        # assert the bin contains a request with a failed task in body
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [task.id for task in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def test_02_package_category_delete(self):
    """Upload an (empty) package_category, unassociate it, and verify it is gone."""
    # category metadata referencing no groups
    group_list = []
    data = package_category_metadata(
        self.repo1.id + "_category1", self.repo1.id, group_list)
    # upload the category unit into repo1
    with deleting(self.pulp, Upload.create(self.pulp, data=data)) as (upload,):
        Task.wait_for_report(self.pulp, upload.import_to(self.pulp, self.repo1))
        self.assertPulp(code=200)
        # unassociate the category unit from the repo
        item = {"criteria": {"type_ids": ["package_category"],
                             "filters": {"unit": {"id": data["unit_key"]["id"]}}}}
        response = self.repo1.unassociate_units(self.pulp, item)
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)
        # the category must no longer be found in the repo
        response = self.repo1.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["package_category"],
                               "filters": {"unit": {"id": data["unit_key"]["id"]}}}})
        self.assertPulp(code=200)
        result = Association.from_response(response)
        self.assertEqual(len(result), 0)
def test_01_repo_sync_finish(self):
    """Sync from a dead feed and verify the event listener POSTed the failed task."""
    self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
    self.el.reload(self.pulp)
    # FIX: dropped an unused `repo_role = [...][0]` lookup the original
    # computed from ROLES but never referenced
    repo, importer, [distributor] = YumRepo(
        id='sync_error_repo',
        importer=YumImporter(feed='http://example.com/repos/none'),
        distributors=[YumDistributor(relative_url='/repos/none')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.sync(self.pulp)
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the sync did not succeed
            Task.wait_for_report(self.pulp, response)
        tasks = Task.from_report(self.pulp, response)
        # assert the bin contains request with a failed task in body
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        # doesn't work and won't get fixed --- disabling
        # assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
        assert el_task.id in [task.id for task in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def tearDownClass(cls):
    """Delete the class-level repo objects, then run the parent teardown."""
    # deleting() does all the work on exit; nothing to run inside the block
    with deleting(cls.pulp, cls.repo, cls.importer, cls.distributor):
        pass
    super(EventListenerTest, cls).tearDownClass()
def testcase_02_upload_rpm(self):
    """Build a throwaway yum repo and run one rpm upload through it."""
    # the helper creates repo + importer + distributor; deleting() tears them down
    with deleting(self.pulp,
                  *create_yum_repo(self.pulp, 'upload_test_rpm_repo')
                  ) as (repo, (importer, (distributor))):
        # push the bear rpm and let the uploader verify the round trip
        self.rpm_uploader(self.pulp, self.rpm_url_bear, repo, distributor)
def tearDownClass(cls):
    """Deactivate the node, delete the class-level objects, run parent teardown."""
    cls.node.deactivate(cls.pulp)
    # deleting() performs all removals on exit; the block body is empty
    with deleting(cls.pulp, cls.repo, cls.importer, cls.distributor,
                  cls.node_distributor):
        pass
    super(NodeTestRepo, cls).tearDownClass()