def test_12_publish_repo(self):
    '''publish the repo through the 'dist_1' distributor and wait for the task'''
    report = self.repo.publish(self.pulp, 'dist_1')
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_99_node_unbind_repo(self):
    '''unbind the repo from the node and clean the child's leftover repos'''
    self.node.unbind_repo(self.pulp, self.repo.id, self.node_distributor.id)
    self.assertPulpOK()
    # nodes keep the repos after updating; drop every child repo explicitly
    for child_repo in Repo.list(self.pulp_child):
        Task.wait_for_report(self.pulp_child, child_repo.delete(self.pulp_child))
def test_02_associate_importer(self):
    '''associate a puppet importer; query/queries in importer_config select
    which modules have to be synced'''
    importer_config = {
        'feed': self.feed,
        'queries': ["stdlib", "yum"],
    }
    report = self.repo.associate_importer(
        self.pulp,
        data={
            'importer_type_id': 'puppet_importer',
            'importer_config': importer_config,
        }
    )
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
    # the repo must now report exactly the importer we associated
    importer = self.repo.get_importer(self.pulp, "puppet_importer")
    self.assertEqual(
        importer,
        {
            'id': 'puppet_importer',
            'importer_type_id': 'puppet_importer',
            'repo_id': self.repo.id,
            'config': importer_config,
            'last_sync': None,
        }
    )
def setUpClass(cls):
    '''create a synced puppet source repo, two feed-less destination repos,
    an invalid repo stub and one yum repo'''
    super(PuppetCopyRepoTest, cls).setUpClass()
    # this repo role is hardwired because of the search strings
    # refering to exact names as e.g. tomcat7_rhel
    # The proxy role is considered
    source_role = {
        'id': cls.__name__,
        'feed': 'https://forge.puppetlabs.com',
        'queries': ['tomcat'],
        'proxy': ROLES.get('proxy'),
    }
    # create source repo and sync it to have modules fetched
    cls.source_repo, _, _ = PuppetRepo.from_role(source_role).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
    # create two destination repos for copy purposes
    importer = PuppetImporter(feed=None, queries=[])
    distributors = [PuppetDistributor()]
    cls.dest_repo1, _, _ = PuppetRepo(id=cls.__name__ + '1', importer=importer,
                                      distributors=distributors).create(cls.pulp)
    cls.dest_repo2, _, _ = PuppetRepo(id=cls.__name__ + '2', importer=importer,
                                      distributors=distributors).create(cls.pulp)
    # data stub for a repo that is never actually created
    cls.invalid_repo = Repo(data={'id': cls.__name__ + "_invalidrepo"})
    # create yum repo
    cls.yumrepo, _, _ = YumRepo(id=cls.__name__ + 'yum',
                                importer=YumImporter(feed=None),
                                distributors=[YumDistributor(relative_url='xyz')]
                                ).create(cls.pulp)
def test_05_unassociate_iso_from_copied_repo(self):
    '''remove the 'test.iso' unit from the copied repo'''
    criteria = {"type_ids": ["iso"], "filters": {"unit": {"name": "test.iso"}}}
    report = self.dest_repo1.unassociate_units(self.pulp, data={"criteria": criteria})
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_02_assert_unit_update(self):
    '''trigger an rpm unit update on the consumer group and await the task'''
    unit = {"name": "pike"}
    update_options = {"apply": True, "reboot": False, "importkeys": False}
    report = self.consumer_group.update_unit(self.pulp, unit, "rpm",
                                             options=update_options)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_01_repo_content_applicability(self):
    '''regenerate content applicability for the two test repos'''
    repo_criteria = {"filters": {"id": {"$in": ["test-repo", "test-errata"]}}}
    report = RepoAppl.applicability(self.pulp,
                                    data={"repo_criteria": repo_criteria})
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_07_no_unassociation_within_repo_with_feed(self):
    '''repos with a feed can nowadays delete partial content inside them'''
    criteria = {"type_ids": ["iso"], "filters": {"unit": {"name": "test.iso"}}}
    report = self.source_repo.unassociate_units(self.pulp, data={"criteria": criteria})
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def setUpClass(cls):
    '''create two 'tomcat'-query puppet repos and sync the first one'''
    super(PuppetSearchRepoTest, cls).setUpClass()
    base_id = cls.__name__
    tomcat_queries = ['tomcat']
    cls.repo, _, _ = create_puppet_repo(cls.pulp, base_id, tomcat_queries)
    cls.repo1, _, _ = create_puppet_repo(cls.pulp, base_id + '1', tomcat_queries)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
def test_05_associate_importer(self):
    '''associate a docker importer and verify the repo reports it back'''
    importer_config = {
        'feed': self.feed,
        "upstream_name": "busybox",
    }
    report = self.repo.associate_importer(
        self.pulp,
        data={
            'importer_type_id': 'docker_importer',
            'importer_config': importer_config,
        }
    )
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
    importer = self.repo.get_importer(self.pulp, "docker_importer")
    self.assertEqual(
        importer,
        {
            'id': 'docker_importer',
            'importer_type_id': 'docker_importer',
            'repo_id': self.repo.id,
            'config': importer_config,
            'last_sync': None,
        }
    )
def test_05_repo_publish_finish(self):
    '''a repo.publish.finish event listener should POST the publish task report
    to the bin; asserts request count, method, task identity and tags'''
    self.el.update(self.pulp, {'event_types': ['repo.publish.finish']})
    self.assertPulpOK()
    self.el.reload(self.pulp)
    report = self.repo.publish(self.pulp, self.distributor.id)
    # wait till publish-induced tasks finish
    Task.wait_for_report(self.pulp, report)
    # fetch the tasks spawned for the publish to perform
    tasks = [task for task in Task.from_report(self.pulp, report)
             if u'pulp:action:publish' in task.data['tags']]
    assert tasks, 'no tasks induced'
    # assert bin status
    self.bin.reload()
    assert self.bin.request_count == 1, 'invalid event listener requests count: %s' % \
        self.bin.request_count
    el_request = self.bin.requests[0]
    # assert request method
    assert el_request.method == 'POST', 'invalid request method: %s' % el_request.method
    # assert the request was made after all tasks finished
    tasks_finished_after_request = [task.id for task in tasks
                                    if el_request.time < task.finish_time]
    # doesn't work --- disabling
    #assert tasks_finished_after_request == [], '%s finished after request at %s' % \
    #        (tasks_finished_after_request, el_request.time)
    # the request body contains a task
    el_task = Task.from_call_report_data(json.loads(el_request.body))
    #assert el_task.state == TASK_FINISHED_STATE, 'invalid task state: %s' % el_task.state
    el_task.reload(self.pulp)
    # assert proper task was posted
    assert el_task.id in [task.id for task in tasks], 'invalid task id posted: %s' % el_task.id
    assert sorted([u'pulp:repository:EventListenerRepo', u'pulp:action:publish']) == \
        sorted(el_task.data['tags']), \
        'invalid task tags: %s' % el_task.data['tags']
def test_01_package_category_create(self):
    '''upload a package_category unit referencing the repo's package groups,
    then search the repo to confirm the category landed with those group ids'''
    # grab (at most) one package_group association from the repo
    response = self.repo1.within_repo_search(
        self.pulp,
        data={"criteria": {"type_ids": ["package_group"], "limit": 1}}
    )
    self.assertPulp(code=200)
    result = Association.from_response(response)
    groupList = []
    # make a list of names
    for i in range(0, len(result)):
        groupList.append(result[i].data['metadata']['name'])
    #create metadata for package category import
    data = package_category_metadata(self.repo1.id + "_category1", self.repo1.id, groupList)
    #actually upload category
    with deleting(self.pulp, Upload.create(self.pulp, data=data)) as (upload,):
        Task.wait_for_report(self.pulp, upload.import_to(self.pulp, self.repo1))
        self.assertPulp(code=200)
    #check that group is there and contains specified packages
    response = self.repo1.within_repo_search(
        self.pulp,
        data={"criteria": {"type_ids": ["package_category"],
                           "filters": {"unit": {"id": data["unit_key"]["id"]}}}}
    )
    self.assertPulp(code=200)
    result = Association.from_response(response)
    self.assertEqual(result[0].data["metadata"]["packagegroupids"],
                     data["unit_metadata"]["packagegroupids"])
def test_02_repo_sync_start(self):
    '''a repo.sync.start event listener should POST the running sync task
    to the bin before any sync task finishes'''
    self.el.update(self.pulp, {'event_types': ['repo.sync.start']})
    self.assertPulpOK()
    self.el.reload(self.pulp)
    report = self.repo.sync(self.pulp)
    # wait till the sync is done
    Task.wait_for_report(self.pulp, report)
    # keep track of all the spawned tasks
    tasks = Task.from_report(self.pulp, report)
    assert tasks, 'no tasks induced'
    # fetch the request as POSTed by pulp event listener to the bin (http://requestb.in/<bin_id>)
    self.bin.reload()
    assert self.bin.request_count == 1, 'invalid event listener POST count (%s)' \
        % self.bin.request_count
    el_request = self.bin.requests[0]
    assert el_request.method == 'POST', 'invalid request method: %s' % el_request.method
    # assert the bin was POSTed no later any task finished
    tasks_finished_before_request = [task.id for task in tasks if el_request.time > task.finish_time]
    assert tasks_finished_before_request == [], 'tasks %s finished before request at: %s' % \
        (tasks_finished_before_request, el_request.time)
    # FIXME: not yet specified in docs: assert the bin was not POSTed before any task has started
    # tasks_started_after_request = [task.id for task in tasks if el_request.time < task.start_time]
    # assert tasks_started_after_request == [], 'tasks %s started after request at: %s' % \
    #     (tasks_started_after_request, el_request.time)
    # assert there's a task POSTed to the bin with the same ID pulp reported with sync
    # request.body contains original POSTed task-report-data --- create a Task object from it
    el_task = Task.from_call_report_data(json.loads(el_request.body))
    assert el_task.state == TASK_RUNNING_STATE, 'invalid task state: %s' % el_task.state
    el_task.reload(self.pulp)
    # assert the task is indeed in the tasks list spawned by pulp to perform repo sync
    assert el_task.id in [task.id for task in tasks], 'invalid task id posted: %s' % el_task.id
    assert sorted([u'pulp:repository:EventListenerRepo', u'pulp:action:sync']) == sorted(el_task.data['tags']), \
        'invalid task tags: %s' % el_task.data['tags']
def test_03_repo_sync_finish(self):
    '''a repo.sync.finish event listener should POST the finished sync task
    to the bin; asserts request count, method, task identity and tags'''
    self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
    self.assertPulpOK()
    self.el.reload(self.pulp)
    report = self.repo.sync(self.pulp)
    # wait till the sync is done
    Task.wait_for_report(self.pulp, report)
    # fetch the tasks sync-call has spawned
    tasks = Task.from_report(self.pulp, report)
    assert tasks, 'no tasks induced'
    # check the requestsb.in got notified
    self.bin.reload()
    assert self.bin.request_count == 1, 'invalid event listener requests count: %s' % \
        self.bin.request_count
    el_request = self.bin.requests[0]
    assert el_request.method == 'POST', 'invalid request method: %s' % el_request.method
    # assert the bin was posted no sooner than all tasks have finished
    tasks_finished_after_request = [task.id for task in tasks if el_request.time < task.finish_time]
    # NOTE(review): this asserts the list is NON-empty (some task finished after
    # the POST), while the comment above and the failure message read as if
    # `tasks_finished_after_request == []` was intended. The strict form is
    # disabled as "doesn't work" in the publish-finish test --- confirm which
    # ordering the event listener actually guarantees before changing this.
    assert tasks_finished_after_request, 'tasks %s finished after request at: %s' % \
        (tasks_finished_after_request, el_request.time)
    # the request body contains a task
    el_task = Task.from_call_report_data(json.loads(el_request.body))
    # doesn't work and won't get fixed --- disabling
    # assert el_task.state == TASK_FINISHED_STATE, 'invalid task state: %s' % el_task.state
    el_task.reload(self.pulp)
    # assert proper task was posted
    assert el_task.id in [task.id for task in tasks], 'invalid task id posted: %s' % el_task.id
    assert sorted([u'pulp:repository:EventListenerRepo', u'pulp:action:sync']) == sorted(el_task.data['tags']), \
        'invalid task tags: %s' % el_task.data['tags']
def test_10_repos_no_feed_cannot_be_synced(self):
    '''syncing a feed-less repo is accepted (202) but its task must fail'''
    report = self.dest_repo2.sync(self.pulp)
    self.assertPulp(code=202)
    with self.assertRaises(TaskFailure):
        with self.pulp.asserting(True):
            Task.wait_for_report(self.pulp, report)
def test_06_assert_unit_install(self):
    '''install the 'pike' rpm on the consumer and verify it via the consumer cli'''
    unit = {"name": "pike"}
    expected_rpm = RpmUnit(unit, relevant_data_keys=unit.keys())
    with self.pulp.asserting(True):
        report = self.consumer.install_unit(self.pulp, unit, "rpm")
        Task.wait_for_report(self.pulp, report)
    assert expected_rpm in RpmUnit.list(self.consumer.cli), \
        "rpm %s not installed on %s" % (expected_rpm, self.consumer)
def test_01_publish_repo_group_with_no_members_bz1148937(self):
    '''publishing a member-less repo group must succeed (BZ 1148937)'''
    report = self.repo_group.publish(self.pulp, 'dist_1')
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def setUpClass(cls):
    '''create a repo group with a group_export_distributor plus a synced rpm repo'''
    super(PublishGroupTest, cls).setUpClass()
    # create the repo group (and a spare, never-created one)
    group = RepoGroup(data={'id': cls.__name__ + "_repo_group"})
    response = group.create(cls.pulp)
    cls.repo_group = RepoGroup.from_response(response)
    cls.repo_group1 = RepoGroup(data={'id': cls.__name__ + "_repo_group1"})
    # associate an export distributor with the group
    with cls.pulp.asserting(True):
        response = cls.repo_group.associate_distributor(
            cls.pulp,
            data={
                'distributor_type_id': 'group_export_distributor',
                'distributor_config': {
                    'http': False,
                    'https': False,
                },
                'distributor_id': 'dist_1',
            }
        )
    cls.distributor = GroupDistributor.from_response(response)
    # create an rpm repo from roles and sync it
    rpm_role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.repo, _importer, [_distributor] = YumRepo.from_role(rpm_role).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
def test_05_unassociate_module_from_copied_repo_1076628(self):
    '''unassociate a unit from a copied repo
    https://bugzilla.redhat.com/show_bug.cgi?id=1076628'''
    criteria = {"type_ids": ["puppet_module"],
                "filters": {"unit": {"name": "tomcat7_rhel"}}}
    report = self.dest_repo1.unassociate_units(self.pulp, data={"criteria": criteria})
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_01_applicabilty_consumers(self):
    '''generate content applicability for the two updated consumers'''
    consumer_criteria = {"filters": {"id": {"$in": ["sunflower", "voyager"]}}}
    report = ConsumersApplicability.regenerate(
        self.pulp, data={"consumer_criteria": consumer_criteria})
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_06_associate_importer(self):
    '''associate a yum importer built from the repo role and verify it took'''
    importer_data = YumImporter.from_role(self.repo_role).as_data()
    report = self.repo.associate_importer(self.pulp, data=importer_data)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
    importer = self.repo.get_importer(self.pulp, importer_data['id'])
    # fixed as a doc bug https://bugzilla.redhat.com/show_bug.cgi?id=1076225
    self.assertEqual(importer.id, importer_data['id'])
def test_01_sync_repo_and_publish(self):
    '''sync the repo, then publish it through distributor1'''
    with self.pulp.asserting(True):
        Task.wait_for_report(self.pulp, self.repo.sync(self.pulp))
    with self.pulp.asserting(True):
        Task.wait_for_report(self.pulp,
                             self.repo.publish(self.pulp, self.distributor1.id))
def test_07_assert_nonexisten_unit_uninstall(self):
    '''uninstalling a unit that was never installed must still succeed on
    every consumer; the original built an RpmUnit local that was never
    used --- removed'''
    unit = {'name': '__NO_SUCH_UNIT__'}
    with self.pulp.asserting(True):
        # fan the uninstall out to all consumers, then await all spawned tasks
        task_reports = [consumer.uninstall_unit(self.pulp, unit, 'rpm')
                        for consumer in self.consumers]
        Task.wait_for_reports(self.pulp, task_reports)
def wrapper_ctx(thing):
    # context manager wrapping `thing`: yields it, then (via calling_method)
    # its delete() runs; an async delete (202) is awaited and pulp must be ok
    with calling_method(thing, 'delete', pulp) as thing:
        yield thing
    # async-delete hacks
    # NOTE(review): indentation reconstructed --- the wait/assert is assumed
    # to run after calling_method issued the delete on __exit__; confirm
    if pulp.last_response.status_code == 202:
        Task.wait_for_report(pulp, pulp.last_response)
    assert pulp.is_ok, 'deleting %s caused pulp not feeling ok: %s' % \
        (thing, pulp.last_response)
def tearDownClass(cls):
    '''delete every created repo and unregister all cli consumers'''
    with cls.pulp.asserting(True):
        reports = [repo.delete(cls.pulp) for repo, _, _ in cls.repos]
        Task.wait_for_reports(cls.pulp, reports)
    for consumer in cls.consumers:
        consumer.cli.unregister()
    super(CliConsumerTest, cls).tearDownClass()
def test_08_publish_repo(self):
    '''publish via the role-configured iso distributor'''
    iso_distributor = IsoDistributor.from_role(self.repo_role)
    report = self.repo.publish(self.pulp, iso_distributor.distributor_id)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_04_sync_repo(self):
    '''On the initial sync, all modules (matching any queries if specified)
    will be downloaded to the Pulp server. On subsequent syncs, only new
    modules and new versions of existing modules will be downloaded; any
    modules once present in the feed but since removed are removed from
    the Pulp repository as well.'''
    report = self.repo.sync(self.pulp)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_03_copy_1_module(self):
    '''copy the single 'test.iso' unit from the source repo; beware: the
    feed content changes over time (units get uploaded/updated/deleted)'''
    report = self.dest_repo2.copy(
        self.pulp,
        self.source_repo.id,
        data={"criteria": {"type_ids": ["iso"],
                           "filters": {"unit": {"name": "test.iso"}}}},
    )
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def tearDown(self):
    '''delete repo binding; runs within a "correct" agent running ctx'''
    with self.pulp.asserting(True), \
            self.agent.catching(True), \
            self.agent.running(self.qpid_handle, frequency=10):
        report = self.consumer.unbind_distributor(self.pulp, self.repo.id, self.distributor.id)
        # unbind spawns a task --- 202 means accepted
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, report)
def test_07_sync_repo(self):
    '''re-sync after the queries update and check the module count changed'''
    count_before = Repo.get(self.pulp, self.repo.id).data['content_unit_counts']['puppet_module']
    report = self.repo.sync(self.pulp)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
    count_after = Repo.get(self.pulp, self.repo.id).data['content_unit_counts']['puppet_module']
    # FIXME result can change with time as number of modules is not constant!
    # check that the second i.e. updated query was also processed.
    self.assertTrue(count_before != count_after)
def test_15_rest_of_clean_up(self):
    '''drop the two remaining groups and the repo itself'''
    self.repo_group2.delete(self.pulp)
    self.repo_group3.delete(self.pulp)
    report = self.repo.delete(self.pulp)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_12_delete_iso_orphans_1109870(self):
    '''purge orphaned iso units
    https://bugzilla.redhat.com/show_bug.cgi?id=1109870'''
    report = IsoOrphan.delete_all(self.pulp)
    self.assertPulpOK()
    Task.wait_for_report(self.pulp, report)
def test_01_copy_all_iso(self):
    '''copy everything (empty criteria) from the source repo'''
    report = self.dest_repo1.copy(self.pulp, self.source_repo.id, data={})
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_02_node_distributor_publish(self):
    '''publish the repo through its node distributor'''
    report = self.repo.publish(self.pulp, self.node_distributor.id)
    self.assertPulpOK()
    Task.wait_for_report(self.pulp, report)
def bindRepo(self):
    '''bind the consumer to the repo's distributor and await the task'''
    with self.pulp.asserting(True):
        report = self.consumer.bind_distributor(self.pulp, self.repo.id,
                                                self.distributor.id)
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, report, timeout=5)
def test_01_bind_distributor(self):
    '''bind the consumer to the repo distributor'''
    with self.pulp.asserting(True):
        report = self.consumer.bind_distributor(self.pulp, self.repo.id,
                                                self.distributor.id)
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, report)
def test_07_delete_puppet_orphans_1109870(self):
    '''purge orphaned puppet modules
    https://bugzilla.redhat.com/show_bug.cgi?id=1109870'''
    report = PuppetModuleOrphan.delete_all(self.pulp)
    self.assertPulpOK()
    Task.wait_for_report(self.pulp, report)
class SimplePuppetSearchRepoTest(PuppetSearchRepoTest):
    """Search puppet modules/repos; the repos come from PuppetSearchRepoTest.setUpClass."""

    def test_01_search_modules_within_metadata(self):
        """searching by unit metadata returns only modules named 'tomcat'"""
        response = self.repo.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["puppet_module"],
                               "filters": {"unit": {"name": "tomcat"}}}})
        self.assertPulp(code=200)
        result = Association.from_response(response)
        # the filter must hold for every returned association
        for association in result:
            self.assertEqual('tomcat', association.data['metadata']['name'])

    def test_02_search_modules_outside_metadata(self):
        """a search by association unit_id must agree with the metadata search"""
        # get unit id from inside the metadata
        response = self.repo.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["puppet_module"],
                               "filters": {"unit": {"name": "tomcat7_rhel"}}}})
        self.assertPulp(code=200)
        result_old = Association.from_response(response)
        unit_id = result_old[0].data['metadata']['_id']
        # perform search outside the metadata, i.e. search in the association data
        response = self.repo.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["puppet_module"],
                               "filters": {"association": {"unit_id": unit_id}}}})
        self.assertPulp(code=200)
        result_new = Association.from_response(response)
        # check that there is only one module with this id
        self.assertTrue(len(result_new) == 1)
        # check that the search inside and outside metadata is consistent
        self.assertIn(
            Association(data={'unit_id': unit_id},
                        required_data_keys=['unit_id'],
                        relevant_data_keys=['unit_id']),
            result_new)

    def test_03_search_invalid_modules(self):
        """no 'yum' module was synced into the repo; the search must be empty"""
        response = self.repo.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["puppet_module"],
                               "filters": {"unit": {"name": "yum"}}}})
        self.assertPulp(code=200)
        result = Association.from_response(response)
        self.assertTrue(result == [])

    def test_04_search_repo_in_repos(self):
        """search among puppet-repos by id"""
        repo = Repo.search(self.pulp, data={
            "criteria": {
                "sort": None,
                "fields": None,
                "limit": None,
                "filters": {"$and": [{"id": "SimplePuppetSearchRepoTest"},
                                     {"notes._repo-type": "puppet-repo"}]},
                "skip": None,
            }
        })
        self.assertIn(Repo({"id": self.repo.id}, ['id'], ['id']), repo)

    def test_05_search_repo_with_regexp(self):
        """search puppet-repos whose id matches .*Repo.*"""
        repo = Repo.search(self.pulp, data={
            "criteria": {
                "sort": None,
                "fields": None,
                "limit": None,
                "filters": {"$and": [{"notes._repo-type": "puppet-repo"},
                                     {"id": {"$regex": ".*Repo.*"}}]},
                "skip": None,
            }
        })
        # asserting to 2 as we have two repos matched the pattern
        self.assertTrue(len(repo) == 2)

    def test_06_delete_repos(self):
        """delete both repos concurrently and wait for the spawned tasks"""
        # 'async' became a reserved word in python 3.7; getattr keeps the
        # original pulp.async() call syntactically valid on all versions
        with getattr(self.pulp, 'async')():
            self.repo.delete(self.pulp)
            self.repo1.delete(self.pulp)
        for response in list(self.pulp.last_response):
            Task.wait_for_report(self.pulp, response)
def test_07_sync_repo(self):
    '''kick off a sync and wait for it to finish'''
    report = self.repo.sync(self.pulp)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_10_delete_orphans(self):
    '''purge all orphaned units'''
    report = Orphans.delete(self.pulp)
    self.assertPulpOK()
    Task.wait_for_report(self.pulp, report)
def test_01_race_create(self):
    '''two concurrent creates of one repo: exactly one wins (201), one conflicts (409)'''
    repo = Repo(data=self.repo_config)
    # 'async' became a reserved word in python 3.7; getattr keeps the
    # original pulp.async() call syntactically valid on all versions
    with getattr(self.pulp, 'async')():
        repo.create(self.pulp)
        repo.create(self.pulp)
    self.assertIn(ResponseLike(status_code=409), self.pulp.last_response)
    self.assertIn(ResponseLike(status_code=201), self.pulp.last_response)

def test_02_race_delete(self):
    '''two concurrent deletes of one repo: exactly one delete task succeeds
    see also: https://bugzilla.redhat.com/show_bug.cgi?id=1065455'''
    repo = Repo(data=self.repo_config)
    with getattr(self.pulp, 'async')():
        repo.delete(self.pulp)
        repo.delete(self.pulp)
    responses = self.pulp.last_response
    # only 202 responses spawned tasks worth waiting for
    task_reports = [report for report in responses
                    if ResponseLike(status_code=202) == report]
    passed, failed = 0, 0
    for report in task_reports:
        try:
            Task.wait_for_report(self.pulp, report)
            passed += 1
        except TaskFailure:
            failed += 1
    if ResponseLike(status_code=404) in responses:
        # the loser 404ed synchronously; a single task ran and succeeded
        self.assertEqual(passed, 1)
        self.assertEqual(failed, 0)
    else:
        # both deletes were accepted; one task succeeded, the other failed
        self.assertEqual(passed, 1)
        self.assertEqual(failed, 1)
def test_12_publish_repo(self):
    '''publish the repo via the 'dist_1' distributor'''
    report = self.repo.publish(self.pulp, 'dist_1')
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
class SimpleRepoCopyTest(pulp_test.PulpTest):
    """Copy units of various type_ids between yum repos."""

    @classmethod
    def setUpClass(cls):
        """create two feed-less destination repos and one synced source repo"""
        super(SimpleRepoCopyTest, cls).setUpClass()
        # destination repos: make sure they don't exist yet;
        # no need to wait for the delete to happen
        feed = None
        dest_repo_name = cls.__name__ + '_copy'
        Repo({'id': dest_repo_name}).delete(cls.pulp)
        cls.dest_repo1, _, _ = create_yum_repo(cls.pulp, dest_repo_name, feed)
        # 2nd destination repo
        dest_repo_name = cls.__name__ + '_copy1'
        Repo({'id': dest_repo_name}).delete(cls.pulp)
        cls.dest_repo2, _, _ = create_yum_repo(cls.pulp, dest_repo_name, feed)
        # source repo, fed from the first rpm role repo
        default_repo_config = [repo for repo in ROLES.repos
                               if repo.type == 'rpm'][0]
        source_repo_name = cls.__name__ + '_repo'
        Repo({'id': source_repo_name}).delete(cls.pulp)
        cls.source_repo, _, _ = create_yum_repo(cls.pulp, source_repo_name,
                                                default_repo_config.feed)
        Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))

    def test_01_copy_repo_all(self):
        """copy everything (empty criteria) into dest_repo1"""
        response = self.dest_repo1.copy(self.pulp, self.source_repo.id, data={})
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_02_copy_1_rpm(self):
        """copy just the 'cow' rpm into dest_repo2"""
        response = self.dest_repo2.copy(self.pulp, self.source_repo.id, data={
            'criteria': {'type_ids': ['rpm'],
                         'filters': {"unit": {"name": "cow"}}},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_03_check_that_one_rpm(self):
        """dest_repo2 must hold exactly one rpm, and it must be 'cow'"""
        dest_repo2 = Repo.get(self.pulp, self.dest_repo2.id)
        self.assertEqual(dest_repo2.data['content_unit_counts']['rpm'], 1)
        # check that the one exact rpm got copied, i.e. search by unit name
        response = self.dest_repo2.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["rpm"],
                               "filters": {"unit": {"name": "cow"}}}})
        self.assertPulp(code=200)
        result = Association.from_response(response)
        # this means that only one unit was found with that name
        self.assertTrue(len(result) == 1)

    def test_04_unassociate_rpm_from_copied_repo(self):
        """unassociate the 'cow' unit from the fully-copied repo"""
        response = self.dest_repo1.unassociate_units(self.pulp, data={
            "criteria": {"type_ids": ["rpm"],
                         "filters": {"unit": {"name": "cow"}}},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_05_check_rpm_was_unassociated(self):
        """searching dest_repo1 for 'cow' must now come back empty"""
        response = self.dest_repo1.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["rpm"],
                               "filters": {"unit": {"name": "cow"}}}})
        self.assertPulp(code=200)
        result = Association.from_response(response)
        self.assertTrue(result == [])

    def test_06_copy_rpm(self):
        """copy all rpm units"""
        response = self.dest_repo2.copy(self.pulp, self.source_repo.id, data={
            'criteria': {'type_ids': ['rpm']},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_07_copy_category(self):
        """copy all package_category units"""
        response = self.dest_repo2.copy(self.pulp, self.source_repo.id, data={
            'criteria': {'type_ids': ['package_category']},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_08_copy_group(self):
        """copy all package_group units"""
        response = self.dest_repo2.copy(self.pulp, self.source_repo.id, data={
            'criteria': {'type_ids': ['package_group']},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_09_copy_distribution(self):
        """copy all distribution units"""
        response = self.dest_repo2.copy(self.pulp, self.source_repo.id, data={
            'criteria': {'type_ids': ['distribution']},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_10_copy_erratum(self):
        """copy all erratum units"""
        response = self.dest_repo2.copy(self.pulp, self.source_repo.id, data={
            'criteria': {'type_ids': ['erratum']},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    def test_11_copy_srpm(self):
        """copy all srpm units"""
        response = self.dest_repo2.copy(self.pulp, self.source_repo.id, data={
            'criteria': {'type_ids': ['srpm']},
        })
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)

    @classmethod
    def tearDownClass(cls):
        """delete all repos created by this class, then purge orphans"""
        # 'async' became a reserved word in python 3.7; getattr keeps the
        # original pulp.async() call syntactically valid on all versions.
        # Repo ids derive from cls.__name__ for consistency with setUpClass
        # (the original hard-coded 'SimpleRepoCopyTest_*' literals).
        with getattr(cls.pulp, 'async')():
            for suffix in ['_repo', '_copy', '_copy1']:
                Repo({'id': cls.__name__ + suffix}).delete(cls.pulp)
        for response in list(cls.pulp.last_response):
            Task.wait_for_report(cls.pulp, response)
        # orphans also should be deleted in cleanup
        delete_response = Orphans.delete(cls.pulp)
        Task.wait_for_report(cls.pulp, delete_response)
def test_04_assert_unit_uninstall(self):
    '''uninstall the 'pike' rpm from the consumer'''
    unit = {'name': 'pike'}
    report = self.consumer.uninstall_unit(self.pulp, unit, 'rpm')
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_12_delete_repos(self):
    '''delete every repo created by this class, waiting for each task'''
    for repo_id in (self.dest_repo1.id, self.dest_repo2.id,
                    self.source_repo.id, self.yumrepo.id):
        report = Repo({'id': repo_id}).delete(self.pulp)
        Task.wait_for_report(self.pulp, report)
def test_03_delete_repo(self):
    '''delete the repo and wait for the delete task'''
    with self.pulp.asserting(True):
        Task.wait_for_report(self.pulp, self.repo.delete(self.pulp))
def test_08_publish_repo(self):
    '''publish through the role-configured iso distributor'''
    iso_distributor = IsoDistributor.from_role(self.repo_role)
    report = self.repo.publish(self.pulp, iso_distributor.distributor_id)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_01_sync_repo1(self):
    '''sync repo1 and wait for the spawned task'''
    with self.pulp.asserting(True):
        Task.wait_for_report(self.pulp, self.repo1.sync(self.pulp))
def test_09_delete_repo(self):
    '''a repo can be deleted once; a second attempt must 404'''
    report = self.repo.delete(self.pulp)
    Task.wait_for_report(self.pulp, report)
    # repeated delete: the repo no longer exists
    self.repo.delete(self.pulp)
    self.assertPulp(code=404)
def test_03_delete_distributor(self):
    '''delete the distributor and wait for the task'''
    with self.pulp.asserting(True):
        Task.wait_for_report(self.pulp, self.distributor.delete(self.pulp))
def test_09_delete_repo(self):
    '''delete the repo and await the delete task'''
    report = self.repo.delete(self.pulp)
    Task.wait_for_report(self.pulp, report)
def test_08_unbind_repo(self):
    '''unbind the consumer from the repo distributor'''
    with self.pulp.asserting(True):
        report = self.consumer.unbind_distributor(self.pulp, self.repo.id,
                                                  self.distributor.id)
        Task.wait_for_report(self.pulp, report)
def test_11_repos_no_feed_cannot_be_synced(self):
    '''a feed-less repo accepts the sync call (202) but the task must fail'''
    report = self.dest_repo2.sync(self.pulp)
    self.assertPulp(code=202)
    with self.assertRaises(TaskFailure):
        Task.wait_for_report(self.pulp, report)
def test_02_publish_repo(self):
    '''publish the repo through its distributor'''
    with self.pulp.asserting(True):
        report = self.repo.publish(self.pulp, self.distributor.id)
        Task.wait_for_report(self.pulp, report)
def test_07_sync_repo_914(self):
    '''sync the repo; regression check for https://pulp.plan.io/issues/914'''
    report = self.repo.sync(self.pulp)
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_06_delete_repo(self):
    '''delete the repo and await the delete task'''
    Task.wait_for_report(self.pulp, self.repo.delete(self.pulp))
def test_04_child_repo_sync(self):
    '''sync the repo down to the child node'''
    report = self.node.sync_repo(self.pulp, self.repo.id)
    self.assertPulpOK()
    Task.wait_for_report(self.pulp, report)
def test_01_publish_repo_group_with_no_members_bz1148937(self):
    '''an empty repo group must still publish fine (BZ 1148937)'''
    report = self.repo_group.publish(self.pulp, 'dist_1')
    self.assertPulp(code=202)
    Task.wait_for_report(self.pulp, report)
def test_13_delete_repo(self):
    '''delete the repo; a second delete must 404'''
    report = self.repo.delete(self.pulp)
    Task.wait_for_report(self.pulp, report)
    # repeated delete: the repo no longer exists
    self.repo.delete(self.pulp)
    self.assertPulp(code=404)
def test_11_delete_repos(self):
    '''delete both destination repos, waiting for each delete task'''
    for repo in (self.dest_repo1, self.dest_repo2):
        Task.wait_for_report(self.pulp, repo.delete(self.pulp))