def test_01_repo_sync_finish(self):
    """Check a failed repo sync POSTs its task report to the event-listener bin.

    The repo's feed deliberately points to a non-existent location so the
    sync is guaranteed to fail.
    """
    # listen for sync-finish events only
    self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
    self.el.reload(self.pulp)
    # NOTE: dropped an unused local (an rpm repo_role was computed but never read)
    repo, importer, [distributor] = YumRepo(
        id='sync_error_repo',
        importer=YumImporter(feed='http://example.com/repos/none'),
        distributors=[YumDistributor(relative_url='/repos/none')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.sync(self.pulp)
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the sync did not succeed
            Task.wait_for_report(self.pulp, response)
        tasks = Task.from_report(self.pulp, response)
    # assert the bin contains request with a failed task in body
    self.bin.reload()
    assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
    el_request = self.bin.requests[0]
    assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
    el_task = Task.from_call_report_data(json.loads(el_request.body))
    # doesn't work and won't get fixed --- disabling
    # assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
    assert el_task.id in [task.id for task in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def _test_02_repo_publish_finish(self):
    """Check a failed repo publish POSTs its task report to the listener bin.

    Leading underscore keeps the runner from collecting this test.
    """
    # BUG FIX: the listener config key is 'event_types' (as used in
    # test_01_repo_sync_finish), not 'event_listener'
    self.el.update(self.pulp, {'event_types': ['repo.publish.finish']})
    self.el.reload(self.pulp)
    repo_role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    # the invalid distributor type id guarantees the publish fails
    repo, importer, [distributor] = YumRepo(
        id='publish_error_repo',
        importer=Importer.from_role(repo_role),
        distributors=[YumDistributor(distributor_type_id='invalid_distributor_id',
                                     relative_url='xyz')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.publish(self.pulp, 'invalid_distributor_id')
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the publish task failed
            Task.wait_for_report(self.pulp, response)
        # BUG FIX: was assigned to 'task' while referenced below as 'tasks'
        # (NameError at the final assert)
        tasks = Task.from_report(self.pulp, response)
    # assert the bin contains a request with a failed task in body
    self.bin.reload()
    assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
    el_request = self.bin.requests[0]
    assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
    el_task = Task.from_call_report_data(json.loads(el_request.body))
    assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
    assert el_task.id in [task.id for task in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def setUpClass(cls):
    """Create two empty destination repos and a synced source repo."""
    super(SimpleRepoCopyTest, cls).setUpClass()
    # destination repos: delete any leftovers first; no need to wait
    # for the delete tasks before re-creating
    copy_id = cls.__name__ + '_copy'
    Repo({'id': copy_id}).delete(cls.pulp)
    cls.dest_repo1, _, _ = YumRepo(
        id=copy_id,
        importer=YumImporter(None),
        distributors=[YumDistributor(relative_url='abc')]).create(cls.pulp)
    # second destination repo
    copy_id = cls.__name__ + '_copy1'
    Repo({'id': copy_id}).delete(cls.pulp)
    cls.dest_repo2, _, _ = YumRepo(
        id=copy_id,
        importer=YumImporter(None),
        distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
    # source repo comes from the rpm role; sync it so there is content to copy
    rpm_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.source_repo, _, _ = YumRepo.from_role(rpm_config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
def testcase_02_upload_rpm(self):
    """Upload one rpm (by url) into a freshly created yum repo."""
    # yum repo with importer and a single distributor
    repo, importer, [distributor] = YumRepo(
        id='upload_test_rpm_repo',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(self.pulp)
    # repo objects get cleaned up no matter how the upload goes
    with deleting(self.pulp, repo, importer, distributor) as (repo, importer, distributor):
        # create and perform an rpm url upload
        self.rpm_uploader(self.pulp, self.rpm_url_bear, repo, distributor)
def setUpClass(cls):
    """Create a repo group with an export distributor plus a synced rpm repo."""
    super(PublishGroupTest, cls).setUpClass()
    # create the repo group (and a second, not-yet-created one)
    group = RepoGroup(data={'id': cls.__name__ + "_repo_group"})
    cls.repo_group = RepoGroup.from_response(group.create(cls.pulp))
    cls.repo_group1 = RepoGroup(data={'id': cls.__name__ + "_repo_group1"})
    # associate a group export distributor (http/https serving disabled)
    with cls.pulp.asserting(True):
        dist_response = cls.repo_group.associate_distributor(
            cls.pulp,
            data={
                'distributor_type_id': 'group_export_distributor',
                'distributor_config': {'http': False, 'https': False},
                'distributor_id': 'dist_1',
            })
    cls.distributor = GroupDistributor.from_response(dist_response)
    # create and sync an rpm repo to publish through the group
    rpm_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.repo, importer, [distributor] = YumRepo.from_role(rpm_config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
def setUpClass(cls):
    """Create a repo group + export distributor and a synced rpm repo."""
    super(PublishGroupTest, cls).setUpClass()
    # create repo_group
    cls.repo_group = RepoGroup.from_response(
        RepoGroup(data={'id': cls.__name__ + "_repo_group"}).create(cls.pulp))
    cls.repo_group1 = RepoGroup(data={'id': cls.__name__ + "_repo_group1"})
    # associate a group export distributor (serving over http/https disabled)
    export_dist_data = {
        'distributor_type_id': 'group_export_distributor',
        'distributor_config': {'http': False, 'https': False},
        'distributor_id': 'dist_1',
    }
    with cls.pulp.asserting(True):
        response = cls.repo_group.associate_distributor(cls.pulp, data=export_dist_data)
    cls.distributor = GroupDistributor.from_response(response)
    # create and sync an rpm repo
    config = [r for r in ROLES.repos if r.type == 'rpm'][0]
    cls.repo, importer, [distributor] = YumRepo.from_role(config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
def setUpClass(cls):
    """Create an rpm repo with its importer/distributor, plus spare repo data."""
    super(ImporterDistributorTest, cls).setUpClass()
    # repo created from the rpm role gets importer/distributor associated
    rpm_config = [r for r in ROLES.repos if r.type == 'rpm'][0]
    cls.repo, _, [cls.distributor] = YumRepo.from_role(rpm_config).create(cls.pulp)
    cls.importer = cls.repo.get_importer(cls.pulp, "yum_importer")
    # repo data only --- not created on the server here
    cls.repo1 = Repo(data={'id': cls.__name__ + "_repo1"})
def setUpClass(cls):
    """Create a synced source puppet repo, two destination repos and helpers."""
    super(PuppetCopyRepoTest, cls).setUpClass()
    # this repo role is hardwired because of the search strings
    # refering to exact names as e.g. tomcat7_rhel
    # The proxy role is considered
    source_role = {
        'id': cls.__name__,
        'feed': 'https://forge.puppetlabs.com',
        'queries': ['tomcat'],
        'proxy': ROLES.get('proxy'),
    }
    # source repo, synced so the modules get fetched
    cls.source_repo, _, _ = PuppetRepo.from_role(source_role).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
    # two destination repos for the copy tests (feed-less importer)
    no_feed_importer = PuppetImporter(feed=None, queries=[])
    dists = [PuppetDistributor()]
    cls.dest_repo1, _, _ = PuppetRepo(id=cls.__name__ + '1',
                                      importer=no_feed_importer,
                                      distributors=dists).create(cls.pulp)
    cls.dest_repo2, _, _ = PuppetRepo(id=cls.__name__ + '2',
                                      importer=no_feed_importer,
                                      distributors=dists).create(cls.pulp)
    # repo data that is never created on the server
    cls.invalid_repo = Repo(data={'id': cls.__name__ + "_invalidrepo"})
    # a yum repo, presumably for cross-type copy checks
    cls.yumrepo, _, _ = YumRepo(
        id=cls.__name__ + 'yum',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
def setUpClass(cls):
    """Create two repos; sync and publish the first one."""
    super(PackageCategoryTest, cls).setUpClass()
    # first repo comes from the rpm role under a per-class id
    role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0].copy()
    role.id = cls.__name__ + '_repo1'
    cls.repo1, cls.importer1, [cls.distributor1] = \
        YumRepo.from_role(role).create(cls.pulp)
    # second repo is feed-less
    cls.repo2, cls.importer2, [cls.distributor2] = YumRepo(
        id=cls.__name__ + '_repo2',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
    # sync repo1
    with cls.pulp.asserting(True):
        response = cls.repo1.sync(cls.pulp)
    Task.wait_for_report(cls.pulp, response)
    # publish repo1
    with cls.pulp.asserting(True):
        response = cls.repo1.publish(cls.pulp, cls.distributor1.id)
    Task.wait_for_report(cls.pulp, response)
def setUpClass(cls):
    """Register a consumer keyed with RSA keys and set up its qpid agent."""
    super(ConsumerAuthTest, cls).setUpClass()
    cls.ROLES = ROLES
    cls.PROFILE = PROFILE
    # load the primary/secondary consumer private keys
    cls.rsa_primary = RSA.load_key('/usr/share/pulp_auto/tests/data/fake-consumer.pem')
    cls.rsa_secondary = RSA.load_key('/usr/share/pulp_auto/tests/data/fake-consumer-secondary.pem')
    # dump both public keys to PEM strings
    buf = BIO.MemoryBuffer()
    cls.rsa_primary.save_pub_key_bio(buf)
    cls.pub_pem_primary = buf.getvalue()
    buf = BIO.MemoryBuffer()
    cls.rsa_secondary.save_pub_key_bio(buf)
    cls.pub_pem_secondary = buf.getvalue()
    rpm_role = [repo for repo in cls.ROLES.repos if repo.type == 'rpm'][0]
    cls.repo, cls.importer, [cls.distributor] = YumRepo.from_role(rpm_role).create(cls.pulp)
    # register the consumer with the primary public key
    cls.consumer = Consumer.register(cls.pulp, cls.__name__ + '_consumer',
                                     rsa_pub=cls.pub_pem_primary)
    cls.agent = Agent(pulp_auto.handler, PROFILE=pulp_auto.handler.profile.PROFILE)
    # messages signed with the primary key; pulp's pubkey used for verification
    cls.qpid_handle = QpidHandle(cls.ROLES.qpid.url, cls.consumer.id,
                                 auth=Authenticator(signing_key=cls.rsa_primary,
                                                    verifying_key=cls.pulp.pubkey))
def setUpClass(cls):
    """Create an rpm repo, look up its importer and prepare a spare repo id."""
    super(ImporterDistributorTest, cls).setUpClass()
    # create repo with importer/distributor associated (from the rpm role)
    role_cfg = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.repo, _, [cls.distributor] = YumRepo.from_role(role_cfg).create(cls.pulp)
    cls.importer = cls.repo.get_importer(cls.pulp, "yum_importer")
    # repo record only --- not created on the server in this fixture
    cls.repo1 = Repo(data={'id': cls.__name__ + "_repo1"})
def setUpClass(cls):
    """Set up the repo shared by the event-listener test cases."""
    super(EventListenerTest, cls).setUpClass()
    role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0].copy()
    role.id = 'EventListenerRepo'
    # this test case relies on exact events counts;
    # auto_publish would mess things up
    role.auto_publish = False
    cls.repo, cls.importer, [cls.distributor] = YumRepo.from_role(role).create(cls.pulp)
def setUpClass(cls):
    """Sync a repo and attach a node distributor so nodes can feed from it."""
    super(NodeTestRepo, cls).setUpClass()
    cls.node.activate(cls.pulp)
    cls.repo, cls.importer, [cls.distributor] = \
        YumRepo.from_role(ROLES.repos[0]).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
    # associate the default node distributor with the synced repo
    node_dist_response = cls.repo.associate_distributor(
        cls.pulp, NodeDistributor.default().data)
    cls.node_distributor = NodeDistributor.from_response(node_dist_response)
def testcase_03_parallel_upload_rpms(self):
    """Upload two rpms into one repo concurrently via gevent greenlets."""
    import gevent
    # create yum-repo, -importer, -distributor
    repo, importer, [distributor] = YumRepo(
        id='upload_test_rpm_repo',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor) as (repo, importer, distributor):
        # BUG FIX: the original `lambda:` closed over the loop variable `url`
        # late-bound; since the greenlets only run at joinall(), every job
        # could end up uploading the *last* url. Binding `url=url` as a
        # default argument freezes the value per job.
        jobs = [gevent.spawn(lambda url=url: self.rpm_uploader(self.pulp, url, repo, distributor))
                for url in [self.rpm_url_bear, self.rpm_url_mouse]]
        gevent.joinall(jobs, raise_error=True)
def setUpClass(cls):
    """Create a repo (facade) and a cli-backed consumer."""
    super(RegRepoFeedTest, cls).setUpClass()
    role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0].copy()
    role.id = cls.__name__ + '_repo'
    # create repo
    cls.repo, cls.importer, [cls.distributor] = \
        YumRepoFacade.from_role(role).create(cls.pulp)
    # create consumer with a ready cli instance attached
    consumer_role = ROLES.consumers[0]
    cls.consumer = Consumer(consumer_role)
    cls.consumer.cli = Cli.ready_instance(**consumer_role)
def setUpClass(cls):
    """Produce orphaned content units by syncing and then deleting a repo."""
    super(SimpleOrphanTest, cls).setUpClass()
    rpm_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    # make sure the repo is gone before we start
    Repo(rpm_config).delete(cls.pulp)
    # create and sync the repo
    cls.repo, _, _ = YumRepo.from_role(rpm_config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
    # deleting the synced repo is what leaves its units orphaned
    Task.wait_for_report(cls.pulp, cls.repo.delete(cls.pulp))
def _test_02_repo_publish_finish(self):
    """Check a failed repo publish POSTs its task report to the listener bin.

    Leading underscore keeps the runner from collecting this test.
    """
    # BUG FIX: listener config key should be 'event_types' (as in
    # test_01_repo_sync_finish), not 'event_listener'
    self.el.update(self.pulp, {'event_types': ['repo.publish.finish']})
    self.el.reload(self.pulp)
    repo_role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    # invalid distributor type id guarantees the publish fails
    repo, importer, [distributor] = YumRepo(
        id='publish_error_repo',
        importer=Importer.from_role(repo_role),
        distributors=[YumDistributor(distributor_type_id='invalid_distributor_id',
                                     relative_url='xyz')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.publish(self.pulp, 'invalid_distributor_id')
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the publish task failed
            Task.wait_for_report(self.pulp, response)
        # BUG FIX: was assigned to 'task' but read below as 'tasks' (NameError)
        tasks = Task.from_report(self.pulp, response)
    # assert the bin contains a request with a failed task in body
    self.bin.reload()
    assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
    el_request = self.bin.requests[0]
    assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
    el_task = Task.from_call_report_data(json.loads(el_request.body))
    assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
    assert el_task.id in [task.id for task in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def setUpClass(cls):
    """Create a synced rpm repo with a once-a-minute scheduled sync."""
    super(ScheduledSyncTest, cls).setUpClass()
    # create and sync rpm repo
    rpm_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.repo, _, _ = YumRepo.from_role(rpm_config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
    # schedule a sync every minute (ISO-8601 period "PT1M") on the importer
    cls.importer = cls.repo.get_importer(cls.pulp, "yum_importer")
    schedule_response = cls.importer.schedule_sync(cls.pulp, "PT1M")
    cls.action = ScheduledAction.from_response(schedule_response)
    # wall-clock mark two schedule periods from now; presumably used by the
    # test cases as a deadline
    cls.delta = time.time() + 120
def testcase_01_upload_and_download_using_dnf_rpm(self):
    """Upload an rpm to a fresh repo, publish it, and fetch it back via dnf."""
    # create yum-repo, -importer, -distributor
    repo, importer, [distributor] = YumRepo(
        id='test_22_rpm_repo_for_dnf',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        # create and perform an rpm url upload
        with deleting(self.pulp, upload_url_rpm(self.pulp, self.rpm_url)) as (upload,):
            # assign the upload to the repo
            response = upload.import_to(self.pulp, repo)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, response)
            # check the content is accessible
            response = repo.publish(self.pulp, distributor.id)
            self.assertPulpOK()
            Task.wait_for_report(self.pulp, response)
            # fetch the package through the repo
            pulp_rpm_url = distributor.content_url(self.pulp, url_basename(self.rpm_url))
            pulp_repo = distributor.content_url(self.pulp)
            # the downloaded file's metadata name must match the uploaded url basename
            with closing(temp_url(pulp_rpm_url)) as tmpfile:
                assert url_basename(self.rpm_url).startswith(rpm_metadata(tmpfile)['unit_key']['name'])
            # dnf must be able to install "bear" from the published repo
            assert "bear" == download_package_with_dnf(self.pulp, pulp_repo, "bear")
def setUpClass(cls):
    """Create the shared EventListenerRepo with auto_publish disabled."""
    super(EventListenerTest, cls).setUpClass()
    rpm_role = [r for r in ROLES.repos if r.type == 'rpm'][0].copy()
    rpm_role.id = 'EventListenerRepo'
    # the tests rely on exact event counts --- auto_publish would add
    # publish events and mess things up
    rpm_role.auto_publish = False
    cls.repo, cls.importer, [cls.distributor] = \
        YumRepo.from_role(rpm_role).create(cls.pulp)
def setUpClass(cls):
    """Activate the node and prepare a synced repo with a node distributor."""
    super(NodeTestRepo, cls).setUpClass()
    cls.node.activate(cls.pulp)
    role = ROLES.repos[0]
    cls.repo, cls.importer, [cls.distributor] = YumRepo.from_role(role).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
    # attach the default node distributor so nodes can feed from this repo
    response = cls.repo.associate_distributor(cls.pulp, NodeDistributor.default().data)
    cls.node_distributor = NodeDistributor.from_response(response)
def setUpClass(cls):
    """Create a synced+published rpm repo with a per-minute publish schedule."""
    super(ScheduledPublishTest, cls).setUpClass()
    # create, sync and publish an rpm repo
    rpm_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.repo, _, [cls.distributor] = YumRepo.from_role(rpm_config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
    Task.wait_for_report(cls.pulp, cls.repo.publish(cls.pulp, cls.distributor.id))
    # re-fetch the distributor and schedule a publish every minute ("PT1M")
    cls.distributor = cls.repo.get_distributor(cls.pulp, cls.distributor.id)
    schedule_response = cls.distributor.schedule_publish(cls.pulp, "PT1M")
    cls.action = ScheduledAction.from_response(schedule_response)
    # wall-clock mark two schedule periods from now; presumably a test deadline
    cls.delta = time.time() + 120
def setUpClass(cls):
    """Create two repo facades and a cli-backed consumer for copy tests."""
    super(RegRepoCopyTest, cls).setUpClass()
    # first repo comes from the rpm role, under a per-class id
    role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0].copy()
    role.id = cls.__name__ + '_repo1'
    cls.repo1, cls.importer1, [cls.distributor1] = \
        YumRepoFacade.from_role(role).create(cls.pulp)
    # second repo is feed-less
    cls.repo2, cls.importer2, [cls.distributor2] = YumRepoFacade(
        id=cls.__name__ + '_repo2',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
    # consumer with a ready cli instance attached
    consumer_role = ROLES.consumers[0]
    cls.consumer = Consumer(consumer_role)
    cls.consumer.cli = Cli.ready_instance(**consumer_role)
def setUpClass(cls): super(CliConsumerTest, cls).setUpClass() # create all repos # get repo configs across all consumers repo_configs = sum([consumer.repos for consumer in ROLES.consumers], []) # filter&uniq repo configs repo_configs = { repo.id: repo for repo in repo_configs if repo.type == 'rpm' }.values() with cls.pulp.asserting(True): cls.repos = [ FacadeYumRepo.from_role(repo_config).create(cls.pulp) for repo_config in repo_configs ] # sync&publish all repos with cls.pulp.asserting(True): task_reports = [repo.sync(cls.pulp) for repo, _, _ in cls.repos] Task.wait_for_reports(cls.pulp, task_reports) with cls.pulp.asserting(True): task_reports = [ repo.publish(cls.pulp, distributor.id) for repo, _, [distributor] in cls.repos ] Task.wait_for_reports(cls.pulp, task_reports) # create all consumers # gather all consumers consumer_configs = { consumer.id: consumer for consumer in ROLES.consumers } with cls.pulp.asserting(True): cls.consumers = [ Consumer(consumer_config) for consumer_config in consumer_configs.values() ] # set up consumer cli & link repos for consumer in cls.consumers: setattr(consumer, 'cli', Cli.ready_instance(**consumer_configs[consumer.id])) setattr( consumer, 'repos', filter( lambda (repo, importer, distributors): any(repo.id == repo_config.id for repo_config in consumer_configs[consumer.id].repos), cls.repos))
def setUpClass(cls):
    """Create the test repo facade and a consumer driven over cli."""
    super(RegRepoFeedTest, cls).setUpClass()
    rpm_role = [r for r in ROLES.repos if r.type == 'rpm'][0].copy()
    rpm_role.id = cls.__name__ + '_repo'
    # create repo
    cls.repo, cls.importer, [cls.distributor] = \
        YumRepoFacade.from_role(rpm_role).create(cls.pulp)
    # create consumer
    cls.consumer = Consumer(ROLES.consumers[0])
    setattr(cls.consumer, 'cli', Cli.ready_instance(**ROLES.consumers[0]))
def test_01_repo_sync_finish(self):
    """Check a failed repo sync POSTs its task report to the event-listener bin.

    The feed deliberately points nowhere so the sync is guaranteed to fail.
    """
    # listen for sync-finish events only
    self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
    self.el.reload(self.pulp)
    # NOTE: removed a dead local (an rpm repo_role was computed but never used)
    repo, importer, [distributor] = YumRepo(
        id='sync_error_repo',
        importer=YumImporter(feed='http://example.com/repos/none'),
        distributors=[YumDistributor(relative_url='/repos/none')]).create(self.pulp)
    with deleting(self.pulp, repo, importer, distributor):
        response = repo.sync(self.pulp)
        self.assertPulpOK()
        with self.assertRaises(TaskFailure):
            # make sure the sync did not succeed
            Task.wait_for_report(self.pulp, response)
        tasks = Task.from_report(self.pulp, response)
    # assert the bin contains request with a failed task in body
    self.bin.reload()
    assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
    el_request = self.bin.requests[0]
    assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
    el_task = Task.from_call_report_data(json.loads(el_request.body))
    # doesn't work and won't get fixed --- disabling
    # assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
    assert el_task.id in [task.id for task in tasks], 'invalid request.body:Task.id: %s' % el_task.id
def test_01(self): errors = 0 # change the range to 300-600 and run it separately from the rest of testcases for i in xrange(3): with self.agent.catching(True), self.agent.running(self.qpid_handle, frequency=10): try: self.bindRepo() self._tearDown() except (TaskFailure, TaskTimeoutError) as error: print '> ', errors += 1 print error repo_role = [repo for repo in self.ROLES.repos if repo.type == 'rpm'][0] self.repo, self.importer, [self.distributor] = YumRepo.from_role(repo_role).create(self.pulp) print i print "errors", errors
def setUpClass(cls):
    """Sync and publish an rpm repo, then schedule publishes every minute."""
    super(ScheduledPublishTest, cls).setUpClass()
    # create and sync and publish rpm repo
    config = [r for r in ROLES.repos if r.type == 'rpm'][0]
    cls.repo, _, [cls.distributor] = YumRepo.from_role(config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
    Task.wait_for_report(cls.pulp, cls.repo.publish(cls.pulp, cls.distributor.id))
    # create a schedule publish; publish will be done every minute
    cls.distributor = cls.repo.get_distributor(cls.pulp, cls.distributor.id)
    response = cls.distributor.schedule_publish(cls.pulp, "PT1M")
    cls.action = ScheduledAction.from_response(response)
    # wall-clock mark two schedule periods ahead; presumably a wait deadline
    cls.delta = time.time() + 120
def setUpClass(cls):
    """Load consumer RSA keys, register a consumer and wire up qpid messaging."""
    super(ConsumerAuthTest, cls).setUpClass()
    cls.ROLES = ROLES
    cls.PROFILE = PROFILE
    cls.rsa_primary = RSA.load_key('/usr/share/pulp_auto/tests/data/fake-consumer.pem')
    cls.rsa_secondary = RSA.load_key('/usr/share/pulp_auto/tests/data/fake-consumer-secondary.pem')
    # serialize both public keys to PEM
    mem = BIO.MemoryBuffer()
    cls.rsa_primary.save_pub_key_bio(mem)
    cls.pub_pem_primary = mem.getvalue()
    mem = BIO.MemoryBuffer()
    cls.rsa_secondary.save_pub_key_bio(mem)
    cls.pub_pem_secondary = mem.getvalue()
    role = [repo for repo in cls.ROLES.repos if repo.type == 'rpm'][0]
    cls.repo, cls.importer, [cls.distributor] = YumRepo.from_role(role).create(cls.pulp)
    # consumer registered under the primary public key
    cls.consumer = Consumer.register(cls.pulp, cls.__name__ + '_consumer',
                                     rsa_pub=cls.pub_pem_primary)
    cls.agent = Agent(pulp_auto.handler, PROFILE=pulp_auto.handler.profile.PROFILE)
    cls.qpid_handle = QpidHandle(
        cls.ROLES.qpid.url, cls.consumer.id,
        auth=Authenticator(signing_key=cls.rsa_primary,
                           verifying_key=cls.pulp.pubkey))
def setUpClass(cls):
    """Create the two copy-test repo facades and a cli consumer."""
    super(RegRepoCopyTest, cls).setUpClass()
    # create repo (per-class id derived from the rpm role)
    copy_role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0].copy()
    copy_role.id = cls.__name__ + '_repo1'
    cls.repo1, cls.importer1, [cls.distributor1] = \
        YumRepoFacade.from_role(copy_role).create(cls.pulp)
    # a second, feed-less repo
    cls.repo2, cls.importer2, [cls.distributor2] = YumRepoFacade(
        id=cls.__name__ + '_repo2',
        importer=YumImporter(feed=None),
        distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
    # create consumer
    cls.consumer = Consumer(ROLES.consumers[0])
    setattr(cls.consumer, 'cli', Cli.ready_instance(**ROLES.consumers[0]))
def setUpClass(cls):
    """Create and sync one puppet repo and one rpm repo for unit searches."""
    super(UnitSearchTest, cls).setUpClass()
    # create and sync puppet repo
    # FIXME: hardwired role details
    puppet_role = {
        'id': cls.__name__,
        'queries': ['jenkins'],
        'feed': 'https://forge.puppetlabs.com',
        'proxy': ROLES.get('proxy'),
    }
    # make sure we run clean: delete any leftover repo, waiting if accepted (202)
    delete_response = Repo({'id': puppet_role['id']}).delete(cls.pulp)
    if delete_response == ResponseLike(202):
        Task.wait_for_report(cls.pulp, delete_response)
    cls.repo1, _, _ = PuppetRepo.from_role(puppet_role).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo1.sync(cls.pulp))
    # create and sync rpm repo
    rpm_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.repo2, _, _ = YumRepo.from_role(rpm_config).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.repo2.sync(cls.pulp))
def test_01(self): errors = 0 # change the range to 300-600 and run it separately from the rest of testcases for i in xrange(3): with self.agent.catching(True), self.agent.running( self.qpid_handle, frequency=10): try: self.bindRepo() self._tearDown() except (TaskFailure, TaskTimeoutError) as error: print '> ', errors += 1 print error repo_role = [ repo for repo in self.ROLES.repos if repo.type == 'rpm' ][0] self.repo, self.importer, [ self.distributor ] = YumRepo.from_role(repo_role).create(self.pulp) print i print "errors", errors
def setUpClass(cls):
    """Prepare two clean destination repos and one synced source repo."""
    super(SimpleRepoCopyTest, cls).setUpClass()
    # destination repos --- delete leftovers first; no need to wait
    # for repos.delete to happen
    name = cls.__name__ + '_copy'
    Repo({'id': name}).delete(cls.pulp)
    cls.dest_repo1, _, _ = YumRepo(
        id=name,
        importer=YumImporter(None),
        distributors=[YumDistributor(relative_url='abc')]).create(cls.pulp)
    # 2nd destination repo
    name = cls.__name__ + '_copy1'
    Repo({'id': name}).delete(cls.pulp)
    cls.dest_repo2, _, _ = YumRepo(
        id=name,
        importer=YumImporter(None),
        distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
    # source repo, synced so there is content available for copying
    source_cfg = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
    cls.source_repo, _, _ = YumRepo.from_role(source_cfg).create(cls.pulp)
    Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
def setUpClass(cls): super(CliConsumerTest, cls).setUpClass() # create all repos # get repo configs across all consumers repo_configs = sum([consumer.repos for consumer in ROLES.consumers], []) # filter&uniq repo configs repo_configs = {repo.id: repo for repo in repo_configs if repo.type == 'rpm'}.values() with cls.pulp.asserting(True): cls.repos = [ FacadeYumRepo.from_role(repo_config).create(cls.pulp) for repo_config in repo_configs ] # sync&publish all repos with cls.pulp.asserting(True): task_reports = [repo.sync(cls.pulp) for repo, _, _ in cls.repos] Task.wait_for_reports(cls.pulp, task_reports) with cls.pulp.asserting(True): task_reports = [ repo.publish(cls.pulp, distributor.id) for repo, _, [distributor] in cls.repos] Task.wait_for_reports(cls.pulp, task_reports) # create all consumers # gather all consumers consumer_configs = {consumer.id: consumer for consumer in ROLES.consumers} with cls.pulp.asserting(True): cls.consumers = [Consumer(consumer_config) for consumer_config in consumer_configs.values()] # set up consumer cli & link repos for consumer in cls.consumers: setattr(consumer, 'cli', Cli.ready_instance(**consumer_configs[consumer.id])) setattr( consumer, 'repos', filter( lambda (repo, importer, distributors): any(repo.id == repo_config.id for repo_config in consumer_configs[consumer.id].repos), cls.repos ) )