def setUpClass(cls):
     '''Create the repos the puppet copy tests operate on.

     Sets up a synced puppet source repo, two feed-less destination repos
     (content arrives only via copy), an invalid repo object that is never
     created in pulp, and a yum repo for cross-type copy checks.
     '''
     super(PuppetCopyRepoTest, cls).setUpClass()
     # this repo role is hardwired because of the search strings
     # referring to exact names as e.g. tomcat7_rhel
     # The proxy role is considered
     repo = {
         'id': cls.__name__,
         'feed': 'https://forge.puppetlabs.com',
         'queries': ['tomcat'],
         'proxy': ROLES.get('proxy'),
     }
     # create source repo and sync it to have modules fetched
     cls.source_repo, _, _ = PuppetRepo.from_role(repo).create(cls.pulp)
     Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
     # create two destinations repos for copy purpose
     importer = PuppetImporter(feed=None, queries=[])
     distributors = [PuppetDistributor()]
     cls.dest_repo1, _, _ = PuppetRepo(id=cls.__name__ + '1',
                                       importer=importer,
                                       distributors=distributors).create(
                                           cls.pulp)
     cls.dest_repo2, _, _ = PuppetRepo(id=cls.__name__ + '2',
                                       importer=importer,
                                       distributors=distributors).create(
                                           cls.pulp)
     # create data for repo
     # NOTE: invalid_repo is only instantiated --- never .create()d here
     cls.invalid_repo = Repo(data={'id': cls.__name__ + "_invalidrepo"})
     # create yum repo
     cls.yumrepo, _, _ = YumRepo(
         id=cls.__name__ + 'yum',
         importer=YumImporter(feed=None),
         distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
    def setUpClass(cls):
        '''Create a repo group with a group-export distributor and a synced rpm repo.'''
        super(PublishGroupTest, cls).setUpClass()
        # create repo_group
        repo_group=RepoGroup(data={'id': cls.__name__ + "_repo_group"})
        response=repo_group.create(cls.pulp)
        cls.repo_group = RepoGroup.from_response(response)
        # repo_group1 is only instantiated --- not created in pulp here
        cls.repo_group1 = RepoGroup(data={'id': cls.__name__ + "_repo_group1"})

        #associate_distributor
        with cls.pulp.asserting(True):
            response = cls.repo_group.associate_distributor(
                cls.pulp,
                data={
                    'distributor_type_id': 'group_export_distributor',
                    'distributor_config': {
                        'http': False,
                        'https': False
                    },
                    'distributor_id': 'dist_1'
                }
            )
        cls.distributor = GroupDistributor.from_response(response)
        #create repo
        repo_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
        cls.repo, importer, [distributor] = YumRepo.from_role(repo_config).create(cls.pulp)
        Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
 def setUpClass(cls):
     '''Create two puppet repos sharing the same query set; sync the first.'''
     super(PuppetSearchRepoTest, cls).setUpClass()
     base_id = cls.__name__
     search_queries = ['tomcat']
     for attr, suffix in (('repo', ''), ('repo1', '1')):
         created, _, _ = create_puppet_repo(cls.pulp, base_id + suffix,
                                            search_queries)
         setattr(cls, attr, created)
     Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
示例#4
0
 def _test_02_repo_publish_finish(self):
     '''A repo.publish.finish listener must POST the failed publish task
     to the bin. (Leading underscore keeps the case out of the test run.)
     '''
     # FIX: the notifier update field is 'event_types'; the original used
     # the bogus key 'event_listener' (cf. test_01_repo_sync_finish)
     self.el.update(self.pulp, {'event_types': ['repo.publish.finish']})
     self.el.reload(self.pulp)
     repo_role = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
     repo, importer, [distributor] = YumRepo(
         id='publish_error_repo',
         importer=Importer.from_role(repo_role),
         distributors=[
             YumDistributor(distributor_type_id='invalid_distributor_id',
                            relative_url='xyz')
         ]).create(self.pulp)
     with deleting(self.pulp, repo, importer, distributor):
         response = repo.publish(self.pulp, 'invalid_distributor_id')
         self.assertPulpOK()
         with self.assertRaises(TaskFailure):
             # make sure the publish task failed
             Task.wait_for_report(self.pulp, response)
         # FIX: bind the spawned tasks as 'tasks' --- the original stored
         # them in 'task' and then referenced the undefined name 'tasks'
         tasks = Task.from_report(self.pulp, response)
         # assert the bin contains a request with a failed task in body
         self.bin.reload()
         assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
         el_request = self.bin.requests[0]
         assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
         el_task = Task.from_call_report_data(json.loads(el_request.body))
         assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
         assert el_task.id in [
             task.id for task in tasks
         ], 'invalid request.body:Task.id: %s' % el_task.id
示例#5
0
 def test_01_repo_sync_finish(self):
     '''A repo.sync.finish listener must POST the failed sync task to the bin.'''
     self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
     self.el.reload(self.pulp)
     # the bogus feed makes the sync fail deterministically
     # FIX: dropped the unused 'repo_role' local the original computed here
     repo, importer, [distributor] = YumRepo(
         id='sync_error_repo',
         importer=YumImporter(feed='http://example.com/repos/none'),
         distributors=[YumDistributor(relative_url='/repos/none')
                       ]).create(self.pulp)
     with deleting(self.pulp, repo, importer, distributor):
         response = repo.sync(self.pulp)
         self.assertPulpOK()
         with self.assertRaises(TaskFailure):
             # make sure the sync did not succeed
             Task.wait_for_report(self.pulp, response)
         tasks = Task.from_report(self.pulp, response)
         # assert the bin contains request with a failed task in body
         self.bin.reload()
         assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
         el_request = self.bin.requests[0]
         assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
         el_task = Task.from_call_report_data(json.loads(el_request.body))
         # doesn't work and won't get fixed --- disabling
         # assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
         assert el_task.id in [
             task.id for task in tasks
         ], 'invalid request.body:Task.id: %s' % el_task.id
示例#6
0
 def bindRepo(self):
     '''bind the consumer to the repo's distributor; to be run within agent running ctx'''
     with self.pulp.asserting(True):
         bind_report = self.consumer.bind_distributor(
             self.pulp, self.repo.id, self.distributor.id)
         self.assertPulp(code=202)
         Task.wait_for_report(self.pulp, bind_report, timeout=5)
示例#7
0
    def test_01_package_category_create(self):
        '''Upload a package category referencing existing package groups
        and verify the category lists exactly those group ids.
        '''
        response = self.repo1.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["package_group"],"limit": 1}}
        )
        self.assertPulp(code=200)
        result = Association.from_response(response)
        # make a list of names
        # FIX: comprehension replaces the old range(len(...)) append loop
        groupList = [assoc.data['metadata']['name'] for assoc in result]
        #create metadata for package category import
        data = package_category_metadata(self.repo1.id+"_category1", self.repo1.id, groupList)

        #actually upload category
        with deleting(self.pulp, Upload.create(self.pulp, data=data)) as (upload,) :
            Task.wait_for_report(self.pulp, upload.import_to(self.pulp, self.repo1))
        self.assertPulp(code=200)

        #check that group is there and contains specified packages
        response = self.repo1.within_repo_search(
            self.pulp,
            data={"criteria": {"type_ids": ["package_category"],
                  "filters": {"unit": {"id": data["unit_key"]["id"]}}}}
        )
        self.assertPulp(code=200)
        result = Association.from_response(response)
        self.assertEqual(result[0].data["metadata"]["packagegroupids"],
                         data["unit_metadata"]["packagegroupids"])
 def test_05_associate_importer(self):
     '''associate a docker importer and verify its stored configuration'''
     response = self.repo.associate_importer(
         self.pulp,
         data={
             'importer_type_id': 'docker_importer',
             'importer_config': {
                 'feed': self.feed,
                 'upstream_name': 'busybox',
             },
         })
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
     stored = self.repo.get_importer(self.pulp, "docker_importer")
     expected = {
         'id': 'docker_importer',
         'importer_type_id': 'docker_importer',
         'repo_id': self.repo.id,
         'config': {
             'feed': self.feed,
             'upstream_name': 'busybox',
         },
         'last_sync': None,
     }
     self.assertEqual(stored, expected)
 def test_07_no_unassociation_within_repo_with_feed(self):
     '''repos with a feed can nowadays delete partial content inside them'''
     criteria = {
         "type_ids": ["iso"],
         "filters": {"unit": {"name": "test.iso"}},
     }
     report = self.source_repo.unassociate_units(self.pulp,
                                                 data={"criteria": criteria})
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
    def setUpClass(cls):
        '''Create two feed-less destination repos and a synced source repo
        for the copy tests.
        '''
        super(SimpleRepoCopyTest, cls).setUpClass()
        #Destination repo
        # make sure repos don't exist
        # no need to wait for repos.delete to happen
        dest_repo_name = cls.__name__ + '_copy'
        dest_repo1 = Repo({'id': dest_repo_name})
        dest_repo1.delete(cls.pulp)
        cls.dest_repo1, _, _ = YumRepo(
            id=dest_repo_name,
            importer=YumImporter(None),
            distributors=[YumDistributor(relative_url='abc')]).create(cls.pulp)

        #2nd Destination Repo
        dest_repo_name = cls.__name__ + '_copy1'
        dest_repo2 = Repo({'id': dest_repo_name})
        dest_repo2.delete(cls.pulp)
        cls.dest_repo2, _, _ = YumRepo(
            id=dest_repo_name,
            importer=YumImporter(None),
            distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)

        # Source repo
        default_repo_config = [
            repo for repo in ROLES.repos if repo.type == 'rpm'
        ][0]
        cls.source_repo, _, _ = YumRepo.from_role(default_repo_config).create(
            cls.pulp)
        Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
示例#11
0
 def test_02_associate_importer(self):
     '''importer_config queries select which puppet modules get synced'''
     response = self.repo.associate_importer(
         self.pulp,
         data={
             'importer_type_id': 'puppet_importer',
             'importer_config': {
                 'feed': self.feed,
                 'queries': ["stdlib", "yum"],
             },
         })
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
     stored = self.repo.get_importer(self.pulp, "puppet_importer")
     expected = {
         'id': 'puppet_importer',
         'importer_type_id': 'puppet_importer',
         'repo_id': self.repo.id,
         'config': {
             'feed': self.feed,
             'queries': ["stdlib", "yum"],
         },
         'last_sync': None,
     }
     self.assertEqual(stored, expected)
 def test_03_repo_sync_finish(self):
     '''A repo.sync.finish listener must POST the finished sync task to the bin.'''
     self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
     self.assertPulpOK()
     self.el.reload(self.pulp)
     report = self.repo.sync(self.pulp)
     # wait till the sync is done
     Task.wait_for_report(self.pulp, report)
     # fetch the tasks sync-call has spawned
     tasks = Task.from_report(self.pulp, report)
     assert tasks, 'no tasks induced'
     # check the requestsb.in got notified
     self.bin.reload()
     assert self.bin.request_count == 1, 'invalid event listener requests count: %s' % \
                                             self.bin.request_count
     el_request = self.bin.requests[0]
     assert el_request.method == 'POST', 'invalid request method: %s' % el_request.method
     # assert the bin was posted no sooner than all tasks have finished
     # NOTE(review): the assert below requires a NON-empty list, i.e. at
     # least one task finished after the request --- that seems to
     # contradict the comment above; verify the intended semantics
     tasks_finished_after_request = [task.id for task in tasks if el_request.time < task.finish_time]
     assert tasks_finished_after_request, 'tasks %s finished after request at: %s' % \
             (tasks_finished_after_request, el_request.time)
     # the request body contains a task
     el_task = Task.from_call_report_data(json.loads(el_request.body))
     # doesn't work and won't get fixed --- disabling
     # assert el_task.state == TASK_FINISHED_STATE, 'invalid task state: %s' % el_task.state
     el_task.reload(self.pulp)
     # assert proper task was posted
     assert el_task.id in [task.id for task in tasks], 'invalid task id posted: %s' % el_task.id
     assert sorted([u'pulp:repository:EventListenerRepo', u'pulp:action:sync']) == sorted(el_task.data['tags']), \
             'invalid task tags: %s' % el_task.data['tags']
 def test_12_delete_repos(self):
     '''tear down every repo this class created'''
     repo_ids = (self.dest_repo1.id, self.dest_repo2.id,
                 self.source_repo.id, self.yumrepo.id)
     for repo_id in repo_ids:
         report = Repo({'id': repo_id}).delete(self.pulp)
         Task.wait_for_report(self.pulp, report)
    def test_02_repo_sync_start(self):
        '''A repo.sync.start listener must POST the running sync task to the
        bin before any spawned task finishes.
        '''
        self.el.update(self.pulp, {'event_types': ['repo.sync.start']})
        self.assertPulpOK()
        self.el.reload(self.pulp)
        report = self.repo.sync(self.pulp)
        # wait till the sync is done
        Task.wait_for_report(self.pulp, report)
        # keep track of all the spawned tasks
        tasks = Task.from_report(self.pulp, report)
        assert tasks, 'no tasks induced'
        # fetch the request as POSTed by pulp event listener to the bin (http://requestb.in/<bin_id>)
        self.bin.reload()
        assert self.bin.request_count == 1, 'invalid event listener POST count (%s)' \
                                                % self.bin.request_count
        el_request = self.bin.requests[0]
        assert el_request.method == 'POST', 'invalid request method: %s' % el_request.method
        # assert the bin was POSTed no later any task finished
        tasks_finished_before_request = [task.id for task in tasks if el_request.time > task.finish_time]
        assert tasks_finished_before_request == [], 'tasks %s finished before request at: %s' % \
                (tasks_finished_before_request, el_request.time)
        # FIXME: not yet specified in docs: assert the bin was not POSTed before any task has started
        # tasks_started_after_request = [task.id for task in tasks if el_request.time < task.start_time]
        # assert tasks_started_after_request == [], 'tasks %s started after request at: %s' % \
        #        (tasks_started_after_request, el_request.time)

        # assert there's a task POSTed to the bin with the same ID pulp reported with sync
        # request.body contains original POSTed task-report-data --- create a Task object from it
        el_task = Task.from_call_report_data(json.loads(el_request.body))
        assert el_task.state == TASK_RUNNING_STATE, 'invalid task state: %s' % el_task.state
        el_task.reload(self.pulp)
        # assert the task is indeed in the tasks list spawned by pulp to perform repo sync
        assert el_task.id in [task.id for task in tasks], 'invalid task id posted: %s' % el_task.id
        assert sorted([u'pulp:repository:EventListenerRepo', u'pulp:action:sync']) == sorted(el_task.data['tags']), \
                'invalid task tags: %s' % el_task.data['tags']
 def test_02_repo_publish_finish(self):
     '''A repo.publish.finish listener must POST the failed publish task
     to the bin.
     '''
     # FIX: the notifier update field is 'event_types'; the original used
     # the bogus key 'event_listener' (cf. the sync tests in this file)
     self.el.update(self.pulp, {'event_types': ['repo.publish.finish']})
     self.el.reload(self.pulp)
     with deleting(
             self.pulp,
             *create_yum_repo(
                 self.pulp,
                 'publish_error_repo',
                 feed=
                 'https://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/zoo/'
             )) as (repo, (importer, distributor)):
         # publishing through a nonexistent distributor id makes the task fail
         response = repo.publish(self.pulp, 'invalid_distributor_id')
         self.assertPulpOK()
         with self.assertRaises(TaskFailure):
             # make sure the publish task failed
             Task.wait_for_report(self.pulp, response)
         # FIX: bind the spawned tasks as 'tasks' --- the original stored
         # them in 'task' and then referenced the undefined name 'tasks'
         tasks = Task.from_report(self.pulp, response)
         # assert the bin contains a request with a failed task in body
         self.bin.reload()
         assert self.bin.request_count == 1, 'invalid bin.request_count: %s' % self.bin.request_count
         el_request = self.bin.requests[0]
         assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
         el_task = Task.from_call_report_data(json.loads(el_request.body))
         assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
         assert el_task.id in [
             task.id for task in tasks
         ], 'invalid request.body:Task.id: %s' % el_task.id
示例#16
0
    def setUpClass(cls):
        '''Create two feed-less destination repos plus a synced source repo
        (via the create_yum_repo helper) for the copy tests.
        '''
        super(SimpleRepoCopyTest, cls).setUpClass()
        #Destination repo
        # make sure repos don't exist
        # no need to wait for repos.delete to happen
        feed = None
        dest_repo_name = cls.__name__ + '_copy'
        dest_repo1 = Repo({'id': dest_repo_name})
        dest_repo1.delete(cls.pulp)
        cls.dest_repo1, _, _ = create_yum_repo(cls.pulp, dest_repo_name, feed)

        #2nd Destination Repo
        dest_repo_name = cls.__name__ + '_copy1'
        dest_repo2 = Repo({'id': dest_repo_name})
        dest_repo2.delete(cls.pulp)
        cls.dest_repo2, _, _ = create_yum_repo(cls.pulp, dest_repo_name, feed)

        # Source repo
        default_repo_config = [
            repo for repo in ROLES.repos if repo.type == 'rpm'
        ][0]
        source_repo_name = cls.__name__ + '_repo'
        source_repo = Repo({'id': source_repo_name})
        source_repo.delete(cls.pulp)
        cls.source_repo, _, _ = create_yum_repo(cls.pulp, source_repo_name,
                                                default_repo_config.feed)
        Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
 def test_01_publish_repo_group_with_no_members_bz1148937(self):
     '''publishing a repo group with no member repos must succeed (BZ 1148937)'''
     report = self.repo_group.publish(self.pulp, 'dist_1')
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
 def test_05_unassociate_iso_from_copied_repo(self):
     '''unassociate a single iso unit from the copy-destination repo'''
     criteria = {
         "type_ids": ["iso"],
         "filters": {"unit": {"name": "test.iso"}},
     }
     report = self.dest_repo1.unassociate_units(self.pulp,
                                                data={"criteria": criteria})
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
示例#19
0
 def test_05_associate_importer(self):
     '''Associate a docker importer and verify its stored configuration.'''
     response = self.repo.associate_importer(self.pulp,
                                             data={
                                                 'importer_type_id':
                                                 'docker_importer',
                                                 'importer_config': {
                                                     'feed': self.feed,
                                                     "upstream_name":
                                                     "busybox"
                                                 }
                                             })
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
     # fetch the importer back and compare it against what was submitted
     importer = self.repo.get_importer(self.pulp, "docker_importer")
     self.assertEqual(
         importer, {
             'id': 'docker_importer',
             'importer_type_id': 'docker_importer',
             'repo_id': self.repo.id,
             'config': {
                 'feed': self.feed,
                 "upstream_name": "busybox"
             },
             'last_sync': None
         })
 def test_10_repos_no_feed_cannot_be_synced(self):
     '''syncing a feed-less repo is accepted (202) but its task must fail'''
     report = self.dest_repo2.sync(self.pulp)
     self.assertPulp(code=202)
     with self.assertRaises(TaskFailure), self.pulp.asserting(True):
         Task.wait_for_report(self.pulp, report)
示例#21
0
 def test_99_node_unbind_repo(self):
     '''unbind the repo from the node, then purge repos left on the child'''
     self.node.unbind_repo(self.pulp, self.repo.id, self.node_distributor.id)
     self.assertPulpOK()
     # nodes keep the repos after updating --- delete them explicitly
     for child_repo in Repo.list(self.pulp_child):
         report = child_repo.delete(self.pulp_child)
         Task.wait_for_report(self.pulp_child, report)
 def test_06_assert_unit_install(self):
     '''install the "pike" rpm on the consumer and check it landed there'''
     unit_key = {"name": "pike"}
     expected = RpmUnit(unit_key, relevant_data_keys=unit_key.keys())
     with self.pulp.asserting(True):
         report = self.consumer.install_unit(self.pulp, unit_key, "rpm")
     Task.wait_for_report(self.pulp, report)
     installed = RpmUnit.list(self.consumer.cli)
     assert expected in installed, "rpm %s not installed on %s" % (expected, self.consumer)
 def test_02_assert_unit_update(self):
     '''trigger an rpm update of "pike" across the consumer group'''
     unit_key = {"name": "pike"}
     update_options = {"apply": True, "reboot": False, "importkeys": False}
     report = self.consumer_group.update_unit(self.pulp, unit_key, "rpm",
                                              options=update_options)
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
 def test_01_repo_content_applicability(self):
     '''kick off content applicability generation for selected repos'''
     repo_criteria = {
         "filters": {"id": {"$in": ["test-repo", "test-errata"]}}
     }
     response = RepoAppl.applicability(self.pulp,
                                       data={"repo_criteria": repo_criteria})
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
    def test_02_associate_importer(self):
        '''query/queries added to the importer_config select which
        modules have to be synced'''
        response = self.repo.associate_importer(
            self.pulp,
            data={
                'importer_type_id': 'puppet_importer',
                'importer_config': {
                    'feed': self.feed,
                    'queries': ["stdlib", "yum"]
                }
            }
        )
        self.assertPulp(code=202)
        Task.wait_for_report(self.pulp, response)
        # fetch the importer back and compare it against what was submitted
        importer = self.repo.get_importer(self.pulp, "puppet_importer")
        self.assertEqual(
            importer,
            {
                'id': 'puppet_importer',
                'importer_type_id': 'puppet_importer',
                'repo_id': self.repo.id,
                'config': {
                    'feed': self.feed,
                    'queries': ["stdlib", "yum"]

                },
                'last_sync': None
            }
        )
 def test_02_update_importer_config_1078296(self):
     '''update importer config (num_units) through the repo update call'''
     update_data = {"importer_config": {"num_units": 6}}
     response = self.repo1.update(self.pulp, data=update_data)
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
 def test_05_repo_publish_finish(self):
     '''A repo.publish.finish listener must POST the finished publish task
     to the bin.
     '''
     self.el.update(self.pulp, {'event_types': ['repo.publish.finish']})
     self.assertPulpOK()
     self.el.reload(self.pulp)
     report = self.repo.publish(self.pulp, self.distributor.id)
     # wait till publish-induced tasks finish
     Task.wait_for_report(self.pulp, report)
     # fetch the tasks spawned for the publish to perform
     tasks = [task for task in Task.from_report(self.pulp, report) \
             if u'pulp:action:publish' in task.data['tags']]
     assert tasks, 'no tasks induced'
     # assert bin status
     self.bin.reload()
     assert self.bin.request_count == 1, 'invalid event listener requests count: %s' % \
             self.bin.request_count
     el_request = self.bin.requests[0]
     # assert request method
     assert el_request.method == 'POST', 'invalid request method: %s' % el_request.method
     # assert the request was made after all tasks finished
     tasks_finished_after_request = [task.id for task in tasks if el_request.time < task.finish_time]
     # doesn't work --- disabling
     #assert tasks_finished_after_request == [], '%s finished after request at %s' % \
     #        (tasks_finished_after_request, el_request.time)
     # the request body contains a task
     el_task = Task.from_call_report_data(json.loads(el_request.body))
     #assert el_task.state == TASK_FINISHED_STATE, 'invalid task state: %s' % el_task.state
     el_task.reload(self.pulp)
     # assert proper task was posted
     assert el_task.id in [task.id for task in tasks], 'invalid task id posted: %s' % el_task.id
     assert sorted([u'pulp:repository:EventListenerRepo', u'pulp:action:publish']) == sorted(el_task.data['tags']), \
             'invalid task tags: %s' % el_task.data['tags']
 def test_12_publish_repo(self):
     '''publish the repo via the dist_1 distributor'''
     report = self.repo.publish(self.pulp, 'dist_1')
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
 def setUpClass(cls):
     '''two puppet repos sharing the tomcat query; only the first is synced'''
     super(PuppetSearchRepoTest, cls).setUpClass()
     queries = ['tomcat']
     cls.repo, _, _ = create_puppet_repo(cls.pulp, cls.__name__, queries)
     cls.repo1, _, _ = create_puppet_repo(cls.pulp, cls.__name__ + '1',
                                          queries)
     Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
示例#30
0
 def test_10_repos_no_feed_cannot_be_synced(self):
     '''a repo without a feed accepts the sync call yet its task must fail'''
     sync_report = self.dest_repo2.sync(self.pulp)
     self.assertPulp(code=202)
     with self.assertRaises(TaskFailure), self.pulp.asserting(True):
         Task.wait_for_report(self.pulp, sync_report)
 def setUpClass(cls):
     '''Create the repos the puppet copy tests operate on: a synced puppet
     source repo, two feed-less destination repos, an (uncreated) invalid
     repo object and a yum repo.
     '''
     super(PuppetCopyRepoTest, cls).setUpClass()
     # this repo role is hardwired because of the search strings
     # referring to exact names as e.g. tomcat7_rhel
     # The proxy role is considered
     repo = {
         'id': cls.__name__,
         'feed': 'https://forge.puppetlabs.com',
         'queries': ['tomcat'],
         'proxy': ROLES.get('proxy'),
     }
     # create source repo and sync it to have modules fetched
     cls.source_repo, _, _ = PuppetRepo.from_role(repo).create(cls.pulp)
     Task.wait_for_report(cls.pulp, cls.source_repo.sync(cls.pulp))
     # create two destinations repos for copy purpose
     importer = PuppetImporter(feed=None, queries=[])
     distributors = [PuppetDistributor()]
     cls.dest_repo1, _, _ = PuppetRepo(id=cls.__name__ + '1', importer=importer,
                 distributors=distributors).create(cls.pulp)
     cls.dest_repo2, _, _ = PuppetRepo(id=cls.__name__ + '2', importer=importer,
                 distributors=distributors).create(cls.pulp)
     # create data for repo
     # NOTE: invalid_repo is only instantiated --- never .create()d here
     cls.invalid_repo = Repo(data={'id': cls.__name__ + "_invalidrepo"})
     # create yum repo
     cls.yumrepo, _, _ = YumRepo(id=cls.__name__ + 'yum', importer=YumImporter(feed=None),
                             distributors=[YumDistributor(relative_url='xyz')]).create(cls.pulp)
 def testcase_01_upload_and_download_using_dnf_rpm(self):
     '''Upload an rpm from a url into a fresh repo, publish it, then fetch
     the package back through the distributor and install it with dnf.
     '''
     # create yum-repo, -importer, -distributor
     with deleting(self.pulp,
                   *create_yum_repo(
                       self.pulp,
                       'test_22_rpm_repo_for_dnf')) as (repo,
                                                        (importer,
                                                         (distributor))):
         # create and perform an rpm url upload
         with deleting(self.pulp, upload_url_rpm(self.pulp,
                                                 self.rpm_url)) as upload:
             # assign the upload to the repo
             response = upload.import_to(self.pulp, repo)
             self.assertPulpOK()
             Task.wait_for_report(self.pulp, response)
             # check the content is accessible
             response = repo.publish(self.pulp, distributor.id)
             self.assertPulpOK()
             Task.wait_for_report(self.pulp, response)
             # fetch the package through the repo
             pulp_rpm_url = distributor.content_url(
                 self.pulp, url_basename(self.rpm_url))
             pulp_repo = distributor.content_url(self.pulp)
             # the downloaded file's unit-key name should prefix the
             # original rpm file name
             with closing(temp_url(pulp_rpm_url)) as tmpfile:
                 assert url_basename(self.rpm_url).startswith(
                     rpm_metadata(tmpfile)['unit_key']['name'])
             # NOTE(review): assumes self.rpm_url points at the "bear"
             # package --- confirm against the fixture definition
             assert "bear" == download_package_with_dnf(
                 self.pulp, pulp_repo, "bear")
 def test_01_repo_sync_finish(self):
     '''A repo.sync.finish listener must POST the failed sync task to the bin.'''
     self.el.update(self.pulp, {'event_types': ['repo.sync.finish']})
     self.el.reload(self.pulp)
     # the bogus feed makes the sync fail deterministically
     with deleting(
             self.pulp,
             *create_yum_repo(
                 self.pulp,
                 'sync_error_repo',
                 feed='http://example.com/repos/none')) as (repo,
                                                            (importer,
                                                             distributor)):
         response = repo.sync(self.pulp)
         self.assertPulpOK()
         with self.assertRaises(TaskFailure):
             # make sure the sync did not succeed
             Task.wait_for_report(self.pulp, response)
         tasks = Task.from_report(self.pulp, response)
         # assert the bin contains request with a failed task in body
         self.bin.reload()
         assert self.bin.request_count == 1, 'invalid bin.request count: %s' % self.bin.request_count
         el_request = self.bin.requests[0]
         assert el_request.method == 'POST', 'invalid bin request method: %s' % el_request.method
         el_task = Task.from_call_report_data(json.loads(el_request.body))
         assert el_task.state == TASK_ERROR_STATE, 'invalid request.body:Task.state: %s' % el_task.state
         assert el_task.id in [
             task.id for task in tasks
         ], 'invalid request.body:Task.id: %s' % el_task.id
    def setUpClass(cls):
        '''Create a repo group with a group-export distributor plus a synced
        rpm repo (via the create_yum_repo helper).
        '''
        super(PublishGroupTest, cls).setUpClass()
        # create repo_group
        repo_group = RepoGroup(data={'id': cls.__name__ + "_repo_group"})
        response = repo_group.create(cls.pulp)
        cls.repo_group = RepoGroup.from_response(response)
        # repo_group1 is only instantiated --- not created in pulp here
        cls.repo_group1 = RepoGroup(data={'id': cls.__name__ + "_repo_group1"})

        #associate_distributor
        with cls.pulp.asserting(True):
            response = cls.repo_group.associate_distributor(
                cls.pulp,
                data={
                    'distributor_type_id': 'group_export_distributor',
                    'distributor_config': {
                        'http': False,
                        'https': False
                    },
                    'distributor_id': 'dist_1'
                })
        cls.distributor = GroupDistributor.from_response(response)
        #create repo
        repo_config = [repo for repo in ROLES.repos if repo.type == 'rpm'][0]
        cls.repo, _, _ = create_yum_repo(cls.pulp, **repo_config)
        Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
示例#35
0
 def test_08_publish_repo(self):
     '''publish through distributor dist_1 and wait for the publish task'''
     response = self.repo.publish(self.pulp, 'dist_1')
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
示例#36
0
 def wrapper_ctx(thing):
     '''Yield thing while its delete() call is managed; assert pulp is ok after.

     NOTE(review): this is a generator (contains yield) --- presumably
     wrapped with contextlib.contextmanager at the definition site and
     closing over 'pulp'; confirm against the enclosing scope.
     '''
     with calling_method(thing, 'delete', pulp) as thing:
         yield thing
     # async-delete hacks
     # a 202 response means the delete spawned a task --- wait for it
     if pulp.last_response.status_code == 202:
         Task.wait_for_report(pulp, pulp.last_response)
     assert pulp.is_ok, 'deleting %s caused pulp not feeling ok: %s' % \
             (thing, pulp.last_response)
 def _tearDown(self):
     '''delete the repo while the agent runs and catches, asserting pulp is ok'''
     with self.pulp.asserting(True):
         with self.agent.catching(True):
             with self.agent.running(self.qpid_handle, frequency=10):
                 Task.wait_for_report(self.pulp,
                                      self.repo.delete(self.pulp),
                                      timeout=20)
# ----- example #38 -----
 def test_99_node_unbind_repo(self):
     '''unbind the repo from the node, then wipe all repos off the child'''
     self.node.unbind_repo(self.pulp, self.repo.id,
                           self.node_distributor.id)
     self.assertPulpOK()
     # nodes keep the repos after updating -- delete them explicitly
     for child_repo in Repo.list(self.pulp_child):
         Task.wait_for_report(self.pulp_child,
                              child_repo.delete(self.pulp_child))
# ----- example #39 -----
    def test_01_sync_repo_and_publish(self):
        '''sync the repo, then publish it via distributor1'''
        with self.pulp.asserting(True):
            sync_report = self.repo.sync(self.pulp)
        Task.wait_for_report(self.pulp, sync_report)

        with self.pulp.asserting(True):
            publish_report = self.repo.publish(self.pulp,
                                               self.distributor1.id)
        Task.wait_for_report(self.pulp, publish_report)
# ----- example #40 -----
 def test_06_associate_importer(self):
     '''associate an importer and verify it is retrievable afterwards'''
     importer_data = YumImporter.from_role(self.repo_role).as_data()
     report = self.repo.associate_importer(self.pulp, data=importer_data)
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
     importer = self.repo.get_importer(self.pulp, importer_data['id'])
     # fixed as a doc bug https://bugzilla.redhat.com/show_bug.cgi?id=1076225
     self.assertEqual(importer.id, importer_data['id'])
 def test_01_applicabilty_consumers(self):
     '''regenerate content applicability for the selected consumers'''
     criteria = {
         "consumer_criteria": {
             "filters": {"id": {"$in": ["sunflower", "voyager"]}}
         }
     }
     response = ConsumersApplicability.regenerate(self.pulp, data=criteria)
     # regeneration is asynchronous: expect 202 then a task to wait on
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
 def test_05_unassociate_module_from_copied_repo_1076628(self):
     '''unassociate a unit from a copied repo
     https://bugzilla.redhat.com/show_bug.cgi?id=1076628'''
     criteria = {
         "type_ids": ["puppet_module"],
         "filters": {"unit": {"name": "tomcat7_rhel"}}
     }
     response = self.dest_repo1.unassociate_units(
         self.pulp, data={"criteria": criteria})
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
    def test_01_sync_repo_and_publish(self):
        '''sync the repo and publish it through distributor1'''
        with self.pulp.asserting(True):
            task_report = self.repo.sync(self.pulp)
        Task.wait_for_report(self.pulp, task_report)

        with self.pulp.asserting(True):
            task_report = self.repo.publish(self.pulp, self.distributor1.id)
        Task.wait_for_report(self.pulp, task_report)
 def test_06_associate_importer(self):
     '''associate an importer built from the repo role, then read it back'''
     data = YumImporter.from_role(self.repo_role).as_data()
     report = self.repo.associate_importer(self.pulp, data=data)
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
     fetched = self.repo.get_importer(self.pulp, data['id'])
     # fixed as a doc bug https://bugzilla.redhat.com/show_bug.cgi?id=1076225
     self.assertEqual(fetched.id, data['id'])
 def test_04_importer_update(self):
     '''update importer config and check the change persisted server-side'''
     update_payload = {"importer_config": {"num_units": 10}}
     response = self.importer.update(self.pulp, data=update_payload)
     Task.wait_for_report(self.pulp, response)
     # re-fetch from the server before asserting
     self.importer.reload(self.pulp)
     self.assertEqual(self.importer.data["config"]["num_units"], 10)
 def test_08_publish_repo(self):
     '''publish through the iso distributor configured for the repo role'''
     iso_distributor = IsoDistributor.from_role(self.repo_role)
     response = self.repo.publish(self.pulp,
                                  iso_distributor.distributor_id)
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
 def test_04_distributor_update(self):
     '''update the distributor relative_url and check it persisted'''
     new_config = {"distributor_config": {"relative_url": "my_url"}}
     report = self.distributor.update(self.pulp, data=new_config)
     Task.wait_for_report(self.pulp, report)
     # re-fetch the distributor state before asserting
     self.distributor.reload(self.pulp)
     self.assertEqual(self.distributor.data["config"]["relative_url"],
                      "my_url")
# ----- example #48 -----
 def setUpClass(cls):
     '''set up a synced yum repo with a node distributor for nodes to feed from'''
     super(NodeTestRepo, cls).setUpClass()
     cls.node.activate(cls.pulp)
     role = ROLES.repos[0]
     cls.repo, cls.importer, cls.distributor = \
         create_yum_repo(cls.pulp, **role)
     Task.wait_for_report(cls.pulp, cls.repo.sync(cls.pulp))
     dist_response = cls.repo.associate_distributor(
         cls.pulp, NodeDistributor.default().data)
     cls.node_distributor = NodeDistributor.from_response(dist_response)
 def tearDown(self):
     '''delete repo binding; runs within a "correct" agent running ctx'''
     # assert pulp responses, catch agent exceptions, keep the agent
     # polling so the unbind can propagate to the consumer
     with self.pulp.asserting(True), \
         self.agent.catching(True), \
         self.agent.running(self.qpid_handle, frequency=10) \
     :
         # unbinding is asynchronous (202); wait for the spawned task
         report = self.consumer.unbind_distributor(self.pulp, self.repo.id, self.distributor.id)
         self.assertPulp(code=202)
         Task.wait_for_report(self.pulp, report)
 def tearDownClass(cls):
     # tear down with the agent still running so consumer-side hooks can
     # fire; agent exceptions are deliberately not asserted (catching(False))
     with \
         cls.pulp.asserting(True), \
         cls.agent.catching(False), \
         cls.agent.running(cls.qpid_handle, frequency=10) \
     :
         # repo delete is async --- wait for the task before removing the consumer
         Task.wait_for_report(cls.pulp, cls.repo.delete(cls.pulp))
         cls.consumer.delete(cls.pulp)
     super(ConsumerAuthTest, cls).tearDownClass()
# ----- example #51 -----
 def test_07_assert_unit_install(self):
     '''install the 'pike' rpm on the consumer and verify it via its cli'''
     unit = {'name': 'pike'}
     rpm = RpmUnit(unit, relevant_data_keys=unit.keys())
     with self.pulp.asserting(True):
         report = self.consumer.install_unit(self.pulp, unit, 'rpm')
     Task.wait_for_report(self.pulp, report)
     installed = RpmUnit.list(self.consumer.cli)
     assert rpm in installed, "rpm %s not installed on %s" % (rpm,
                                                             self.consumer)
 def tearDownClass(cls):
     # delete the repo and consumer while the agent keeps running;
     # agent exceptions are not asserted here (catching(False))
     with \
         cls.pulp.asserting(True), \
         cls.agent.catching(False), \
         cls.agent.running(cls.qpid_handle, frequency=10) \
     :
         # repo delete is asynchronous; wait for its task first
         Task.wait_for_report(cls.pulp, cls.repo.delete(cls.pulp))
         cls.consumer.delete(cls.pulp)
     super(ConsumerAuthTest, cls).tearDownClass()
 def test_07_sync_repo(self):
     '''re-sync the repo and check the puppet_module unit count changed'''
     before = Repo.get(self.pulp, self.repo.id).data['content_unit_counts']['puppet_module']
     response = self.repo.sync(self.pulp)
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)
     after = Repo.get(self.pulp, self.repo.id).data['content_unit_counts']['puppet_module']
     # FIXME result can change with time as number of modules is not constant!
     # check that the second i.e. updated query was also processed;
     # assertNotEqual reports both operand values on failure, unlike
     # the original assertTrue(x != y)
     self.assertNotEqual(before, after)
 def test_04_sync_repo(self):
     '''On the initial sync, all modules (matching any queries if
     specified) are downloaded to the Pulp server.  Subsequent syncs
     fetch only new modules and new versions of existing modules; any
     modules that were once present in the feed but have since been
     removed are removed from the Pulp repository as well.'''
     report = self.repo.sync(self.pulp)
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, report)
 def test_03_copy_1_module(self):
     # NOTE(review): the method name says "module" but the criteria target
     # the "iso" type and the "test.iso" unit -- confirm that is intended
     criteria = {
         "type_ids": ["iso"],
         "filters": {"unit": {"name": "test.iso"}}
     }
     response = self.dest_repo2.copy(self.pulp,
                                     self.source_repo.id,
                                     data={"criteria": criteria})
     self.assertPulp(code=202)
     Task.wait_for_report(self.pulp, response)