Example #1
 def test_import_cached_manifest_missing_units(self, *unused):
     # Setup
     self.populate()
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         configuration = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, configuration)
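         # Clear distributor, association, and unit records so the sync below starts from an empty database.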
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         publisher = dist.publisher(repo, configuration)
         manifest_path = publisher.manifest_path()
         manifest = Manifest(manifest_path)
         manifest.read()
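         # Cache only the manifest in the working dir; the units file is intentionally left out.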
         shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
         # Test
         importer = NodesHttpImporter()
         manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
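     # Sync as the child: storage_dir switches to the child filesystem while CONTENT_PATH is patched to the parent's.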
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEqual(len(units), self.NUM_UNITS)
Example #2
 def test_import_modified_units(self, mock_get_working, *mocks):
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     max_concurrency = 5
     max_bandwidth = 12345
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         cfg = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID,
                                      constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, cfg)
         # Make the stored units appear older than the published ones so the sync updates them back.
         collection = connection.get_collection(
             unit_db.unit_collection_name(self.UNIT_TYPE_ID))
         # N=0 (no file)
         unit = collection.find_one({'N': 0})
         unit['age'] = 84  # this will be updated back to 42.
         unit['_last_updated'] -= 1
         unit['_storage_path'] = None
         collection.update({'N': 0}, unit)
         # N=1
         unit = collection.find_one({'N': 1})
         unit['age'] = 85  # this will be updated back to 42.
         unit['_last_updated'] -= 1
         collection.update({'N': 1}, unit)
         # Test
         importer = NodesHttpImporter()
         publisher = dist.publisher(repo, cfg)
         manifest_url = pathlib.url_join(publisher.base_url,
                                         publisher.manifest_path())
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
             importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
             importer_constants.KEY_MAX_SPEED: max_bandwidth,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         unit = collection.find_one({'N': 0})
         self.assertEqual(unit['age'], 42)
         unit = collection.find_one({'N': 1})
         self.assertEqual(unit['age'], 42)
Example #3
 def test_import_unit_files_already_exist_size_mismatch(
         self, mock_get_working, *mocks):
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         cfg = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID,
                                      constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, cfg)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
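         # Copy the parent's content to the child, then truncate the files to create a size mismatch.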
         parent_content = os.path.join(self.parentfs, 'content')
         child_content = os.path.join(self.childfs, 'content')
         shutil.copytree(parent_content, child_content)
         for fn in os.listdir(child_content):
             path = os.path.join(child_content, fn)
             if os.path.isdir(path):
                 continue
             with open(path, 'w') as fp:
                 fp.truncate()
         # Test
         importer = NodesHttpImporter()
         publisher = dist.publisher(repo, cfg)
         manifest_url = pathlib.url_join(publisher.base_url,
                                         publisher.manifest_path())
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEqual(len(units), self.NUM_UNITS)
         mock_importer_config_to_nectar_config = mocks[0]
         mock_importer_config_to_nectar_config.assert_called_with(
             configuration.flatten())
Example #4
    def populate(self, mock_repo_ctrl):
        # make content/ dir.
        os.makedirs(os.path.join(self.parentfs, 'content'))

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            units = self.add_units(0, self.NUM_UNITS)
            self.units = units
Example #5
    def test_publish(self, mock_repo_ctrl):
        # Setup
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            # Test
            dist = NodesHttpDistributor()
            repo = Repository(self.REPO_ID)
            conduit = RepoPublishConduit(self.REPO_ID,
                                         constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, self.dist_conf())
            # Verify
            conf = DownloaderConfig()
            downloader = LocalFileDownloader(conf)
            pub = dist.publisher(repo, self.dist_conf())
            url = pathlib.url_join(pub.base_url, pub.manifest_path())
            working_dir = self.childfs
            manifest = RemoteManifest(url, downloader, working_dir)
            manifest.fetch()
            manifest.fetch_units()
            units = [u for u, r in manifest.get_units()]
            self.assertEqual(len(units), self.NUM_UNITS)
            for n in range(0, self.NUM_UNITS):
                unit = units[n]
                created = self.units[n]
                for p, v in unit['unit_key'].items():
                    self.assertEqual(created[p], v)
                for p, v in unit['metadata'].items():
                    if p in ('_ns', '_content_type_id'):
                        continue
                    self.assertEqual(created[p], v)
                self.assertEqual(created.get('_storage_path'),
                                 unit['storage_path'])
                self.assertEqual(unit['type_id'], self.UNIT_TYPE_ID)
Example #6
    def test_payload(self):
        # Setup
        self.populate()

        # Test
        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            repo = Repository(self.REPO_ID)
            payload = dist.create_consumer_payload(repo, self.dist_conf(), {})
            with open('/tmp/payload', 'w+') as f:
                f.write(repr(payload['importers']))

        # Verify
        distributors = payload['distributors']
        importers = payload['importers']
        repository = payload['repository']
        self.assertTrue(isinstance(distributors, list))
        self.assertTrue(isinstance(importers, list))
        self.assertTrue(isinstance(repository, dict))
        self.assertEqual(len(importers), 1)
        for key in ('id', 'importer_type_id', 'config'):
            self.assertTrue(key in importers[0])
        for key in (constants.MANIFEST_URL_KEYWORD,
                    constants.STRATEGY_KEYWORD):
            self.assertTrue(key in importers[0]['config'])
Example #7
    def test_payload(self, mock_repo_model):
        mock_repo = mock_repo_model.objects.get_repo_or_missing_resource.return_value
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            repo = Repository(self.REPO_ID)
            payload = dist.create_consumer_payload(repo, self.dist_conf(), {})
            with open('/tmp/payload', 'w+') as f:
                f.write(repr(payload['importers']))

        # Verify
        distributors = payload['distributors']
        importers = payload['importers']
        repository = payload['repository']
        self.assertTrue(isinstance(distributors, list))
        self.assertTrue(isinstance(importers, list))
        self.assertEqual(repository['id'], mock_repo.repo_id)
        self.assertEqual(repository['display_name'], mock_repo.display_name)
        self.assertEqual(repository['description'], mock_repo.description)
        self.assertEqual(repository['notes'], mock_repo.notes)
        self.assertEqual(repository['scratchpad'], mock_repo.scratchpad)
        self.assertEqual(len(importers), 1)
        for key in ('id', 'importer_type_id', 'config'):
            self.assertTrue(key in importers[0])
        for key in (constants.MANIFEST_URL_KEYWORD,
                    constants.STRATEGY_KEYWORD):
            self.assertTrue(key in importers[0]['config'])
Example #8
    def populate(self, mock_repo_ctrl):
        # make content/ dir.
        os.makedirs(os.path.join(self.parentfs, 'content'))

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            units = self.add_units(0, self.NUM_UNITS)
            self.units = units
Example #9
    def test_payload(self, mock_repo_model):
        mock_repo = mock_repo_model.objects.get_repo_or_missing_resource.return_value
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            repo = Repository(self.REPO_ID)
            payload = dist.create_consumer_payload(repo, self.dist_conf(), {})
            with open('/tmp/payload', 'w+') as f:
                f.write(repr(payload['importers']))

        # Verify
        distributors = payload['distributors']
        importers = payload['importers']
        repository = payload['repository']
        self.assertTrue(isinstance(distributors, list))
        self.assertTrue(isinstance(importers, list))
        self.assertEqual(repository['id'], mock_repo.repo_id)
        self.assertEqual(repository['display_name'], mock_repo.display_name)
        self.assertEqual(repository['description'], mock_repo.description)
        self.assertEqual(repository['notes'], mock_repo.notes)
        self.assertEqual(repository['scratchpad'], mock_repo.scratchpad)
        self.assertEqual(len(importers), 1)
        for key in ('id', 'importer_type_id', 'config'):
            self.assertTrue(key in importers[0])
        for key in (constants.MANIFEST_URL_KEYWORD, constants.STRATEGY_KEYWORD):
            self.assertTrue(key in importers[0]['config'])
Example #10
    def test_publish(self, mock_repo_ctrl):
        # Setup
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            # Test
            dist = NodesHttpDistributor()
            repo = Repository(self.REPO_ID)
            conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, self.dist_conf())
            # Verify
            conf = DownloaderConfig()
            downloader = LocalFileDownloader(conf)
            pub = dist.publisher(repo, self.dist_conf())
            url = pathlib.url_join(pub.base_url, pub.manifest_path())
            working_dir = self.childfs
            manifest = RemoteManifest(url, downloader, working_dir)
            manifest.fetch()
            manifest.fetch_units()
            units = [u for u, r in manifest.get_units()]
            self.assertEqual(len(units), self.NUM_UNITS)
            for n in range(0, self.NUM_UNITS):
                unit = units[n]
                created = self.units[n]
                for p, v in unit['unit_key'].items():
                    self.assertEqual(created[p], v)
                for p, v in unit['metadata'].items():
                    if p in ('_ns', '_content_type_id'):
                        continue
                    self.assertEqual(created[p], v)
                self.assertEqual(created.get('_storage_path'), unit['storage_path'])
                self.assertEqual(unit['type_id'], self.UNIT_TYPE_ID)
Example #11
    def test_create_consumer_payload(self):
        local_distributor = YumHTTPDistributor()
        repo = Mock()
        repo.display_name = 'foo'
        repo.id = 'bar'
        config = {'https_ca': 'pear',
                  'gpgkey': 'kiwi',
                  'auth_cert': 'durian',
                  'auth_ca': True,
                  'http': True,
                  'https': True}
        binding_config = {}
        cert_file = os.path.join(self.working_dir, "orange_file")

        with mock_config.patch({'server': {'server_name': 'apple'},
                                'security': {'ssl_ca_certificate': cert_file}}):
            with open(cert_file, 'w') as filewriter:
                filewriter.write("orange")

            result = local_distributor.create_consumer_payload(repo, config, binding_config)

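            # Expected payload: server_name comes from the patched config, ca_cert from the file written above.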
            target = {
                'server_name': 'apple',
                'ca_cert': 'orange',
                'relative_path': '/pulp/repos/bar',
                'gpg_keys': {'pulp.key': 'kiwi'},
                'client_cert': 'durian',
                'protocols': ['http', 'https'],
                'repo_name': 'foo'
            }
            compare_dict(result, target)
Example #12
 def test_import_modified_units(self, mock_get_working, *mocks):
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     max_concurrency = 5
     max_bandwidth = 12345
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         cfg = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, cfg)
         # Make the stored units appear older than the published ones so the sync updates them back.
         collection = connection.get_collection(unit_db.unit_collection_name(self.UNIT_TYPE_ID))
         # N=0 (no file)
         unit = collection.find_one({'N': 0})
         unit['age'] = 84  # this will be updated back to 42.
         unit['_last_updated'] -= 1
         unit['_storage_path'] = None
         collection.update({'N': 0}, unit)
         # N=1
         unit = collection.find_one({'N': 1})
         unit['age'] = 85   # this will be updated back to 42.
         unit['_last_updated'] -= 1
         collection.update({'N': 1}, unit)
         # Test
         importer = NodesHttpImporter()
         publisher = dist.publisher(repo, cfg)
         manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
             importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
             importer_constants.KEY_MAX_SPEED: max_bandwidth,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         unit = collection.find_one({'N': 0})
         self.assertEqual(unit['age'], 42)
         unit = collection.find_one({'N': 1})
         self.assertEqual(unit['age'], 42)
Example #13
    def test_import_unit_files_already_exist_size_mismatch(self, *mocks):
        # Setup
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            cfg = self.dist_conf()
            conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, cfg)
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            parent_content = os.path.join(self.parentfs, 'content')
            child_content = os.path.join(self.childfs, 'content')
            shutil.copytree(parent_content, child_content)
            for fn in os.listdir(child_content):
                path = os.path.join(child_content, fn)
                if os.path.isdir(path):
                    continue
                with open(path, 'w') as fp:
                    fp.truncate()
            # Test
            importer = NodesHttpImporter()
            publisher = dist.publisher(repo, cfg)
            manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
            configuration = {
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            }
            configuration = PluginCallConfiguration(configuration, {})
            conduit = RepoSyncConduit(
                self.REPO_ID,
                constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            units = conduit.get_units()
            self.assertEqual(len(units), self.NUM_UNITS)
            mock_importer_config_to_nectar_config = mocks[0]
            mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
Example #14
 def populate(self):
     # make content/ dir.
     os.makedirs(os.path.join(self.parentfs, 'content'))
     repository = model.Repository()
     repository.repo_id = self.REPO_ID
     repository.save()
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         units = self.add_units(0, self.NUM_UNITS)
         self.units = units
Example #15
 def populate(self):
     # make content/ dir.
     os.makedirs(os.path.join(self.parentfs, 'content'))
     repository = model.Repository()
     repository.repo_id = self.REPO_ID
     repository.save()
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         units = self.add_units(0, self.NUM_UNITS)
         self.units = units
Example #16
 def test_import_cached_manifest_matched(self, mock_get_working, mock_fetch,
                                         *unused):
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         configuration = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID,
                                      constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, configuration)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
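         # Cache both the manifest and the units file in the working dir so the importer finds a matching cached copy.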
         publisher = dist.publisher(repo, configuration)
         manifest_path = publisher.manifest_path()
         units_path = os.path.join(os.path.dirname(manifest_path),
                                   UNITS_FILE_NAME)
         manifest = Manifest(manifest_path)
         manifest.read()
         shutil.copy(manifest_path,
                     os.path.join(working_dir, MANIFEST_FILE_NAME))
         shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
         # Test
         importer = NodesHttpImporter()
         manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                                   Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEqual(len(units), self.NUM_UNITS)
         self.assertFalse(mock_fetch.called)
Example #17
    def test_import(self, *mocks):
        # Setup
        self.populate()
        max_concurrency = 5
        max_bandwidth = 12345

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            cfg = self.dist_conf()
            conduit = RepoPublishConduit(self.REPO_ID,
                                         constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, cfg)
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            # Test
            importer = NodesHttpImporter()
            publisher = dist.publisher(repo, cfg)
            manifest_url = pathlib.url_join(publisher.base_url,
                                            publisher.manifest_path())
            configuration = {
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
                importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
                importer_constants.KEY_MAX_SPEED: max_bandwidth,
            }
            configuration = PluginCallConfiguration(configuration, {})
            conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            units = conduit.get_units()
            self.assertEqual(len(units), self.NUM_UNITS)
            mock_importer_config_to_nectar_config = mocks[0]
            mock_importer_config_to_nectar_config.assert_called_with(
                configuration.flatten())
Example #18
    def test_import_cached_manifest_units_invalid(self, *unused):
        # Setup
        self.populate()

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            configuration = self.dist_conf()
            conduit = RepoPublishConduit(self.REPO_ID,
                                         constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, configuration)
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            publisher = dist.publisher(repo, configuration)
            manifest_path = publisher.manifest_path()
            manifest = Manifest(manifest_path)
            manifest.read()
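            # Cache the manifest but write garbage into the cached units file so it cannot be reused.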
            shutil.copy(manifest_path,
                        os.path.join(working_dir, MANIFEST_FILE_NAME))
            with open(os.path.join(working_dir, UNITS_FILE_NAME), 'w+') as fp:
                fp.write('invalid-units')
            # Test
            importer = NodesHttpImporter()
            manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
            configuration = {
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            }
            configuration = PluginCallConfiguration(configuration, {})
            conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            units = conduit.get_units()
            self.assertEqual(len(units), self.NUM_UNITS)
Example #19
    def test_import(self, *mocks):
        # Setup
        self.populate()
        max_concurrency = 5
        max_bandwidth = 12345

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            dist = NodesHttpDistributor()
            working_dir = os.path.join(self.childfs, 'working_dir')
            os.makedirs(working_dir)
            repo = Repository(self.REPO_ID, working_dir)
            cfg = self.dist_conf()
            conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
            dist.publish_repo(repo, conduit, cfg)
            model.Repository.drop_collection()
            RepoDistributor.get_collection().remove()
            RepoContentUnit.get_collection().remove()
            unit_db.clean()
            self.define_plugins()
            # Test
            importer = NodesHttpImporter()
            publisher = dist.publisher(repo, cfg)
            manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
            configuration = {
                constants.MANIFEST_URL_KEYWORD: manifest_url,
                constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
                importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
                importer_constants.KEY_MAX_SPEED: max_bandwidth,
            }
            configuration = PluginCallConfiguration(configuration, {})
            conduit = RepoSyncConduit(
                self.REPO_ID,
                constants.HTTP_IMPORTER)

        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
            # Verify
            units = conduit.get_units()
            self.assertEqual(len(units), self.NUM_UNITS)
            mock_importer_config_to_nectar_config = mocks[0]
            mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
Example #20
 def test_import_unit_files_already_exist(self, mock_get_working, *mocks):
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         cfg = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, cfg)
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
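         # Copy the parent's content to the child so the unit files already exist before the sync.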
         parent_content = os.path.join(self.parentfs, 'content')
         child_content = os.path.join(self.childfs, 'content')
         shutil.copytree(parent_content, child_content)
         # Test
         importer = NodesHttpImporter()
         publisher = dist.publisher(repo, cfg)
         manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEqual(len(units), self.NUM_UNITS)
         mock_importer_config_to_nectar_config = mocks[0]
         mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
Example #21
    def populate(self):
        # make content/ dir.
        os.makedirs(os.path.join(self.parentfs, 'content'))

        with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
            # create repo
            manager = managers.repo_manager()
            manager.create_repo(self.REPO_ID,
                                display_name=REPO_NAME,
                                description=REPO_DESCRIPTION,
                                notes=REPO_NOTES)
            manager.set_repo_scratchpad(self.REPO_ID, REPO_SCRATCHPAD)
            # add units
            units = self.add_units(0, self.NUM_UNITS)
            self.units = units
Example #22
    def test_create_consumer_payload(self):
        local_distributor = YumHTTPDistributor()
        repo = Mock()
        repo.display_name = 'foo'
        repo.id = 'bar'
        config = {
            'https_ca': 'pear',
            'gpgkey': 'kiwi',
            'auth_cert': 'durian',
            'auth_ca': True,
            'http': True,
            'https': True
        }
        binding_config = {}
        cert_file = os.path.join(self.working_dir, "orange_file")

        with mock_config.patch({
                'server': {
                    'server_name': 'apple'
                },
                'security': {
                    'ssl_ca_certificate': cert_file
                }
        }):
            with open(cert_file, 'w') as filewriter:
                filewriter.write("orange")

            result = local_distributor.create_consumer_payload(
                repo, config, binding_config)

            target = {
                'server_name': 'apple',
                'ca_cert': 'orange',
                'relative_path': '/pulp/repos/bar',
                'gpg_keys': {
                    'pulp.key': 'kiwi'
                },
                'client_cert': 'durian',
                'protocols': ['http', 'https'],
                'repo_name': 'foo'
            }
            compare_dict(result, target)