예제 #1
0
 def test_process_main_missing_slashes(self, mock_deb_download, mock_deb_packagefile):
     """
     Test when the feed is missing a '/' at the end and the package_file_path is
     not relative (has a leading '/'); the step should still build a clean URL.
     """
     # One fake package stanza returned by debian_support.PackageFile.
     mock_deb_packagefile.return_value = [
         {
             'Package': 'foo',
             'Version': '1.5',
             'Architecture': 'x86_64',
             'Size': '105',
             'Filename': 'foo.deb'
         }
     ]
     # Feed lacks a trailing '/', package-file-path has a leading '/'.
     plugin_config = {
         importer_constants.KEY_FEED: 'http://ftp.fau.de/debian',
         'package-file-path': '/dists/stable/main/binary-amd64/'
     }
     self.config = PluginCallConfiguration({}, plugin_config)
     self.step.config = plugin_config
     self.step.parent.available_units = []
     self.step.process_main()
     # Expected: exactly one '/' joins the feed and the path.
     download_feed = 'http://ftp.fau.de/debian/dists/stable/main/binary-amd64/Packages'
     download_location = os.path.join(self.working_dir, 'Packages')
     mock_deb_download.assert_called_once_with(download_feed, download_location)
     self.assertEquals(len(self.step.parent.available_units), 1)
     self.assertDictEqual(self.step.parent.available_units[0],
                          {'name': 'foo',
                           'version': '1.5',
                           'architecture': 'x86_64'})
예제 #2
0
파일: test_config.py 프로젝트: alanoe/pulp
class PluginCallConfigurationTests(unittest.TestCase):
    """Exercise PluginCallConfiguration's layered configuration lookup."""

    def setUp(self):
        super(PluginCallConfigurationTests, self).setUp()

        # Four layers; key 'a' appears in every layer so priority is observable.
        self.override_config = {'a': 'a4', 'b': 'b4', 'e': 'e4'}
        self.repo_plugin_config = {'a': 'a3', 'c': 'c3'}
        self.plugin_config = {'a': 'a2', 'b': 'b2', 'd': 'd2'}
        self.default_config = {'a': 'a1'}

        self.config = PluginCallConfiguration(self.plugin_config,
                                              self.repo_plugin_config,
                                              self.override_config)
        self.config.default_config = self.default_config

    def test_flatten(self):
        """flatten() merges all layers into a dict, highest-priority value per key."""
        flattened = self.config.flatten()

        self.assertTrue(isinstance(flattened, dict))
        expected = {'a': 'a4', 'b': 'b4', 'c': 'c3', 'd': 'd2', 'e': 'e4'}
        self.assertEqual(len(expected), len(flattened))
        for key, value in expected.items():
            self.assertEqual(flattened[key], value)
예제 #3
0
 def test_import_unit_files_already_exist_size_mismatch(self, *mocks):
     """
     Publish a parent repo, copy its content files to the child but truncate
     them so the on-disk sizes no longer match; the child sync should still
     import all units (re-downloading the mismatched files).
     """
     # Setup
     self.populate()
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     dist = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     cfg = self.dist_conf()
     conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     dist.publish_repo(repo, conduit, cfg)
     # Wipe the parent-side DB state so the child sync starts from scratch.
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     parent_content = os.path.join(self.parentfs, 'content')
     child_content = os.path.join(self.childfs, 'content')
     shutil.copytree(parent_content, child_content)
     # Truncate every copied file so its size mismatches the manifest.
     for fn in os.listdir(child_content):
         path = os.path.join(child_content, fn)
         if os.path.isdir(path):
             continue
         with open(path, 'w') as fp:
             fp.truncate()
     # Test
     importer = NodesHttpImporter()
     publisher = dist.publisher(repo, cfg)
     manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
     configuration = {
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
     }
     configuration = PluginCallConfiguration(configuration, {})
     conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     # Point storage at the child filesystem for the sync.
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, conduit, configuration)
     # Verify
     units = conduit.get_units()
     self.assertEquals(len(units), self.NUM_UNITS)
     mock_importer_config_to_nectar_config = mocks[0]
     mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
예제 #4
0
 def test_import(self, *mocks):
     """
     End-to-end node sync: publish a parent repo, clear the DB, then sync it
     into the child with download concurrency/bandwidth limits set, and check
     all units arrive and the nectar config was built from the flat config.
     """
     # Setup
     self.populate()
     max_concurrency = 5
     max_bandwidth = 12345
     pulp_conf.set('server', 'storage_dir', self.parentfs)
     dist = NodesHttpDistributor()
     working_dir = os.path.join(self.childfs, 'working_dir')
     os.makedirs(working_dir)
     repo = Repository(self.REPO_ID, working_dir)
     cfg = self.dist_conf()
     conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
     dist.publish_repo(repo, conduit, cfg)
     # Wipe the parent-side DB state so the child sync starts from scratch.
     Repo.get_collection().remove()
     RepoDistributor.get_collection().remove()
     RepoContentUnit.get_collection().remove()
     unit_db.clean()
     self.define_plugins()
     # Test
     importer = NodesHttpImporter()
     publisher = dist.publisher(repo, cfg)
     manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
     configuration = {
         constants.MANIFEST_URL_KEYWORD: manifest_url,
         constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
         importer_constants.KEY_MAX_SPEED: max_bandwidth,
     }
     configuration = PluginCallConfiguration(configuration, {})
     conduit = RepoSyncConduit(
         self.REPO_ID,
         constants.HTTP_IMPORTER,
         RepoContentUnit.OWNER_TYPE_IMPORTER,
         constants.HTTP_IMPORTER)
     # Point storage at the child filesystem for the sync.
     pulp_conf.set('server', 'storage_dir', self.childfs)
     importer.sync_repo(repo, conduit, configuration)
     # Verify
     units = conduit.get_units()
     self.assertEquals(len(units), self.NUM_UNITS)
     mock_importer_config_to_nectar_config = mocks[0]
     mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
예제 #5
0
파일: test_config.py 프로젝트: alanoe/pulp
    def setUp(self):
        """Build a four-layer config; key 'a' is set in every layer."""
        super(PluginCallConfigurationTests, self).setUp()

        self.override_config = {'a': 'a4', 'b': 'b4', 'e': 'e4'}
        self.repo_plugin_config = {'a': 'a3', 'c': 'c3'}
        self.plugin_config = {'a': 'a2', 'b': 'b2', 'd': 'd2'}
        self.default_config = {'a': 'a1'}

        self.config = PluginCallConfiguration(self.plugin_config,
                                              self.repo_plugin_config,
                                              self.override_config)
        # default_config is assigned after construction, not a ctor argument.
        self.config.default_config = self.default_config
예제 #6
0
    def setUp(self):
        """Create a GetMetadataStep with a temp working dir and mocked conduit/parent."""
        self.working_dir = tempfile.mkdtemp()
        self.repo = RepositoryModel('repo1')
        self.repo.working_dir = self.working_dir
        self.conduit = mock.MagicMock()
        plugin_config = {
            importer_constants.KEY_FEED: 'http://ftp.fau.de/debian/dists/stable/main/binary-amd64/',
        }
        self.config = PluginCallConfiguration({}, plugin_config)

        self.step = sync.GetMetadataStep(repo=self.repo, conduit=self.conduit, config=self.config,
                                         working_dir=self.working_dir)
        # Mocked parent lets tests inspect what the step reports upward.
        self.step.parent = mock.MagicMock()
        self.index = self.step.parent.index_repository
예제 #7
0
 def test_import_unit_files_already_exist(self, mock_get_working, *mocks):
     """
     Publish a parent repo, copy its content to the child unchanged, then sync;
     all units should be imported even though the files already exist locally.
     """
     # Setup
     self.populate()
     mock_get_working.return_value = self.temp_dir
     with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
         dist = NodesHttpDistributor()
         working_dir = os.path.join(self.childfs, 'working_dir')
         os.makedirs(working_dir)
         repo = Repository(self.REPO_ID, working_dir)
         cfg = self.dist_conf()
         conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
         dist.publish_repo(repo, conduit, cfg)
         # Wipe parent-side state so the child sync starts from scratch.
         model.Distributor.objects.delete()
         RepoContentUnit.get_collection().remove()
         unit_db.clean()
         self.define_plugins()
         parent_content = os.path.join(self.parentfs, 'content')
         child_content = os.path.join(self.childfs, 'content')
         shutil.copytree(parent_content, child_content)
         # Test
         importer = NodesHttpImporter()
         publisher = dist.publisher(repo, cfg)
         manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
         configuration = {
             constants.MANIFEST_URL_KEYWORD: manifest_url,
             constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
         }
         configuration = PluginCallConfiguration(configuration, {})
         conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
     # Re-patch storage to the child filesystem for the actual sync.
     with mock_config.patch({'server': {'storage_dir': self.childfs}}):
         with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
             importer.sync_repo(repo, conduit, configuration)
         # Verify
         units = conduit.get_units()
         self.assertEquals(len(units), self.NUM_UNITS)
         mock_importer_config_to_nectar_config = mocks[0]
         mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
예제 #8
0
 def test_process_step_skip_units(self):
     """A unit type listed in the config's 'skip' list short-circuits the step."""
     self.publisher.config = PluginCallConfiguration(None, {'skip': ['FOO']})
     skipped_step = publish_step.UnitPublishStep('foo_step', 'FOO')
     skipped_step.parent = self.publisher
     skipped_step.process()
     self.assertEquals(skipped_step.state, reporting_constants.STATE_SKIPPED)
예제 #9
0
    def add_distributor(self,
                        repo_id,
                        distributor_type_id,
                        repo_plugin_config,
                        auto_publish,
                        distributor_id=None):
        """
        Adds an association from the given repository to a distributor. The
        association will be tracked through the distributor_id; each distributor
        on a given repository must have a unique ID. If this is not specified,
        one will be generated. If a distributor already exists on the repo for
        the given ID, the existing one will be removed and replaced with the
        newly configured one.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_type_id: identifies the distributor; must correspond
                                    to a distributor loaded at server startup
        @type  distributor_type_id: str

        @param repo_plugin_config: configuration the repo will use with this distributor; may be None
        @type  repo_plugin_config: dict

        @param auto_publish: if true, this distributor will be invoked at
                             the end of every sync
        @type  auto_publish: bool

        @param distributor_id: unique ID to refer to this distributor for this repo
        @type  distributor_id: str

        @return: ID assigned to the distributor (only valid in conjunction with the repo)

        @raise MissingResource: if the given repo_id does not refer to a valid repo
        @raise InvalidValue: if the distributor ID is provided and unacceptable
        @raise InvalidDistributorConfiguration: if the distributor plugin does not
               accept the given configuration
        """

        repo_coll = Repo.get_collection()
        # NOTE(review): distributor_coll is not referenced in this visible span;
        # presumably used further below (view may be truncated) — confirm.
        distributor_coll = RepoDistributor.get_collection()

        # Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        if not plugin_api.is_valid_distributor(distributor_type_id):
            raise InvalidValue(['distributor_type_id'])

        # Determine the ID for this distributor on this repo; will be
        # unique for all distributors on this repository but not globally
        if distributor_id is None:
            distributor_id = str(uuid.uuid4())
        else:
            # Validate if one was passed in
            if not is_distributor_id_valid(distributor_id):
                raise InvalidValue(['distributor_id'])

        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        if repo_plugin_config is not None:
            clean_config = dict([(k, v) for k, v in repo_plugin_config.items()
                                 if v is not None])
        else:
            clean_config = None

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            distributor_type_id, repo_id)

        # Gather every other repo using this distributor type so the plugin can
        # validate the new config against related repos.
        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_distributor_type(
            distributor_type_id)

        transfer_related_repos = []
        for r in related_repos:
            all_configs = [d['config'] for d in r['distributors']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = distributor_instance.validate_config(
                transfer_repo, call_config, transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            _LOG.exception(
                'Exception received from distributor [%s] while validating config'
                % distributor_type_id)
            # Python 2 three-expression raise: re-raise with the original traceback.
            raise PulpDataException(e.args), None, sys.exc_info()[2]
예제 #10
0
 def test_no_dirs(self):
     """An empty configuration yields no export publish directories."""
     repo = mock.Mock(id='foo')
     empty_config = PluginCallConfiguration({}, {})
     dirs = configuration.get_export_repo_group_publish_dirs(repo, empty_config)
     self.assertEquals(dirs, [])
예제 #11
0
 def test_is_skipped_dict(self):
     """A 'skip' mapping whose keys include the unit type marks the step skipped."""
     unit_step = publish_step.UnitPublishStep("foo", 'bar')
     unit_step.config = PluginCallConfiguration(None, {'skip': {'bar': True, 'baz': True}})
     self.assertTrue(unit_step.is_skipped())
예제 #12
0
 def test_optional(self):
     """max_speed may be omitted entirely without raising."""
     empty_config = PluginCallConfiguration({}, {})
     importer_config.validate_max_speed(empty_config)
예제 #13
0
    def import_uploaded_unit(repo_id, unit_type_id, unit_key, unit_metadata, upload_id,
                             override_config=None):
        """
        Called to trigger the importer's handling of an uploaded unit. This
        should not be called until the bits have finished uploading. The
        importer is then responsible for moving the file to the correct location,
        adding it to the Pulp server's inventory, and associating it with the
        repository.

        This call will first call is_valid_upload to check the integrity of the
        destination repository. See that method's documentation for exception
        possibilities.

        :param repo_id:       identifies the repository into which the unit is uploaded
        :type  repo_id:       str
        :param unit_type_id:  type of unit being uploaded
        :type  unit_type_id:  str
        :param unit_key:      unique identifier for the unit (user-specified)
        :type  unit_key:      dict
        :param unit_metadata: any user-specified information about the unit
        :type  unit_metadata: dict
        :param upload_id:     upload being imported
        :type  upload_id:     str
        :param override_config: plugin config overrides for this call; may be None
        :type  override_config: dict or None
        :return:              A SyncReport indicating the success or failure of the upload
        :rtype:               pulp.plugins.model.SyncReport
        :raise MissingResource: if the repo's importer plugin is no longer loaded
        :raise PulpExecutionException: if the importer fails with a non-Pulp error
        """
        # If it doesn't raise an exception, it's good to go
        ContentUploadManager.is_valid_upload(repo_id, unit_type_id)
        repo_obj = model.Repository.objects.get_repo_or_missing_resource(repo_id)
        repo_importer = model.Importer.objects.get_or_404(repo_id=repo_id)

        try:
            importer_instance, plugin_config = plugin_api.get_importer_by_id(
                repo_importer['importer_type_id'])
        except plugin_exceptions.PluginNotFound:
            # Python 2 three-expression raise: keep the original traceback.
            raise MissingResource(repo_id), None, sys.exc_info()[2]

        # Assemble the data needed for the import
        conduit = UploadConduit(repo_id, repo_importer['id'])

        # Layered config: plugin defaults < stored importer config < call overrides.
        call_config = PluginCallConfiguration(plugin_config, repo_importer['config'],
                                              override_config)
        transfer_repo = repo_obj.to_transfer_repo()

        file_path = ContentUploadManager._upload_file_path(upload_id)

        # Invoke the importer
        try:
            result = importer_instance.upload_unit(transfer_repo, unit_type_id, unit_key,
                                                   unit_metadata, file_path, conduit, call_config)
            # Unit counts may have changed as a result of the upload.
            repo_controller.rebuild_content_unit_counts(repo_obj)
            return result

        except PulpException:
            # Pulp errors propagate unchanged; just log them.
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            raise
        except Exception, e:
            msg = _('Error from the importer while importing uploaded unit to repository [%(r)s]')
            msg = msg % {'r': repo_id}
            logger.exception(msg)
            # Wrap unexpected errors, preserving the traceback (Py2 raise form).
            raise PulpExecutionException(e), None, sys.exc_info()[2]
예제 #14
0
 def test_valid(self):
     """A proxy password paired with a proxy user passes validation."""
     proxy_settings = {importer_constants.KEY_PROXY_PASS: '******',
                       importer_constants.KEY_PROXY_USER: '******'}
     config = PluginCallConfiguration({}, proxy_settings)
     importer_config.validate_proxy_password(config)
예제 #15
0
 def test_optional(self):
     """The feed setting is optional; an empty config validates cleanly."""
     empty_config = PluginCallConfiguration({}, {})
     importer_config.validate_feed_requirement(empty_config)
예제 #16
0
 def test_valid_str(self):
     """A numeric string is an acceptable proxy port value."""
     port_config = PluginCallConfiguration({}, {importer_constants.KEY_PROXY_PORT: '3128'})
     importer_config.validate_proxy_port(port_config)
예제 #17
0
 def test_non_string(self):
     """
     A float proxy port must be rejected with a ValueError whose message
     names the offending value.
     """
     config = PluginCallConfiguration({}, {importer_constants.KEY_PROXY_PORT: 1.1})
     try:
         importer_config.validate_proxy_port(config)
     except ValueError, e:
         # e[0] is the first exception argument (Python 2 indexing).
         self.assertTrue('1.1' in e[0])
예제 #18
0
 def test_failure(self):
     """A non-boolean 'validate' value causes overall config validation to fail."""
     bad_config = PluginCallConfiguration({}, {importer_constants.KEY_VALIDATE: 0})
     self.assertRaises(importer_config.InvalidConfig, importer_config.validate_config, bad_config)
예제 #19
0
 def test_validate(self):
     """A well-formed proxy host URL passes validation."""
     host_config = PluginCallConfiguration(
         {}, {importer_constants.KEY_PROXY_HOST: 'http://fake.com/'})
     importer_config.validate_proxy_host(host_config)
예제 #20
0
 def test_all_pass(self):
     """Every importer option is optional: full validation of an empty config succeeds."""
     empty_config = PluginCallConfiguration({}, {})
     importer_config.validate_config(empty_config)
예제 #21
0
 def test_validate_config_no_files_dir_specified(self):
     """validate_config accepts a configuration that omits the files directory."""
     empty_config = PluginCallConfiguration({}, {})
     is_valid, error_message = self.distributor.validate_config(
         self.repo, empty_config, None)
     self.assertTrue(is_valid)
     self.assertEquals(error_message, None)
예제 #22
0
 def test_optional(self):
     """The proxy password may be omitted entirely without raising."""
     empty_config = PluginCallConfiguration({}, {})
     importer_config.validate_proxy_password(empty_config)
예제 #23
0
 def test_optional(self):
     """A non-required boolean check passes when the key is absent."""
     empty_config = PluginCallConfiguration({}, {})
     importer_config._run_validate_is_non_required_bool(empty_config, 'missing')
예제 #24
0
 def test_valid(self):
     """A string SSL client certificate passes validation."""
     cert_config = PluginCallConfiguration({}, {importer_constants.KEY_SSL_CLIENT_CERT: 'cert'})
     importer_config.validate_ssl_client_cert(cert_config)
예제 #25
0
 def test_str(self):
     """A numeric string is an acceptable max_speed value."""
     speed_config = PluginCallConfiguration({}, {importer_constants.KEY_MAX_SPEED: '512'})
     importer_config.validate_max_speed(speed_config)
예제 #26
0
 def test_valid(self):
     """A string SSL client key passes validation."""
     key_config = PluginCallConfiguration({}, {importer_constants.KEY_SSL_CLIENT_KEY: 'key'})
     importer_config.validate_ssl_client_key(key_config)
예제 #27
0
 def test_validate(self):
     """A positive integer max_downloads value passes validation."""
     downloads_config = PluginCallConfiguration({}, {importer_constants.KEY_MAX_DOWNLOADS: 11})
     importer_config.validate_max_downloads(downloads_config)
예제 #28
0
 def test_optional(self):
     """The SSL client key may be omitted entirely without raising."""
     empty_config = PluginCallConfiguration({}, {})
     importer_config.validate_ssl_client_key(empty_config)
예제 #29
0
 def _generate_call_config(**kwargs):
     """Build a PluginCallConfiguration whose default layer holds the given kwargs."""
     call_config = PluginCallConfiguration(None, None)
     call_config.default_config.update(kwargs)
     return call_config
예제 #30
0
    def regenerate_applicability(profile_hash,
                                 content_type,
                                 profile_id,
                                 bound_repo_id,
                                 existing_applicability=None):
        """
        Regenerate and save applicability data for given profile and bound repo id.
        If existing_applicability is not None, replace it with the new applicability data.

        :param profile_hash: hash of the unit profile
        :type profile_hash: basestring

        :param content_type: profile (unit) type ID
        :type content_type: str

        :param profile_id: unique id of the unit profile
        :type profile_id: str

        :param bound_repo_id: repo id to be used to calculate applicability
                              against the given unit profile
        :type bound_repo_id: str

        :param existing_applicability: existing RepoProfileApplicability object to be replaced
        :type existing_applicability: pulp.server.db.model.consumer.RepoProfileApplicability
        """
        profiler_conduit = ProfilerConduit()
        # Get the profiler for content_type of given unit_profile
        profiler, profiler_cfg = ApplicabilityRegenerationManager._profiler(
            content_type)

        # Check if the profiler supports applicability, else return
        # (identity check: the profiler did not override the base-class method).
        if profiler.calculate_applicable_units == Profiler.calculate_applicable_units:
            # If base class calculate_applicable_units method is called,
            # skip applicability regeneration
            return

        # Find out which content types have unit counts greater than zero in the bound repo
        repo_content_types = ApplicabilityRegenerationManager._get_existing_repo_content_types(
            bound_repo_id)
        # Get the intersection of existing types in the repo and the types that the profiler
        # handles. If the intersection is not empty, regenerate applicability
        if (set(repo_content_types) & set(profiler.metadata()['types'])):
            # Get the actual profile for existing_applicability or lookup using profile_id
            if existing_applicability:
                profile = existing_applicability.profile
            else:
                # Fetch only the 'profile' field to avoid pulling the full document.
                unit_profile = UnitProfile.get_collection().find_one(
                    {'id': profile_id}, projection=['profile'])
                profile = unit_profile['profile']
            call_config = PluginCallConfiguration(plugin_config=profiler_cfg,
                                                  repo_plugin_config=None)
            try:
                applicability = profiler.calculate_applicable_units(
                    profile, bound_repo_id, call_config, profiler_conduit)
            except NotImplementedError:
                # Profiler advertises the type but opted out of applicability.
                msg = "Profiler for content type [%s] does not support applicability" % content_type
                _logger.debug(msg)
                return

            try:
                # Create a new RepoProfileApplicability object and save it in the db
                RepoProfileApplicability.objects.create(
                    profile_hash, bound_repo_id, profile, applicability)
            except DuplicateKeyError:
                # A record for (repo_id, profile_hash) already exists:
                # Update existing applicability
                if not existing_applicability:
                    # Look up the existing record so it can be updated in place.
                    applicability_dict = RepoProfileApplicability.get_collection(
                    ).find_one({
                        'repo_id': bound_repo_id,
                        'profile_hash': profile_hash
                    })
                    existing_applicability = RepoProfileApplicability(
                        **applicability_dict)
                existing_applicability.applicability = applicability
                existing_applicability.save()
예제 #31
0
 def test_is_skipped_dict_not_skipped(self):
     """With no 'skip' setting at all, the step is not skipped."""
     unit_step = publish_step.UnitPublishStep("foo", 'bar')
     unit_step.config = PluginCallConfiguration(None, None)
     self.assertFalse(unit_step.is_skipped())
예제 #32
0
 def test_str(self):
     """A numeric string is an acceptable retain-old-count value."""
     retain_config = PluginCallConfiguration({}, {importer_constants.KEY_UNITS_RETAIN_OLD_COUNT: '1'})
     importer_config.validate_retain_old_count(retain_config)
예제 #33
0
class TestGenerateMetadataStep(unittest.TestCase):
    """Tests for GetMetadataStep's Packages download and parsing behaviour."""

    def setUp(self):
        """Create a GetMetadataStep with a temp working dir and mocked conduit/parent."""
        self.working_dir = tempfile.mkdtemp()
        self.repo = RepositoryModel('repo1')
        self.repo.working_dir = self.working_dir
        self.conduit = mock.MagicMock()
        plugin_config = {
            importer_constants.KEY_FEED: 'http://ftp.fau.de/debian/dists/stable/main/binary-amd64/',
        }
        self.config = PluginCallConfiguration({}, plugin_config)

        self.step = sync.GetMetadataStep(repo=self.repo, conduit=self.conduit, config=self.config,
                                         working_dir=self.working_dir)
        self.step.parent = mock.MagicMock()
        self.index = self.step.parent.index_repository

    def tearDown(self):
        """Remove the temporary working directory."""
        super(TestGenerateMetadataStep, self).tearDown()
        shutil.rmtree(self.working_dir)

    @mock.patch('pulp_deb.plugins.importers.sync.debian_support.PackageFile')
    @mock.patch('pulp_deb.plugins.importers.sync.debian_support.download_file')
    def test_process_main(self, mock_deb_download, mock_deb_packagefile):
        """Packages file in the feed directory: download it and index one unit."""
        # One fake package stanza returned by debian_support.PackageFile.
        mock_deb_packagefile.return_value = [
            {
                'Package': 'foo',
                'Version': '1.5',
                'Architecture': 'x86_64',
                'Size': '105',
                'Filename': 'foo.deb'
            }
        ]
        self.step.parent.available_units = []
        self.step.process_main()
        download_feed = self.config.get(importer_constants.KEY_FEED) + 'Packages'
        download_location = os.path.join(self.working_dir, 'Packages')
        mock_deb_download.assert_called_once_with(download_feed, download_location)
        self.assertEquals(len(self.step.parent.available_units), 1)
        self.assertDictEqual(self.step.parent.available_units[0],
                             {'name': 'foo',
                              'version': '1.5',
                              'architecture': 'x86_64'})

    @mock.patch('pulp_deb.plugins.importers.sync.debian_support.PackageFile')
    @mock.patch('pulp_deb.plugins.importers.sync.debian_support.download_file')
    def test_process_main_sub_packagefile(self, mock_deb_download, mock_deb_packagefile):
        """
        Test when the Packages file is not in the feed directory.
        """
        mock_deb_packagefile.return_value = [
            {
                'Package': 'foo',
                'Version': '1.5',
                'Architecture': 'x86_64',
                'Size': '105',
                'Filename': 'foo.deb'
            }
        ]
        # Relative package-file-path appended to the feed URL.
        plugin_config = {
            importer_constants.KEY_FEED: 'http://ftp.fau.de/debian/',
            'package-file-path': 'dists/stable/main/binary-amd64/'
        }
        self.config = PluginCallConfiguration({}, plugin_config)
        self.step.config = plugin_config
        self.step.parent.available_units = []
        self.step.process_main()
        download_feed = 'http://ftp.fau.de/debian/dists/stable/main/binary-amd64/Packages'
        download_location = os.path.join(self.working_dir, 'Packages')
        mock_deb_download.assert_called_once_with(download_feed, download_location)
        self.assertEquals(len(self.step.parent.available_units), 1)
        self.assertDictEqual(self.step.parent.available_units[0],
                             {'name': 'foo',
                              'version': '1.5',
                              'architecture': 'x86_64'})

    @mock.patch('pulp_deb.plugins.importers.sync.debian_support.PackageFile')
    @mock.patch('pulp_deb.plugins.importers.sync.debian_support.download_file')
    def test_process_main_missing_slashes(self, mock_deb_download, mock_deb_packagefile):
        """
        Test when the feed is missing a '/' at the end and the package_file_path is
        not relative (has a leading '/'); the URL should still join cleanly.
        """
        mock_deb_packagefile.return_value = [
            {
                'Package': 'foo',
                'Version': '1.5',
                'Architecture': 'x86_64',
                'Size': '105',
                'Filename': 'foo.deb'
            }
        ]
        plugin_config = {
            importer_constants.KEY_FEED: 'http://ftp.fau.de/debian',
            'package-file-path': '/dists/stable/main/binary-amd64/'
        }
        self.config = PluginCallConfiguration({}, plugin_config)
        self.step.config = plugin_config
        self.step.parent.available_units = []
        self.step.process_main()
        # Expected: exactly one '/' joins the feed and the path.
        download_feed = 'http://ftp.fau.de/debian/dists/stable/main/binary-amd64/Packages'
        download_location = os.path.join(self.working_dir, 'Packages')
        mock_deb_download.assert_called_once_with(download_feed, download_location)
        self.assertEquals(len(self.step.parent.available_units), 1)
        self.assertDictEqual(self.step.parent.available_units[0],
                             {'name': 'foo',
                              'version': '1.5',
                              'architecture': 'x86_64'})
예제 #34
0
 def test_valid(self):
     """A feed URL satisfies the feed requirement check."""
     feed_config = PluginCallConfiguration({}, {importer_constants.KEY_FEED: "http://test.com/feed"})
     importer_config.validate_feed_requirement(feed_config)
예제 #35
0
    def update_distributor_config(self, repo_id, distributor_id,
                                  distributor_config):
        """
        Attempts to update the saved configuration for the given distributor.
        The distributor will be asked if the new configuration is valid. If not,
        this method will raise an error and the existing configuration will
        remain unchanged.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_id: identifies the distributor on the repo
        @type  distributor_id: str

        @param distributor_config: new configuration values to use; treated as a
                                   delta against the stored config. NOTE(review):
                                   keys whose value is None are popped from this
                                   dict in place — the caller's dict is mutated.
        @type  distributor_config: dict

        @return: the updated distributor
        @rtype:  dict

        @raise MissingResource: if the given repo or distributor doesn't exist
        @raise PulpDataException: if the plugin rejects the given changes
        """

        repo_coll = Repo.get_collection()
        distributor_coll = RepoDistributor.get_collection()

        # Input Validation
        repo = repo_coll.find_one({'id': repo_id})
        if repo is None:
            raise MissingResource(repository=repo_id)

        repo_distributor = distributor_coll.find_one({
            'repo_id': repo_id,
            'id': distributor_id
        })
        if repo_distributor is None:
            raise MissingResource(distributor=distributor_id)

        distributor_type_id = repo_distributor['distributor_type_id']
        distributor_instance, plugin_config = plugin_api.get_distributor_by_id(
            distributor_type_id)

        # The supplied config is a delta of changes to make to the existing config.
        # The plugin expects a full configuration, so we apply those changes to
        # the original config and pass that to the plugin's validate method.
        merged_config = dict(repo_distributor['config'])

        # The convention is that None in an update is removing the value and
        # setting it to the default. Find all such properties in this delta and
        # remove them from the existing config if they are there.
        unset_property_names = [
            k for k in distributor_config if distributor_config[k] is None
        ]
        for key in unset_property_names:
            merged_config.pop(key, None)
            distributor_config.pop(key, None)

        # Whatever is left over are the changed/added values, so merge them in.
        merged_config.update(distributor_config)

        # Let the distributor plugin verify the configuration
        call_config = PluginCallConfiguration(plugin_config, merged_config)
        transfer_repo = common_utils.to_transfer_repo(repo)
        transfer_repo.working_dir = common_utils.distributor_working_dir(
            distributor_type_id, repo_id)

        # Gather every other repo using this distributor type so the plugin can
        # validate the new config against related repos.
        query_manager = manager_factory.repo_query_manager()
        related_repos = query_manager.find_with_distributor_type(
            distributor_type_id)

        transfer_related_repos = []
        for r in related_repos:

            # Don't include the repo being updated in this list
            if r['id'] == repo_id:
                continue

            all_configs = [d['config'] for d in r['distributors']]
            trr = common_utils.to_related_repo(r, all_configs)
            transfer_related_repos.append(trr)

        try:
            result = distributor_instance.validate_config(
                transfer_repo, call_config, transfer_related_repos)

            # For backward compatibility with plugins that don't yet return the tuple
            if isinstance(result, bool):
                valid_config = result
                message = None
            else:
                valid_config, message = result
        except Exception, e:
            _LOG.exception(
                'Exception raised from distributor [%s] while validating config for repo [%s]'
                % (distributor_type_id, repo_id))
            # Python 2 three-expression raise: re-raise with the original traceback.
            raise PulpDataException(e.args), None, sys.exc_info()[2]
예제 #36
0
 def test_valid_str(self):
     """A string value of 'true' must pass the non-required-bool validation."""
     repo_plugin_config = {'a': 'true'}
     call_config = PluginCallConfiguration({}, repo_plugin_config)
     importer_config._run_validate_is_non_required_bool(call_config, 'a')
예제 #37
0
 def test_optional(self):
     """Validation must accept a configuration that omits the retain-old-count key."""
     empty_call_config = PluginCallConfiguration({}, {})
     importer_config.validate_retain_old_count(empty_call_config)
예제 #38
0
    def add_distributor(self,
                        repo_group_id,
                        distributor_type_id,
                        group_plugin_config,
                        distributor_id=None):
        """
        Adds an association from the given repository group to a distributor.
        The assocation will be tracked through the distributor_id; each
        distributor on a given group must have a unique ID. If this is not
        specified, one will be generated. If a distributor already exists on the
        group with a given ID, the existing one will be removed and replaced
        with the newly configured one.

        @param repo_group_id: identifies the repo group
        @type  repo_group_id: str

        @param distributor_type_id: type of distributor being added; must reference
               one of the installed group distributors
        @type  distributor_type_id: str

        @param group_plugin_config: config to use for the distributor for this group alone
        @type  group_plugin_config: dict

        @param distributor_id: if specified, the newly added distributor will be
               referenced by this value and the group id; if omitted one will
               be generated
        @type  distributor_id: str

        @return: database representation of the added distributor
        @rtype:  dict

        @raise MissingResource: if the group doesn't exist
        @raise InvalidValue: if a distributor ID is provided and is not valid
        @raise PulpDataException: if the plugin indicates the config is invalid
        @raise PulpExecutionException: if the plugin raises an exception while
               initializing the newly added distributor
        """
        distributor_coll = RepoGroupDistributor.get_collection()

        query_manager = manager_factory.repo_group_query_manager()

        # Validation
        group = query_manager.get_group(
            repo_group_id)  # will raise MissingResource

        if not plugin_api.is_valid_group_distributor(distributor_type_id):
            raise InvalidValue(['distributor_type_id'])

        # Determine the ID for the distributor on this repo
        if distributor_id is None:
            distributor_id = str(uuid.uuid4())
        else:
            # Validate if one was passed in
            if not is_distributor_id_valid(distributor_id):
                raise InvalidValue(['distributor_id'])

        distributor_instance, plugin_config = plugin_api.get_group_distributor_by_id(
            distributor_type_id)

        # Convention is that a value of None means unset. Remove any keys that
        # are explicitly set to None so the plugin will default them.
        clean_config = None
        if group_plugin_config is not None:
            clean_config = dict([(k, v)
                                 for k, v in group_plugin_config.items()
                                 if v is not None])

        # Let the plugin validate the configuration
        call_config = PluginCallConfiguration(plugin_config, clean_config)
        transfer_group = common_utils.to_transfer_repo_group(group)
        transfer_group.working_dir = common_utils.distributor_working_dir(
            distributor_type_id, repo_group_id)

        # Load the related groups which is needed for the validation
        transfer_related_groups = related_groups(distributor_type_id)

        # Request the plugin validate the configuration
        try:
            is_valid, message = distributor_instance.validate_config(
                transfer_group, call_config, transfer_related_groups)

            if not is_valid:
                raise PulpDataException(message)
        except Exception, e:
            _LOG.exception(
                'Exception received from distributor [%s] while validating config'
                % distributor_type_id)
            raise PulpDataException(e.args), None, sys.exc_info()[2]