Example #1
0
    def _add_new_module(self, downloader, module):
        """
        Performs the tasks for downloading and saving a new unit in Pulp.

        This method entirely skips modules that are already in the repository.

        :param downloader: downloader instance to use for retrieving the unit
        :type downloader: child of pulp_puppet.plugins.importers.downloaders.base.BaseDownloader

        :param module: module to download and add
        :type  module: pulp_puppet.plugins.db.models.Module
        """
        try:
            # Download the bits
            downloaded_filename = downloader.retrieve_module(
                self.progress_report, module)

            # Extract the extra metadata into the module
            metadata = metadata_module.extract_metadata(
                downloaded_filename, self.repo.working_dir)

            # Overwrite the author and name
            metadata.update(Module.split_filename(metadata['name']))

            # Create and save the Module
            module = Module.from_metadata(metadata)
            module.set_storage_path(os.path.basename(downloaded_filename))
            try:
                module.save_and_import_content(downloaded_filename)
            except NotUniqueError:
                module = module.__class__.objects.get(**module.unit_key)
            # Associate the module with the repo
            repo_controller.associate_single_unit(self.repo.repo_obj, module)
        finally:
            downloader.cleanup_module(module)
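A minimal standalone sketch of the save-or-fetch idiom used above, assuming mongoengine (which Pulp 2 models are built on) and a reachable local MongoDB; the PuppetModule class, its fields, and save_or_get_existing are illustrative stand-ins, not Pulp code.

from mongoengine import Document, NotUniqueError, StringField, connect

connect('sketch_db')  # assumes a local MongoDB instance


class PuppetModule(Document):
    # author/name/version together play the role of the unit key
    author = StringField(required=True)
    name = StringField(required=True, unique_with=['author', 'version'])
    version = StringField(required=True)


def save_or_get_existing(module):
    """Save the module; if one with the same key exists, return the stored copy."""
    try:
        module.save()
        return module
    except NotUniqueError:
        return PuppetModule.objects.get(author=module.author,
                                        name=module.name,
                                        version=module.version)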
Example #2
0
    def _add_new_module(self, downloader, module):
        """
        Performs the tasks for downloading and saving a new unit in Pulp.

        This method entirely skips modules that are already in the repository.

        :param downloader: downloader instance to use for retrieving the unit
        :type downloader: child of pulp_puppet.plugins.importers.downloaders.base.BaseDownloader

        :param module: module to download and add
        :type  module: pulp_puppet.plugins.db.models.Module
        """
        try:
            # Download the bits
            downloaded_filename = downloader.retrieve_module(self.progress_report, module)

            # Extract the extra metadata into the module
            metadata = metadata_module.extract_metadata(downloaded_filename,
                                                        self.repo.working_dir)

            # Overwrite the author and name
            metadata.update(Module.split_filename(metadata['name']))

            # Create and save the Module
            module = Module.from_metadata(metadata)
            module.set_storage_path(os.path.basename(downloaded_filename))
            try:
                module.save_and_import_content(downloaded_filename)
            except NotUniqueError:
                module = module.__class__.objects.get(**module.unit_key)
            # Associate the module with the repo
            repo_controller.associate_single_unit(self.repo.repo_obj, module)
        finally:
            downloader.cleanup_module(module)
Example #3
0
    def setUp(self):
        self.distributor = installdistributor.PuppetModuleInstallDistributor()
        self.uk1 = {'author': 'puppetlabs', 'name': 'stdlib', 'version': '1.2.0'}
        self.uk2 = {'author': 'puppetlabs', 'name': 'stdlib', 'version': '1.2.1'}
        self.units = [
            Module(_storage_path='/a/b/x', **self.uk1),
            Module(_storage_path='/a/b/y', **self.uk2)
        ]
Example #4
    def test_migration(self, mock_modules, mock_dist, mock_open):
        module_foo = Module(name='kung-foo', version='0.1.2', author='kung')
        mock_modules.filter.return_value = [module_foo]
        module_foo.save = Mock()

        migration.migrate()

        module_foo.save.assert_called_once_with()
        mock_dist.filter.assert_called_once_with(repo_id__in=[], last_publish__ne=None)
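The bare test above only makes sense together with its mock.patch decorators, which are not shown. A hedged sketch of what they typically look like follows; the patch targets are assumptions chosen for illustration, not copied from the real test module. Decorators are applied bottom-up, so the innermost @patch supplies the first mock argument after self.

import unittest
from mock import patch


class ExampleMigrationTest(unittest.TestCase):

    @patch('__builtin__.open')                              # -> mock_open (outermost)
    @patch('pulp.server.db.model.Distributor.objects')      # -> mock_dist (assumed target)
    @patch('pulp_puppet.plugins.db.models.Module.objects')  # -> mock_modules (assumed target)
    def test_argument_order(self, mock_modules, mock_dist, mock_open):
        # The decorator closest to the function maps to the first mock argument.
        pass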
Example #5
0
    def _import_modules(self, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths. This will also handle
        removing any modules from the local repository if they are no longer present in the
        remote repository and the 'remove_missing' config value is True.

        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        """
        existing_module_ids_by_key = {}
        for module in Module.objects.only(*Module.unit_key_fields).all():
            existing_module_ids_by_key[module.unit_key_str] = module.id

        remote_paths = {}

        list_of_modules = []
        for module_path in module_paths:
            puppet_manifest = self._extract_metadata(module_path)
            puppet_manifest.update(Module.split_filename(puppet_manifest['name']))
            module = Module.from_metadata(puppet_manifest)
            remote_paths[module.unit_key_str] = module_path
            list_of_modules.append(module)

        pub_step = publish_step.GetLocalUnitsStep(constants.IMPORTER_TYPE_ID,
                                                  available_units=list_of_modules, repo=self.repo)
        pub_step.process_main()
        self.report.modules_total_count = len(pub_step.units_to_download)

        for module in pub_step.units_to_download:
            remote_path = remote_paths[module.unit_key_str]
            if self.canceled:
                return
            _logger.debug(IMPORT_MODULE, dict(mod=remote_path))

            module.set_storage_path(os.path.basename(remote_path))
            try:
                module.save_and_import_content(remote_path)
            except NotUniqueError:
                module = module.__class__.objects.get(**module.unit_key)

            repo_controller.associate_single_unit(self.repo.repo_obj, module)

            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state not in constants.COMPLETE_STATES:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        remove_missing = self.config.get_boolean(constants.CONFIG_REMOVE_MISSING)
        if remove_missing is None:
            remove_missing = constants.DEFAULT_REMOVE_MISSING
        if remove_missing:
            self._remove_missing(existing_module_ids_by_key, remote_paths.keys())
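GetLocalUnitsStep is used above as a black box; the part this code relies on is simply partitioning the available units into those already stored and those still to be downloaded. A plain-Python sketch of that partitioning, with made-up dictionaries standing in for Module objects:

def split_local_and_missing(available_units, existing_keys):
    """Return (already_local, to_download) based on each unit's key string."""
    already_local, to_download = [], []
    for unit in available_units:
        bucket = already_local if unit['key'] in existing_keys else to_download
        bucket.append(unit)
    return already_local, to_download


existing = {'puppetlabs-stdlib-1.2.0'}
available = [{'key': 'puppetlabs-stdlib-1.2.0'}, {'key': 'puppetlabs-java-1.3.1'}]
local, missing = split_local_and_missing(available, existing)
assert [u['key'] for u in missing] == ['puppetlabs-java-1.3.1']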
Example #6
    def test_migration(self, mock_modules, mock_dist, mock_open):
        module_foo = Module(name='kung-foo', version='0.1.2', author='kung')
        mock_modules.filter.return_value = [module_foo]
        module_foo.save = Mock()

        migration.migrate()

        module_foo.save.assert_called_once_with()
        mock_dist.filter.assert_called_once_with(repo_id__in=[],
                                                 last_publish__ne=None)
Example #7
0
    def setUp(self):
        self.distributor = installdistributor.PuppetModuleInstallDistributor()
        self.puppet_dir = '/opt/my/modules/'
        self.repo = Repository('repo1', '', repo_obj=mock.MagicMock())
        self.conduit = RepoPublishConduit('repo1', self.distributor.metadata()['id'])
        self.uk1 = {'author': 'puppetlabs', 'name': 'stdlib', 'version': '1.2.0'}
        self.uk2 = {'author': 'puppetlabs', 'name': 'java', 'version': '1.3.1'}
        self.units = [
            Module(_storage_path='/a/b/x', **self.uk1),
            Module(_storage_path='/a/b/y', **self.uk2)
        ]
        self.conduit.get_units = mock.MagicMock(return_value=self.units,
                                                spec_set=self.conduit.get_units)
Example #8
0
def handle_uploaded_unit(repo, type_id, unit_key, metadata, file_path,
                         conduit):
    """
    Handles an upload unit request to the importer. This call is responsible
    for moving the unit from its temporary location where Pulp stored the
    upload to the final storage location (as dictated by Pulp) for the unit.
    This call will also update the database in Pulp to reflect the unit
    and its association to the repository.

    :param repo: repository into which the unit is being uploaded
    :type repo: pulp.plugins.model.Repository
    :param type_id: type of unit being uploaded
    :type type_id: str
    :param unit_key: unique identifier for the unit
    :type unit_key: dict
    :param metadata: extra data about the unit
    :type metadata: dict
    :param file_path: temporary location of the uploaded file
    :type file_path: str
    :param conduit: for calls back into Pulp
    :type conduit: pulp.plugins.conduit.upload.UploadConduit
    """
    if type_id != constants.TYPE_PUPPET_MODULE:
        raise NotImplementedError()

    # Extract the metadata from the module
    extracted_data = metadata_parser.extract_metadata(file_path,
                                                      repo.working_dir)

    # Overwrite the author and name
    extracted_data.update(Module.split_filename(extracted_data['name']))

    uploaded_module = Module.from_metadata(extracted_data)

    # rename the file so it has the original module name
    new_file_path = os.path.join(os.path.dirname(file_path),
                                 uploaded_module.puppet_standard_filename())
    shutil.move(file_path, new_file_path)

    uploaded_module.set_storage_path(os.path.basename(new_file_path))
    try:
        uploaded_module.save_and_import_content(new_file_path)
    except NotUniqueError:
        uploaded_module = uploaded_module.__class__.objects.get(
            **uploaded_module.unit_key)
    repo_controller.associate_single_unit(repo.repo_obj, uploaded_module)

    return {'success_flag': True, 'summary': '', 'details': {}}
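The handler above relies on Module.split_filename to overwrite the author and name parsed out of the Forge-style 'author-name' string. A hedged sketch of that splitting; the real implementation lives in pulp_puppet.plugins.db.models and may handle more edge cases:

def split_filename(full_name):
    """Split a Forge-style 'author-name' string into its two parts."""
    author, _, name = full_name.partition('-')
    return {'author': author, 'name': name}


assert split_filename('puppetlabs-stdlib') == {'author': 'puppetlabs', 'name': 'stdlib'}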
Example #9
0
    def setUp(self):
        self.working_dir = tempfile.mkdtemp(prefix='puppet-sync-tests')
        self.repo = Repository('test-repo', working_dir=self.working_dir)
        self.conduit = MockConduit()
        self.config = PluginCallConfiguration({}, {
            constants.CONFIG_FEED: FEED,
        })

        self.method = SynchronizeWithPuppetForge(self.repo, self.conduit,
                                                 self.config)

        self.sample_units = [
            Module(author='a1', name='n1', version='1.0'),
            Module(author='a2', name='n2', version='2.0'),
            Module(author='a3', name='n3', version='3.0')
        ]
Example #10
0
    def _import_modules(self, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths. This will also handle
        removing any modules from the local repository if they are no longer present in the
        remote repository and the 'remove_missing' config value is True.

        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        """
        existing_module_ids_by_key = {}
        for module in Module.objects.only(*Module.unit_key_fields).all():
            existing_module_ids_by_key[module.unit_key_str] = module.id

        remote_paths = {}

        list_of_modules = []
        for module_path in module_paths:
            puppet_manifest = self._extract_metadata(module_path)
            module = Module.from_metadata(puppet_manifest)
            remote_paths[module.unit_key_str] = module_path
            list_of_modules.append(module)

        pub_step = publish_step.GetLocalUnitsStep(
            constants.IMPORTER_TYPE_ID,
            available_units=list_of_modules,
            repo=self.repo)
        pub_step.process_main()
        self.report.modules_total_count = len(pub_step.units_to_download)

        for module in pub_step.units_to_download:
            remote_path = remote_paths[module.unit_key_str]
            if self.canceled:
                return
            _logger.debug(IMPORT_MODULE, dict(mod=remote_path))

            module.set_storage_path(os.path.basename(remote_path))
            try:
                module.save_and_import_content(remote_path)
            except NotUniqueError:
                module = module.__class__.objects.get(**module.unit_key)

            repo_controller.associate_single_unit(self.repo.repo_obj, module)

            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state not in constants.COMPLETE_STATES:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        remove_missing = self.config.get_boolean(
            constants.CONFIG_REMOVE_MISSING)
        if remove_missing is None:
            remove_missing = constants.DEFAULT_REMOVE_MISSING
        if remove_missing:
            self._remove_missing(existing_module_ids_by_key,
                                 remote_paths.keys())
Example #11
0
def handle_uploaded_unit(repo, type_id, unit_key, metadata, file_path, conduit):
    """
    Handles an upload unit request to the importer. This call is responsible
    for moving the unit from its temporary location where Pulp stored the
    upload to the final storage location (as dictated by Pulp) for the unit.
    This call will also update the database in Pulp to reflect the unit
    and its association to the repository.

    :param repo: repository into which the unit is being uploaded
    :type repo: pulp.plugins.model.Repository
    :param type_id: type of unit being uploaded
    :type type_id: str
    :param unit_key: unique identifier for the unit
    :type unit_key: dict
    :param metadata: extra data about the unit
    :type metadata: dict
    :param file_path: temporary location of the uploaded file
    :type file_path: str
    :param conduit: for calls back into Pulp
    :type conduit: pulp.plugins.conduit.upload.UploadConduit
    """
    if type_id != constants.TYPE_PUPPET_MODULE:
        raise NotImplementedError()

    # Extract the metadata from the module
    extracted_data = metadata_parser.extract_metadata(file_path, repo.working_dir)

    # Overwrite the author and name
    extracted_data.update(Module.split_filename(extracted_data['name']))

    uploaded_module = Module.from_metadata(extracted_data)

    # rename the file so it has the original module name
    new_file_path = os.path.join(os.path.dirname(file_path),
                                 uploaded_module.puppet_standard_filename())
    shutil.move(file_path, new_file_path)

    uploaded_module.set_storage_path(os.path.basename(new_file_path))
    try:
        uploaded_module.save_and_import_content(new_file_path)
    except NotUniqueError:
        uploaded_module = uploaded_module.__class__.objects.get(**uploaded_module.unit_key)
    repo_controller.associate_single_unit(repo.repo_obj, uploaded_module)

    return {'success_flag': True, 'summary': '', 'details': {}}
Example #12
    def test_migration_duplicate_unit(self, mock_modules, mock_dist,
                                      mock_association, mock_controller,
                                      mock_repo, mock_open):
        module_foo = Module(name='kung-foo', version='0.1.2', author='kung')
        module_bar = Module(name='foo', version='0.1.2', author='kung')
        module_bar.first = Mock()
        mock_modules.filter.side_effect = ([module_foo], module_bar)
        module_foo.save = Mock()
        module_foo.save.side_effect = NotUniqueError()
        repo_association = model.RepositoryContentUnit(
            repo_id='test_repo', unit_type_id='puppet_module', unit_id='bar')
        test_repo = model.Repository(repo_id='test_repo')
        mock_repo.get_repo_or_missing_resource.return_value = test_repo
        mock_association.filter.return_value = [repo_association]

        migration.migrate()

        module_foo.save.assert_called_once_with()
        mock_association.filter.assert_called_once_with(unit_id=module_foo.id)
        mock_modules.filter.assert_called_with(name='foo')
        mock_controller.disassociate_units.assert_called_once_with(
            repo_association, [module_foo])
        mock_repo.get_repo_or_missing_resource.assert_called_once_with(
            'test_repo')
        mock_controller.rebuild_content_unit_counts.assert_called_once_with(
            test_repo)
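The test above leans on Mock.side_effect sequencing: when side_effect is an iterable, successive calls return successive items, which is how the two mock_modules.filter calls get different results. A quick self-contained illustration:

from mock import Mock

filter_mock = Mock()
filter_mock.side_effect = (['first call result'], 'second call result')
assert filter_mock() == ['first call result']
assert filter_mock() == 'second call result'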
Example #13
0
    def test_duplicate_unit_names(self, mock_find):
        config = PluginCallConfiguration({}, {constants.CONFIG_INSTALL_PATH: self.puppet_dir})
        uk3 = {'author': 'puppetlabs', 'name': 'stdlib', 'version': '1.3.1'}
        unit3 = Module(_storage_path='/a/b/y', **uk3)
        self.units.append(unit3)
        mock_find.return_value = self.units

        report = self.distributor.publish_repo(self.repo, self.conduit, config)

        self.assertFalse(report.success_flag)
        self.assertTrue(isinstance(report.summary, basestring))
        self.assertEqual(len(report.details['errors']), 2)
        self.assertTrue(report.summary.find('duplicate') >= 0)
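The assertion that both units end up in report.details['errors'] implies a duplicate-name check in which every unit sharing a name is flagged, not just the second occurrence. A plain-Python sketch of such a check, using dictionaries as stand-ins for units:

from collections import Counter


def find_duplicate_names(units):
    """Return every unit whose name occurs more than once."""
    counts = Counter(unit['name'] for unit in units)
    return [unit for unit in units if counts[unit['name']] > 1]


units = [{'name': 'stdlib', 'version': '1.2.0'},
         {'name': 'java', 'version': '1.3.1'},
         {'name': 'stdlib', 'version': '1.3.1'}]
assert len(find_duplicate_names(units)) == 2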
Example #14
0
    def _import_modules(self, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths. This will also handle
        removing any modules from the local repository if they are no longer present in the
        remote repository and the 'remove_missing' config value is True.

        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        """
        existing_module_ids_by_key = {}
        for module in Module.objects.only(*Module.unit_key_fields).all():
            existing_module_ids_by_key[module.unit_key_str] = module.id

        remote_unit_keys = []

        for module_path in module_paths:
            if self.canceled:
                return
            puppet_manifest = self._extract_metadata(module_path)
            module = Module.from_metadata(puppet_manifest)
            remote_unit_keys.append(module.unit_key_str)

            # Even though we've already basically processed this unit, not doing this makes the
            # progress reporting confusing because it shows Pulp always importing all the modules.
            if module.unit_key_str in existing_module_ids_by_key:
                self.report.modules_total_count -= 1
                continue
            _logger.debug(IMPORT_MODULE, dict(mod=module_path))

            module.set_storage_path(os.path.basename(module_path))
            module.save_and_import_content(module_path)

            repo_controller.associate_single_unit(self.repo.repo_obj, module)

            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state not in constants.COMPLETE_STATES:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        remove_missing = self.config.get_boolean(
            constants.CONFIG_REMOVE_MISSING)
        if remove_missing is None:
            remove_missing = constants.DEFAULT_REMOVE_MISSING
        if remove_missing:
            self._remove_missing(existing_module_ids_by_key, remote_unit_keys)
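The remove_missing handling above (get_boolean returning None when the option is unset, then falling back to a default) is a small pattern worth isolating. A minimal sketch with a stand-in config object; DummyConfig and the bare 'remove_missing' key are assumptions replacing PluginCallConfiguration and constants.CONFIG_REMOVE_MISSING:

class DummyConfig(object):
    """Stand-in whose get_boolean() returns None when the key is absent."""

    def __init__(self, values):
        self._values = values

    def get_boolean(self, key):
        value = self._values.get(key)
        return value if isinstance(value, bool) else None


def resolve_remove_missing(config, default=False):
    value = config.get_boolean('remove_missing')
    return default if value is None else value


assert resolve_remove_missing(DummyConfig({})) is False
assert resolve_remove_missing(DummyConfig({'remove_missing': True})) is True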
Example #15
    def test_migration_duplicate_unit(self, mock_modules, mock_dist, mock_association,
                                      mock_controller, mock_repo, mock_open):
        module_foo = Module(name='kung-foo', version='0.1.2', author='kung')
        module_bar = Module(name='foo', version='0.1.2', author='kung')
        module_bar.first = Mock()
        mock_modules.filter.side_effect = ([module_foo], module_bar)
        module_foo.save = Mock()
        module_foo.save.side_effect = NotUniqueError()
        repo_association = model.RepositoryContentUnit(repo_id='test_repo',
                                                       unit_type_id='puppet_module',
                                                       unit_id='bar')
        test_repo = model.Repository(repo_id='test_repo')
        mock_repo.get_repo_or_missing_resource.return_value = test_repo
        mock_association.filter.return_value = [repo_association]

        migration.migrate()

        module_foo.save.assert_called_once_with()
        mock_association.filter.assert_called_once_with(unit_id=module_foo.id)
        mock_modules.filter.assert_called_with(name='foo')
        mock_controller.disassociate_units.assert_called_once_with(repo_association, [module_foo])
        mock_repo.get_repo_or_missing_resource.assert_called_once_with('test_repo')
        mock_controller.rebuild_content_unit_counts.assert_called_once_with(test_repo)
Example #16
0
    def _import_modules(self, module_paths):
        """
        Import the puppet modules (tarballs) at the specified paths. This will also handle
        removing any modules from the local repository if they are no longer present in the
        remote repository and the 'remove_missing' config value is True.

        :param module_paths: A list of paths to puppet module files.
        :type module_paths: list
        """
        existing_module_ids_by_key = {}
        for module in Module.objects.only(*Module.unit_key_fields).all():
            existing_module_ids_by_key[module.unit_key_str] = module.id

        remote_unit_keys = []

        for module_path in module_paths:
            if self.canceled:
                return
            puppet_manifest = self._extract_metadata(module_path)
            module = Module.from_metadata(puppet_manifest)
            remote_unit_keys.append(module.unit_key_str)

            # Even though we've already basically processed this unit, not doing this makes the
            # progress reporting confusing because it shows Pulp always importing all the modules.
            if module.unit_key_str in existing_module_ids_by_key:
                self.report.modules_total_count -= 1
                continue
            _logger.debug(IMPORT_MODULE, dict(mod=module_path))

            module.set_content(module_path)
            module.save()

            repo_controller.associate_single_unit(self.repo.repo_obj, module)

            self.report.modules_finished_count += 1
            self.report.update_progress()

        # Write the report, making sure we don't overwrite a failure in _fetch_modules
        if self.report.modules_state not in constants.COMPLETE_STATES:
            self.report.modules_state = constants.STATE_SUCCESS
        self.report.modules_execution_time = time() - self.started_fetch_modules
        self.report.update_progress()

        remove_missing = self.config.get_boolean(constants.CONFIG_REMOVE_MISSING)
        if remove_missing is None:
            remove_missing = constants.DEFAULT_REMOVE_MISSING
        if remove_missing:
            self._remove_missing(existing_module_ids_by_key, remote_unit_keys)
        repo_controller.rebuild_content_unit_counts(self.repo.repo_obj)
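This variant finishes by calling repo_controller.rebuild_content_unit_counts, i.e. recomputing the per-type unit totals for the repository from its current associations. A plain-Python sketch of what that recount amounts to (the association pairs are illustrative, not Pulp's schema):

from collections import Counter


def rebuild_counts(associations):
    """associations: iterable of (unit_type_id, unit_id) pairs for one repo."""
    return dict(Counter(unit_type for unit_type, _ in associations))


assert rebuild_counts([('puppet_module', 'a'), ('puppet_module', 'b')]) == \
    {'puppet_module': 2}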