def test_skip_list(self, mock_metadata):
    """
    Verify that no per-type export helper runs when its unit type is in the skip list.
    """
    # Skip every supported unit type so none of the exporters should fire.
    skipped_types = [ids.TYPE_ID_RPM, ids.TYPE_ID_PKG_GROUP, ids.TYPE_ID_DISTRO,
                     ids.TYPE_ID_ERRATA]
    call_config = PluginCallConfiguration({}, {constants.SKIP_KEYWORD: skipped_types})
    conduit = mock.Mock(spec=RepoPublishConduit)
    mock_metadata.return_value = (None, None)

    # Exercise the export with everything skipped.
    export_utils.export_complete_repo('repo_id', '/working/dir', conduit, call_config, None)

    # None of the type-specific exporters should have been invoked.
    for exporter in (export_utils.export_rpm, export_utils.export_package_groups_and_cats,
                     export_utils.export_distribution, export_utils.export_errata):
        self.assertEqual(0, exporter.call_count)
def test_metadata_errors(self, mock_metadata):
    """
    Verify that errors returned by metadata.generate_yum_metadata are surfaced in the
    details dictionary under 'metadata_errors'.
    """
    # Skip all unit types so only the metadata step contributes to the result.
    skipped_types = [ids.TYPE_ID_RPM, ids.TYPE_ID_PKG_GROUP, ids.TYPE_ID_DISTRO,
                     ids.TYPE_ID_ERRATA]
    call_config = PluginCallConfiguration({}, {constants.SKIP_KEYWORD: skipped_types})
    conduit = mock.Mock(spec=RepoPublishConduit)
    # The second element of the returned tuple carries the metadata error list.
    mock_metadata.return_value = (None, ['error'])

    summary, details = export_utils.export_complete_repo('repo_id', '/working/dir', conduit,
                                                         call_config, None)

    # Summary stays empty; the metadata errors land in the details dict.
    self.assertEqual({}, summary)
    self.assertEqual({'errors': {'metadata_errors': ['error']}}, details)
def test_export_complete_repo(self, mock_metadata):
    """
    Verify that a full export (no skip list) invokes each type-specific helper exactly once.
    """
    call_config = PluginCallConfiguration({}, {})
    conduit = mock.Mock(spec=RepoPublishConduit)
    mock_metadata.return_value = (None, None)
    # Give the patched helpers return values matching their real (summary, details) shape.
    export_utils.export_rpm.return_value = ({}, {})
    export_utils.export_package_groups_and_cats.return_value = (None, {})

    summary, details = export_utils.export_complete_repo('repo_id', '/working/dir', conduit,
                                                         call_config, None)

    # Every exporter should have run exactly once.
    for exporter in (export_utils.export_rpm, export_utils.export_package_groups_and_cats,
                     export_utils.export_distribution, export_utils.export_errata):
        self.assertEqual(1, exporter.call_count)
    self.assertEqual({}, summary)
    self.assertEqual({'errors': {}}, details)
def publish_repo(self, repo, publish_conduit, config):
    """
    Export a yum repository to a given directory, or to ISO

    :param repo: metadata describing the repository
    :type repo: pulp.plugins.model.Repository
    :param publish_conduit: provides access to relevant Pulp functionality
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginConfiguration

    :return: report describing the publish run
    :rtype: pulp.plugins.model.PublishReport

    :raises PulpDataException: if the supplied configuration fails validation
    """
    # First, validate the configuration because there may be override config options, and
    # currently, validate_config is not called prior to publishing by the manager.
    valid_config, msg = export_utils.validate_export_config(config)
    if not valid_config:
        raise PulpDataException(msg)
    _logger.info('Starting export of [%s]' % repo.id)

    # Seed every tracked step as not-started so set_progress always reports a full picture.
    progress_status = {
        ids.TYPE_ID_RPM: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_ERRATA: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_DISTRO: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_PKG_CATEGORY: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_PKG_GROUP: {'state': constants.STATE_NOT_STARTED},
        'metadata': {'state': constants.STATE_NOT_STARTED},
        'isos': {'state': constants.STATE_NOT_STARTED},
        'publish_http': {'state': constants.STATE_NOT_STARTED},
        'publish_https': {'state': constants.STATE_NOT_STARTED},
    }

    # Closure handed to the export helpers; updates one step's status and pushes the
    # whole progress dict back through the conduit.
    def progress_callback(type_id, status):
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    # Retrieve a config tuple and unpack it for use
    config_settings = export_utils.retrieve_repo_config(repo, config)
    self.working_dir, self.date_filter = config_settings

    # Before starting, clean out the working directory. Done to remove last published ISOs
    shutil.rmtree(repo.working_dir, ignore_errors=True)
    os.makedirs(repo.working_dir)

    # If a date filter is not present, do a complete export. If it is, do an incremental export.
    if self.date_filter:
        result = export_utils.export_incremental_content(self.working_dir, publish_conduit,
                                                         self.date_filter, progress_callback)
    else:
        result = export_utils.export_complete_repo(repo.id, self.working_dir, publish_conduit,
                                                   config, progress_callback)
    # Both export helpers appear to return a (summary, details) pair — TODO confirm.
    self.summary = result[0]
    self.details = result[1]

    if not config.get(constants.EXPORT_DIRECTORY_KEYWORD):
        # No export directory configured: publish the content as ISOs instead.
        util.generate_listing_files(repo.working_dir, self.working_dir)
        # build iso and publish via HTTPS
        self._publish_isos(repo, config, progress_callback)
    else:
        # Export directory configured: content was written there; just emit listing files.
        export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
        util.generate_listing_files(export_dir, self.working_dir)

    # Any accumulated errors turn the publish into a failure report.
    if len(self.details['errors']) != 0:
        return publish_conduit.build_failure_report(self.summary, self.details)
    return publish_conduit.build_success_report(self.summary, self.details)