def test_export_incremental(self):
    """
    export_incremental_content should query the conduit once per content type
    (rpm, srpm, drpm, errata, in that order) and hand the aggregated unit
    lists to the per-type export helpers.
    """
    # Setup: every get_units call yields a single fake unit
    conduit = mock.Mock(spec=RepoPublishConduit)
    conduit.get_units.return_value = ['unit']
    # rpm + srpm + drpm queries aggregate into one three-element list
    aggregated_units = ['unit', 'unit', 'unit']

    # Test
    export_utils.export_incremental_content('/working/dir', conduit, {'fake': 'filter'})

    # Confirm the conduit was queried once per content type, in order
    self.assertEqual(4, conduit.get_units.call_count)
    expected_type_ids = (ids.TYPE_ID_RPM, ids.TYPE_ID_SRPM,
                         ids.TYPE_ID_DRPM, ids.TYPE_ID_ERRATA)
    for recorded_call, type_id in zip(conduit.get_units.call_args_list,
                                      expected_type_ids):
        self.assertEqual([type_id], recorded_call[1]['criteria'].type_ids)

    # Each helper method should have received the correct directory and units
    export_utils.export_rpm.assert_called_once_with('/working/dir', aggregated_units, None)
    export_utils.export_rpm_json.assert_called_once_with('/working/dir/rpm_json',
                                                         aggregated_units)
    export_utils.export_errata_json.assert_called_once_with('/working/dir/errata_json',
                                                            ['unit'], None)
def publish_repo(self, repo, publish_conduit, config):
    """
    Export a yum repository to a given directory, or to ISO

    :param repo: metadata describing the repository
    :type  repo: pulp.plugins.model.Repository
    :param publish_conduit: provides access to relevant Pulp functionality
    :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: plugin configuration
    :type  config: pulp.plugins.config.PluginConfiguration

    :return: report describing the publish run
    :rtype: pulp.plugins.model.PublishReport

    :raises PulpDataException: if the (possibly overridden) config fails validation
    """
    # First, validate the configuration because there may be override config options, and
    # currently, validate_config is not called prior to publishing by the manager.
    valid_config, msg = export_utils.validate_export_config(config)
    if not valid_config:
        raise PulpDataException(msg)
    _logger.info('Starting export of [%s]' % repo.id)

    # Per-step progress map; each step starts out NOT_STARTED and is updated
    # by the progress_callback closure below as export/publish work proceeds.
    progress_status = {
        ids.TYPE_ID_RPM: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_ERRATA: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_DISTRO: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_PKG_CATEGORY: {'state': constants.STATE_NOT_STARTED},
        ids.TYPE_ID_PKG_GROUP: {'state': constants.STATE_NOT_STARTED},
        'metadata': {'state': constants.STATE_NOT_STARTED},
        'isos': {'state': constants.STATE_NOT_STARTED},
        'publish_http': {'state': constants.STATE_NOT_STARTED},
        'publish_https': {'state': constants.STATE_NOT_STARTED},
    }

    def progress_callback(type_id, status):
        # Record the latest status for one step and push the whole map to Pulp
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    # Retrieve a config tuple and unpack it for use.
    # NOTE(review): self.working_dir (export target, from config) is distinct from
    # repo.working_dir (scratch space cleaned below) — confirm against
    # retrieve_repo_config, which is not visible here.
    config_settings = export_utils.retrieve_repo_config(repo, config)
    self.working_dir, self.date_filter = config_settings

    # Before starting, clean out the working directory. Done to remove last published ISOs
    shutil.rmtree(repo.working_dir, ignore_errors=True)
    os.makedirs(repo.working_dir)

    # If a date filter is not present, do a complete export. If it is, do an incremental export.
    if self.date_filter:
        result = export_utils.export_incremental_content(self.working_dir, publish_conduit,
                                                         self.date_filter, progress_callback)
    else:
        result = export_utils.export_complete_repo(repo.id, self.working_dir, publish_conduit,
                                                   config, progress_callback)
    # result is a (summary, details) pair produced by the export helpers
    self.summary = result[0]
    self.details = result[1]

    if not config.get(constants.EXPORT_DIRECTORY_KEYWORD):
        # No explicit export directory: publish from the repo scratch dir
        util.generate_listing_files(repo.working_dir, self.working_dir)
        # build iso and publish via HTTPS
        self._publish_isos(repo, config, progress_callback)
    else:
        # Exporting straight to a user-supplied directory; no ISO build
        export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
        util.generate_listing_files(export_dir, self.working_dir)

    # Any accumulated errors turn the run into a failure report
    if len(self.details['errors']) != 0:
        return publish_conduit.build_failure_report(self.summary, self.details)
    return publish_conduit.build_success_report(self.summary, self.details)