def test_yum_plugin_generate_yum_metadata_checksum_default(self, mock_YumMetadataGenerator, mock_distributor_manager):
    """
    When the publish conduit carries no checksum type, generate_yum_metadata
    must fall back to metadata.DEFAULT_CHECKSUM and must not touch the
    distributor manager.
    """
    repo = mock.Mock(spec=Repository)
    repo.id = "test_publish"
    repo.working_dir = self.repo_working_dir

    units = self.get_units(count=10)
    conduit = distributor_mocks.get_publish_conduit(
        type_id="rpm", existing_units=units, checksum_type=None,
        pkg_dir=self.pkg_dir)
    cfg = distributor_mocks.get_basic_config(
        https_publish_dir=self.https_publish_dir,
        relative_url="rel_a/rel_b/rel_c/", http=False, https=True)

    yum_distributor = YumDistributor()
    yum_distributor.process_repo_auth_certificate_bundle = mock.Mock()
    cfg_conduit = mock.Mock(spec=RepoConfigConduit)
    cfg_conduit.get_repo_distributors_by_relative_url.return_value = MockCursor([])

    metadata.generate_yum_metadata(repo.id, repo.working_dir, conduit, cfg)

    # No checksum supplied anywhere -> the generator must be handed the default.
    mock_YumMetadataGenerator.assert_called_with(
        ANY, checksum_type=metadata.DEFAULT_CHECKSUM, skip_metadata_types=ANY,
        is_cancelled=ANY, group_xml_path=ANY, updateinfo_xml_path=ANY,
        custom_metadata_dict=ANY)
    self.assertFalse(mock_distributor_manager.called)
def test_yum_plugin_generate_yum_metadata_checksum_from_conduit_sha1_conversion(self, mock_YumMetadataGenerator, mock_distributor_manager):
    """
    A legacy 'sha' checksum type found on the repo scratchpad must be
    normalized to 'sha1' for metadata generation, and the converted value
    must be written back to the distributor config.
    """
    repo = mock.Mock(spec=Repository)
    repo.id = "test_publish"
    repo.working_dir = self.repo_working_dir

    units = self.get_units(count=10)
    conduit = distributor_mocks.get_publish_conduit(
        type_id="rpm", existing_units=units, pkg_dir=self.pkg_dir)
    conduit.repo_id = 'foo'
    conduit.distributor_id = TYPE_ID_DISTRIBUTOR_YUM
    cfg = distributor_mocks.get_basic_config(
        https_publish_dir=self.https_publish_dir,
        relative_url="rel_a/rel_b/rel_c/", http=False, https=True)

    yum_distributor = YumDistributor()
    yum_distributor.process_repo_auth_certificate_bundle = mock.Mock()
    cfg_conduit = mock.Mock(spec=RepoConfigConduit)
    cfg_conduit.get_repo_distributors_by_relative_url.return_value = MockCursor([])

    metadata.generate_yum_metadata(
        repo.id, repo.working_dir, conduit, cfg,
        repo_scratchpad={'checksum_type': 'sha'})

    # 'sha' is an alias for sha1; the generator sees the canonical name ...
    mock_YumMetadataGenerator.assert_called_with(
        ANY, checksum_type='sha1', skip_metadata_types=ANY, is_cancelled=ANY,
        group_xml_path=ANY, updateinfo_xml_path=ANY, custom_metadata_dict=ANY)
    # ... and the distributor config is updated with the canonical name too.
    mock_distributor_manager.return_value.update_distributor_config.\
        assert_called_with(ANY, ANY, {'checksum_type': 'sha1'})
def test_cancel_generate_repodata(self):
    """
    Verify that generate_yum_metadata honors is_cancelled=True: it returns a
    False status and reports the 'metadata' step state as CANCELED.
    """
    # Module-level global so the set_progress side effect below can hand the
    # latest progress report back to the test for assertion.
    global metadata_progress_status
    metadata_progress_status = {}

    def set_progress(progress):
        # Capture the most recent progress report passed to the conduit.
        global metadata_progress_status
        metadata_progress_status = progress

    def progress_callback(type_id, status):
        # Mimics the distributor callback: record per-type status, then push
        # the whole report through the (mocked) conduit. mock_publish_conduit
        # is resolved from the enclosing scope at call time, so defining it
        # after this closure is fine.
        metadata_progress_status[type_id] = status
        mock_publish_conduit.set_progress(metadata_progress_status)

    mock_repo = mock.Mock(spec=Repository)
    mock_repo.id = "test_repo"
    mock_repo.scratchpad = {"checksum_type" : "sha"}
    mock_repo.working_dir = os.path.join(self.temp_dir, "test_yum_repo_metadata")
    # Confirm required and optional are successful
    units_to_write = mock.Mock()
    units_to_write.metadata = {}
    units_to_write.metadata["repodata"] = {}
    repo_scratchpad = {"checksum_type" : "sha", "repodata" : {}}
    optional_kwargs = {}
    config = distributor_mocks.get_basic_config(**optional_kwargs)
    mock_publish_conduit = distributor_mocks.get_publish_conduit()
    mock_publish_conduit.set_progress = mock.Mock()
    mock_publish_conduit.set_progress.side_effect = set_progress
    # is_cancelled=True should short-circuit generation before any work is done.
    status, errors = metadata.generate_yum_metadata(
        mock_repo.working_dir, mock_publish_conduit, config,
        progress_callback=progress_callback, is_cancelled=True,
        repo_scratchpad=repo_scratchpad)
    self.assertEquals(status, False)
    self.assertEquals(metadata_progress_status['metadata']['state'], "CANCELED")
def test_yum_generate_metadata(self):
    """
    Happy-path run of generate_yum_metadata with use_createrepo disabled:
    the call must report success and the 'metadata' progress state must end
    as FINISHED.
    """
    # Module-level global so the set_progress side effect below can hand the
    # latest progress report back to the test for assertion.
    global metadata_progress_status
    metadata_progress_status = {}

    def set_progress(progress):
        # Capture the most recent progress report passed to the conduit.
        global metadata_progress_status
        metadata_progress_status = progress

    def progress_callback(type_id, status):
        # Mimics the distributor callback: record per-type status, then push
        # the whole report through the (mocked) conduit.
        metadata_progress_status[type_id] = status
        mock_publish_conduit.set_progress(metadata_progress_status)

    mock_repo = mock.Mock(spec=Repository)
    mock_repo.id = "test_repo"
    repo_scratchpad = {"checksum_type" : "sha", "repodata" : {}}
    mock_repo.working_dir = os.path.join(self.temp_dir, "test_yum_repo_metadata")
    # Confirm required and optional are successful
    # Canned per-package repodata snippets (primary/filelists/other) for a
    # dummy "feedless" package, as createrepo would produce them.
    units_to_write = mock.Mock()
    units_to_write.metadata = {}
    units_to_write.metadata["repodata"] = {}
    units_to_write.metadata["repodata"]["primary"] = """<package type="rpm"><name>feedless</name><arch>noarch</arch><version epoch="0" ver="1.0" rel="1"/><checksum type="sha" pkgid="YES">c1181097439ae4c69793c91febd8513475fb7ed6</checksum><summary>dummy testing pkg</summary><description>A dumb 1Mb pkg.</description><packager/><url/><time file="1299184404" build="1299168170"/><size package="1050973" installed="2097152" archive="1048976"/><location href="feedless-1.0-1.noarch.rpm"/><format><rpm:license>GPLv2</rpm:license><rpm:vendor/><rpm:group>Application</rpm:group><rpm:buildhost>pulp-qe-rhel5.usersys.redhat.com</rpm:buildhost><rpm:sourcerpm>feedless-1.0-1.src.rpm</rpm:sourcerpm><rpm:header-range start="456" end="1846"/><rpm:provides><rpm:entry name="feedless" flags="EQ" epoch="0" ver="1.0" rel="1"/></rpm:provides><rpm:requires><rpm:entry name="rpmlib(CompressedFileNames)" flags="LE" epoch="0" ver="3.0.4" rel="1" pre="1"/><rpm:entry name="rpmlib(PayloadFilesHavePrefix)" flags="LE" epoch="0" ver="4.0" rel="1" pre="1"/></rpm:requires></format></package>"""
    units_to_write.metadata["repodata"]["filelists"] = """<package pkgid="c1181097439ae4c69793c91febd8513475fb7ed6" name="feedless" arch="noarch"><version epoch="0" ver="1.0" rel="1"/><file>/tmp/rpm_test/feedless/key</file><file type="dir">/tmp/rpm_test/feedless</file></package>"""
    units_to_write.metadata["repodata"]["other"] = """<package pkgid="c1181097439ae4c69793c91febd8513475fb7ed6" name="feedless" arch="noarch"><version epoch="0" ver="1.0" rel="1"/></package>"""
    # use_createrepo False selects the per-package repodata code path.
    optional_kwargs = {"use_createrepo" : False}
    config = distributor_mocks.get_basic_config(**optional_kwargs)
    mock_publish_conduit = distributor_mocks.get_publish_conduit()
    mock_publish_conduit.set_progress = mock.Mock()
    mock_publish_conduit.set_progress.side_effect = set_progress
    status, errors = metadata.generate_yum_metadata(
        mock_repo.working_dir, mock_publish_conduit, config,
        progress_callback=progress_callback, repo_scratchpad=repo_scratchpad)
    self.assertEquals(status, True)
    self.assertEquals(metadata_progress_status['metadata']['state'], "FINISHED")
def _generate_metadata(self, repo, conduit, config):
    """
    Generate yum metadata for the source repo under its working directory.

    Returns the (status, errors) pair produced by
    metadata.generate_yum_metadata.
    """
    _LOG.info("Generating metadata for source repo %s" % repo.id)
    source_dir = os.path.join(repo.working_dir, repo.id)
    status, errors = metadata.generate_yum_metadata(
        source_dir,
        conduit,
        config,
        is_cancelled=self.canceled,
        repo_scratchpad=conduit.get_repo_scratchpad())
    _LOG.debug("metadata generation complete; metadata status : %s; errors : %s" % (status, errors))
    return status, errors
def export_complete_repo(repo_id, working_dir, publish_conduit, config, progress_callback=None):
    """
    Export all content types for a repository, unless the type is in the skip list.

    :param repo_id: The id of the repository being exported
    :type repo_id: str
    :param working_dir: The full path to the directory to export the content to
    :type working_dir: str
    :param publish_conduit: The publish conduit for the repository
    :type publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
    :param config: The configuration to use while exporting
    :type config: pulp.plugins.config.PluginConfiguration
    :param progress_callback: The progress callback function to use
    :type progress_callback: function
    :return: A tuple containing the summary and the details dictionaries for the export
    :rtype: tuple
    """
    groups_xml = None
    updateinfo_xml = None
    skip_types = config.get(constants.SKIP_KEYWORD) or []
    summary, details = {}, {"errors": {}}

    # Retrieve all the units associated with the repository using the conduit
    errata_criteria = UnitAssociationCriteria(type_ids=[models.Errata.TYPE])
    distro_criteria = UnitAssociationCriteria(type_ids=[models.Distribution.TYPE])
    group_criteria = UnitAssociationCriteria(type_ids=[models.PackageGroup.TYPE,
                                                       models.PackageCategory.TYPE])
    rpm_units = get_rpm_units(publish_conduit)
    errata_units = publish_conduit.get_units(errata_criteria)
    distro_units = publish_conduit.get_units(distro_criteria)
    group_units = publish_conduit.get_units(group_criteria)

    # Export the rpm units
    if models.RPM.TYPE not in skip_types:
        rpm_summary, rpm_details = export_rpm(working_dir, rpm_units, progress_callback)
        # Python 2 dict merge: later items win on key collision.
        summary = dict(summary.items() + rpm_summary.items())
        details = dict(details.items() + rpm_details.items())

    # Export the group units
    if models.PackageGroup.TYPE not in skip_types:
        groups_xml, group_summary = export_package_groups_and_cats(working_dir, group_units,
                                                                   progress_callback)
        summary = dict(summary.items() + group_summary.items())

    # Export the distribution units (return value intentionally unused here)
    if models.Distribution.TYPE not in skip_types:
        export_distribution(working_dir, distro_units, progress_callback)

    # Export the errata
    if models.Errata.TYPE not in skip_types:
        updateinfo_xml = export_errata(working_dir, errata_units, progress_callback)

    # generate metadata with a painfully long call. Note that this method retrieves all the content
    # again to generate the metadata. This could probably be optimized when the yum distributor is
    # rewritten. metadata_status is currently unused; only the errors are reported.
    metadata_status, metadata_errors = metadata.generate_yum_metadata(
        repo_id,
        working_dir,
        publish_conduit,
        config,
        progress_callback,
        False,
        groups_xml,
        updateinfo_xml,
        publish_conduit.get_repo_scratchpad(),
    )
    if metadata_errors:
        details["errors"]["metadata_errors"] = metadata_errors

    return summary, details
def publish_repo(self, repo, publish_conduit, config):
    """
    Publish the repository over HTTP/HTTPS: symlink content into the repo
    working dir, generate yum metadata, then link the working dir into the
    configured publish directories.

    :param repo:            the repository being published (working_dir, id)
    :param publish_conduit: conduit used to fetch units, scratchpad and to
                            report progress / build the final report
    :param config:          distributor configuration (skip, http, https,
                            use_createrepo, relative paths)
    :return: a success, failure, or cancel report built via the conduit
    """
    summary = {}
    details = {}
    progress_status = {
        "packages": {"state": "NOT_STARTED"},
        "distribution": {"state": "NOT_STARTED"},
        "metadata": {"state": "NOT_STARTED"},
        "packagegroups": {"state": "NOT_STARTED"},
        "publish_http": {"state": "NOT_STARTED"},
        "publish_https": {"state": "NOT_STARTED"},
    }

    def progress_callback(type_id, status):
        # Record per-step status and push the full report through the conduit.
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    self.repo_working_dir = repo.working_dir

    if self.canceled:
        return publish_conduit.build_cancel_report(summary, details)
    skip_list = config.get('skip') or []
    # Determine Content in this repo
    pkg_units = []
    pkg_errors = []
    if 'rpm' not in skip_list:
        for type_id in [TYPE_ID_RPM, TYPE_ID_SRPM]:
            criteria = UnitAssociationCriteria(type_ids=type_id,
                                               unit_fields=['id', 'name', 'version', 'release',
                                                            'arch', 'epoch', '_storage_path',
                                                            "checksum", "checksumtype"])
            pkg_units += publish_conduit.get_units(criteria=criteria)
    drpm_units = []
    if 'drpm' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
        drpm_units = publish_conduit.get_units(criteria=criteria)
    pkg_units += drpm_units
    # Create symlinks under repo.working_dir
    pkg_status, pkg_errors = self.handle_symlinks(pkg_units, repo.working_dir, progress_callback)
    if not pkg_status:
        _LOG.error("Unable to publish %s items" % (len(pkg_errors)))

    distro_errors = []
    distro_units = []
    if 'distribution' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DISTRO)
        distro_units = publish_conduit.get_units(criteria=criteria)
        # symlink distribution files if any under repo.working_dir
        distro_status, distro_errors = self.symlink_distribution_unit_files(
            distro_units, repo.working_dir, publish_conduit, progress_callback)
        if not distro_status:
            _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

    updateinfo_xml_path = None
    if 'erratum' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=TYPE_ID_ERRATA)
        errata_units = publish_conduit.get_units(criteria=criteria)
        updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo.working_dir)

    if self.canceled:
        return publish_conduit.build_cancel_report(summary, details)

    groups_xml_path = None
    existing_cats = []
    existing_groups = []
    if 'packagegroup' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
        existing_units = publish_conduit.get_units(criteria)
        existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
        existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
        groups_xml_path = comps_util.write_comps_xml(repo.working_dir, existing_groups,
                                                     existing_cats)
    metadata_start_time = time.time()
    # update/generate metadata for the published repo
    self.use_createrepo = config.get('use_createrepo')
    if self.use_createrepo:
        metadata_status, metadata_errors = metadata.generate_metadata(
            repo.working_dir, publish_conduit, config, progress_callback, groups_xml_path)
    else:
        metadata_status, metadata_errors = metadata.generate_yum_metadata(
            repo.id, repo.working_dir, publish_conduit, config, progress_callback,
            is_cancelled=self.canceled, group_xml_path=groups_xml_path,
            updateinfo_xml_path=updateinfo_xml_path,
            repo_scratchpad=publish_conduit.get_repo_scratchpad())
    metadata_end_time = time.time()

    relpath = self.get_repo_relative_path(repo, config)
    if relpath.startswith("/"):
        relpath = relpath[1:]
    # Build the https and http publishing paths
    https_publish_dir = self.get_https_publish_dir(config)
    https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
    http_publish_dir = self.get_http_publish_dir(config)
    http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')

    # Clean up the old publish directories, if they exist.
    scratchpad = publish_conduit.get_repo_scratchpad()
    if OLD_REL_PATH_KEYWORD in scratchpad:
        old_relative_path = scratchpad[OLD_REL_PATH_KEYWORD]
        old_https_repo_publish_dir = os.path.join(https_publish_dir, old_relative_path)
        if os.path.exists(old_https_repo_publish_dir):
            util.remove_repo_publish_dir(https_publish_dir, old_https_repo_publish_dir)
        old_http_repo_publish_dir = os.path.join(http_publish_dir, old_relative_path)
        if os.path.exists(old_http_repo_publish_dir):
            util.remove_repo_publish_dir(http_publish_dir, old_http_repo_publish_dir)
    # Now write the current publish relative path to the scratch pad. This way, if the relative path
    # changes before the next publish, we can clean up the old path.
    scratchpad[OLD_REL_PATH_KEYWORD] = relpath
    publish_conduit.set_repo_scratchpad(scratchpad)

    # Handle publish link for HTTPS
    if config.get("https"):
        # Publish for HTTPS
        self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
        # NOTE(review): bare except keeps a publish-link failure best-effort
        # (only the progress state is marked FAILED); intentional here.
        try:
            _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
            util.create_symlink(repo.working_dir, https_repo_publish_dir)
            util.generate_listing_files(https_publish_dir, https_repo_publish_dir)
            summary["https_publish_dir"] = https_repo_publish_dir
            self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
        except:
            self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
        # https turned off: remove any stale publish link.
        if os.path.lexists(https_repo_publish_dir):
            _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
            util.remove_repo_publish_dir(https_publish_dir, https_repo_publish_dir)

    # Handle publish link for HTTP
    if config.get("http"):
        # Publish for HTTP
        self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
        try:
            _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
            util.create_symlink(repo.working_dir, http_repo_publish_dir)
            util.generate_listing_files(http_publish_dir, http_repo_publish_dir)
            summary["http_publish_dir"] = http_repo_publish_dir
            self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
        except:
            self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
        # http turned off: remove any stale publish link.
        if os.path.lexists(http_repo_publish_dir):
            _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
            util.remove_repo_publish_dir(http_publish_dir, http_repo_publish_dir)

    summary["num_package_units_attempted"] = len(pkg_units)
    summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
    summary["num_package_units_errors"] = len(pkg_errors)
    summary["num_distribution_units_attempted"] = len(distro_units)
    summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
    summary["num_distribution_units_errors"] = len(distro_errors)
    summary["num_package_groups_published"] = len(existing_groups)
    summary["num_package_categories_published"] = len(existing_cats)
    summary["relative_path"] = relpath
    # status False with no errors means the metadata step was skipped cleanly.
    if metadata_status is False and not len(metadata_errors):
        summary["skip_metadata_update"] = True
    else:
        summary["skip_metadata_update"] = False
    details["errors"] = pkg_errors + distro_errors # metadata_errors
    details['time_metadata_sec'] = metadata_end_time - metadata_start_time
    # metadata generate skipped vs run
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (summary, details))
    if details["errors"]:
        return publish_conduit.build_failure_report(summary, details)
    return publish_conduit.build_success_report(summary, details)
def publish_repo(self, repo, publish_conduit, config):
    """
    Export the repository content to the working directory (optionally
    restricted to an errata date range), generate yum metadata, build ISOs
    and publish them.

    Accumulates results on self.summary / self.details (assumed initialized
    elsewhere on this instance — TODO confirm against __init__).

    :return: a success or failure report built via the conduit
    """
    publish_start_time = time.time()
    _LOG.info("Start publish time %s" % publish_start_time)
    progress_status = {
        "rpms": {"state": "NOT_STARTED"},
        "errata": {"state": "NOT_STARTED"},
        "distribution": {"state": "NOT_STARTED"},
        "metadata": {"state": "NOT_STARTED"},
        "packagegroups": {"state": "NOT_STARTED"},
        "isos": {"state": "NOT_STARTED"},
        "publish_http": {"state": "NOT_STARTED"},
        "publish_https": {"state": "NOT_STARTED"},
    }

    def progress_callback(type_id, status):
        # Record per-step status and push the full report through the conduit.
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    self.repo_working_dir = repo_working_dir = repo.working_dir

    if self.cancelled:
        return publish_conduit.build_failure_report(self.summary, self.details)

    skip_types = config.get("skip") or []
    repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
    date_filter = repo_exporter.create_date_range_filter(config)
    groups_xml_path = None
    updateinfo_xml_path = None
    if date_filter:
        # export errata by date and associated rpm units
        progress_status["errata"]["state"] = "STARTED"
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA],
                                           unit_filters=date_filter)
        errata_units = publish_conduit.get_units(criteria=criteria)
        rpm_units = self.__get_rpm_units(publish_conduit)
        drpm_criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
        rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
        # keep only the rpms referenced by the selected errata
        rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
        rpm_summary, rpm_errors = repo_exporter.export_rpms(
            rpm_units, progress_callback=progress_callback)
        if self.cancelled:
            return publish_conduit.build_failure_report(self.summary, self.details)
        updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
        progress_status["errata"]["num_success"] = len(errata_units)
        progress_status["errata"]["state"] = "FINISHED"
        # Python 2 dict merge: later items win on key collision.
        self.summary = dict(self.summary.items() + rpm_summary.items())
        self.summary["num_errata_units_exported"] = len(errata_units)
        self.details["errors"] = rpm_errors
    else:
        # export everything
        # export rpms
        rpm_units = self.__get_rpm_units(publish_conduit)
        drpm_criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
        rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
        rpm_summary, rpm_errors = repo_exporter.export_rpms(
            rpm_units, progress_callback=progress_callback)

        # export package groups
        groups_xml_path = None
        if "packagegroup" not in skip_types:
            progress_status["packagegroups"]["state"] = "STARTED"
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP,
                                                         TYPE_ID_PKG_CATEGORY])
            existing_units = publish_conduit.get_units(criteria=criteria)
            existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
            existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
            groups_xml_path = comps_util.write_comps_xml(repo_working_dir, existing_groups,
                                                         existing_cats)
            self.summary["num_package_groups_exported"] = len(existing_groups)
            self.summary["num_package_categories_exported"] = len(existing_cats)
            progress_status["packagegroups"]["state"] = "FINISHED"
        else:
            progress_status["packagegroups"]["state"] = "SKIPPED"
            _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)

        if self.cancelled:
            return publish_conduit.build_failure_report(self.summary, self.details)

        # export errata
        updateinfo_xml_path = None
        if 'erratum' not in skip_types:
            progress_status["errata"]["state"] = "STARTED"
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
            errata_units = publish_conduit.get_units(criteria=criteria)
            progress_status["errata"]["state"] = "IN_PROGRESS"
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            progress_status["errata"]["num_success"] = len(errata_units)
            progress_status["errata"]["state"] = "FINISHED"
            self.summary["num_errata_units_exported"] = len(errata_units)
        else:
            #errata_summary, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)
            progress_status["errata"]["state"] = "SKIPPED"
            _LOG.info("errata unit type in skip list [%s]; skipping export" % skip_types)

        # distro units
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
        distro_units = publish_conduit.get_units(criteria=criteria)
        distro_summary, distro_errors = repo_exporter.export_distributions(
            distro_units, progress_callback=progress_callback)

        # sum up summary and details
        self.details["errors"] = rpm_errors + distro_errors
        self.summary = dict(self.summary.items() + rpm_summary.items() + distro_summary.items())

    # generate metadata
    metadata_status, metadata_errors = metadata.generate_yum_metadata(
        repo_working_dir, publish_conduit, config, progress_callback,
        is_cancelled=self.cancelled, group_xml_path=groups_xml_path,
        updateinfo_xml_path=updateinfo_xml_path,
        repo_scratchpad=publish_conduit.get_repo_scratchpad())
    _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
    self.details["errors"] += metadata_errors

    # build iso and publish via HTTPS
    self._publish_isos(repo, config, progress_callback=progress_callback)
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (self.summary, self.details))

    # remove exported content from working directory
    iso_util.cleanup_working_dir(self.repo_working_dir)
    if self.details["errors"]:
        return publish_conduit.build_failure_report(self.summary, self.details)
    return publish_conduit.build_success_report(self.summary, self.details)
def publish_repo(self, repo, publish_conduit, config):
    """
    Publish the repository over HTTP/HTTPS: symlink rpm/drpm/distribution
    content into the repo working dir, generate metadata, then link the
    working dir into the configured publish directories.

    :return: a success or failure report built via the conduit
    """
    summary = {}
    details = {}
    progress_status = {
        "packages": {"state": "NOT_STARTED"},
        "distribution": {"state": "NOT_STARTED"},
        "metadata": {"state": "NOT_STARTED"},
        "packagegroups": {"state": "NOT_STARTED"},
        "publish_http": {"state": "NOT_STARTED"},
        "publish_https": {"state": "NOT_STARTED"},
    }

    def progress_callback(type_id, status):
        # Record per-step status and push the full report through the conduit.
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    self.repo_working_dir = repo.working_dir

    if self.canceled:
        return publish_conduit.build_failure_report(summary, details)
    skip_list = config.get('skip') or []
    # Determine Content in this repo
    unfiltered_units = publish_conduit.get_units()
    # filter compatible units
    rpm_units = filter(lambda u : u.type_id in [TYPE_ID_RPM, TYPE_ID_SRPM], unfiltered_units)
    drpm_units = filter(lambda u : u.type_id == TYPE_ID_DRPM, unfiltered_units)

    rpm_errors = []
    if 'rpm' not in skip_list:
        _LOG.debug("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                % (repo.id, len(unfiltered_units), len(rpm_units)))
        # Create symlinks under repo.working_dir
        rpm_status, rpm_errors = self.handle_symlinks(rpm_units, repo.working_dir,
                                                      progress_callback)
        if not rpm_status:
            _LOG.error("Unable to publish %s items" % (len(rpm_errors)))
    drpm_errors = []
    if 'drpm' not in skip_list:
        _LOG.debug("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                % (repo.id, len(unfiltered_units), len(drpm_units)))
        # Create symlinks under repo.working_dir
        drpm_status, drpm_errors = self.handle_symlinks(drpm_units, repo.working_dir,
                                                        progress_callback)
        if not drpm_status:
            _LOG.error("Unable to publish %s items" % (len(drpm_errors)))
    pkg_errors = rpm_errors + drpm_errors
    pkg_units = rpm_units + drpm_units

    distro_errors = []
    distro_units = filter(lambda u: u.type_id == TYPE_ID_DISTRO, unfiltered_units)
    if 'distribution' not in skip_list:
        # symlink distribution files if any under repo.working_dir
        distro_status, distro_errors = self.symlink_distribution_unit_files(
            distro_units, repo.working_dir, progress_callback)
        if not distro_status:
            _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

    updateinfo_xml_path = None
    if 'erratum' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=TYPE_ID_ERRATA)
        errata_units = publish_conduit.get_units(criteria=criteria)
        updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo.working_dir)

    if self.canceled:
        return publish_conduit.build_failure_report(summary, details)

    groups_xml_path = None
    existing_cats = []
    existing_groups = []
    if 'packagegroup' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
        existing_units = publish_conduit.get_units(criteria)
        existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
        existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
        groups_xml_path = comps_util.write_comps_xml(repo.working_dir, existing_groups,
                                                     existing_cats)
    metadata_start_time = time.time()
    # update/generate metadata for the published repo
    self.use_createrepo = config.get('use_createrepo')
    if self.use_createrepo:
        metadata_status, metadata_errors = metadata.generate_metadata(
            repo.working_dir, publish_conduit, config, progress_callback, groups_xml_path)
    else:
        # default to per package metadata
        metadata_status, metadata_errors = metadata.generate_yum_metadata(
            repo.working_dir, rpm_units, config, progress_callback,
            is_cancelled=self.canceled, group_xml_path=groups_xml_path,
            updateinfo_xml_path=updateinfo_xml_path,
            repo_scratchpad=publish_conduit.get_repo_scratchpad())
    metadata_end_time = time.time()

    relpath = self.get_repo_relative_path(repo, config)
    if relpath.startswith("/"):
        relpath = relpath[1:]
    #
    # Handle publish link for HTTPS
    #
    https_publish_dir = self.get_https_publish_dir(config)
    https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
    if config.get("https"):
        # Publish for HTTPS
        self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
        # NOTE(review): bare except keeps a publish-link failure best-effort
        # (only the progress state is marked FAILED); intentional here.
        try:
            _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
            util.create_symlink(repo.working_dir, https_repo_publish_dir)
            summary["https_publish_dir"] = https_repo_publish_dir
            self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
        except:
            self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
        # https turned off: remove any stale publish link.
        if os.path.lexists(https_repo_publish_dir):
            _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
            util.remove_symlink(https_publish_dir, https_repo_publish_dir)
    #
    # Handle publish link for HTTP
    #
    http_publish_dir = self.get_http_publish_dir(config)
    http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')
    if config.get("http"):
        # Publish for HTTP
        self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
        try:
            _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
            util.create_symlink(repo.working_dir, http_repo_publish_dir)
            summary["http_publish_dir"] = http_repo_publish_dir
            self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
        except:
            self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
        # http turned off: remove any stale publish link.
        if os.path.lexists(http_repo_publish_dir):
            _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
            util.remove_symlink(http_publish_dir, http_repo_publish_dir)

    summary["num_package_units_attempted"] = len(pkg_units)
    summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
    summary["num_package_units_errors"] = len(pkg_errors)
    summary["num_distribution_units_attempted"] = len(distro_units)
    summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
    summary["num_distribution_units_errors"] = len(distro_errors)
    summary["num_package_groups_published"] = len(existing_groups)
    summary["num_package_categories_published"] = len(existing_cats)
    summary["relative_path"] = relpath
    # status False with no errors means the metadata step was skipped cleanly.
    if metadata_status is False and not len(metadata_errors):
        summary["skip_metadata_update"] = True
    else:
        summary["skip_metadata_update"] = False
    details["errors"] = pkg_errors + distro_errors + metadata_errors
    details['time_metadata_sec'] = metadata_end_time - metadata_start_time
    # metadata generate skipped vs run
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (summary, details))
    if details["errors"]:
        return publish_conduit.build_failure_report(summary, details)
    return publish_conduit.build_success_report(summary, details)
def publish_group(self, repo_group, publish_conduit, config):
    """
    Export every repository in the group (optionally restricted to an errata
    date range), generate yum metadata per repo, then build and publish ISOs.

    Results accumulate per repo in self.group_summary / self.group_details.

    Fix vs. previous revision: the final error check used
    ``not len([group_details[r]["errors"] for r in ...])``, which counts
    repositories (one list element per repo regardless of content), so any
    group with at least one repo was always reported as success and an empty
    group as failure. It now tests the error lists themselves.

    :return: a success or failure report built via the conduit
    """
    self.group_working_dir = group_working_dir = repo_group.working_dir
    skip_types = config.get("skip") or []
    self.init_group_progress()
    self.group_progress_status["group-id"] = repo_group.id

    # progress callback for group status
    def group_progress_callback(type_id, status):
        self.group_progress_status[type_id] = status
        publish_conduit.set_progress(self.group_progress_status)

    # loop through each repo in the group and perform exports
    for repoid in repo_group.repo_ids:
        _LOG.info("Exporting repo %s " % repoid)
        summary = {}
        details = {}
        progress_status = {
            "rpms": {"state": "NOT_STARTED"},
            "errata": {"state": "NOT_STARTED"},
            "distribution": {"state": "NOT_STARTED"},
            "packagegroups": {"state": "NOT_STARTED"},}

        # per-repo progress callback; rebinds progress_status per iteration
        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        repo_working_dir = "%s/%s" % (group_working_dir, repoid)
        repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
        # check if any datefilter is set on the distributor
        date_filter = repo_exporter.create_date_range_filter(config)
        _LOG.debug("repo working dir %s" % repo_working_dir)
        groups_xml_path = None
        updateinfo_xml_path = None
        if date_filter:
            # If a date range is specified, we only export the errata within that range
            # and associated rpm units. This might change once we have dates associated
            # to other units.
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA],
                                               unit_filters=date_filter)
            errata_units = publish_conduit.get_units(repoid, criteria=criteria)
            # we only include binary and source; drpms are not associated to errata
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM])
            rpm_units = publish_conduit.get_units(repoid, criteria=criteria)
            rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
            rpm_status, rpm_errors = repo_exporter.export_rpms(
                rpm_units, progress_callback=progress_callback)
            if self.canceled:
                return publish_conduit.build_failure_report(summary, details)
            # export errata and generate updateinfo xml
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            progress_status["errata"]["num_success"] = len(errata_units)
            progress_status["errata"]["state"] = "FINISHED"
            summary["num_package_units_attempted"] = len(rpm_units)
            summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
            summary["num_package_units_errors"] = len(rpm_errors)
            summary["num_errata_units_exported"] = len(errata_units)
            details["errors"] = rpm_errors
        else:
            # export rpm units(this includes binary, source and delta)
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM,
                                                         TYPE_ID_DRPM])
            rpm_units = publish_conduit.get_units(repoid, criteria)
            rpm_status, rpm_errors = repo_exporter.export_rpms(
                rpm_units, progress_callback=progress_callback)
            summary["num_package_units_attempted"] = len(rpm_units)
            summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
            summary["num_package_units_errors"] = len(rpm_errors)

            # export package groups information and generate comps.xml
            groups_xml_path = None
            if "packagegroup" not in skip_types:
                progress_status["packagegroups"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP,
                                                             TYPE_ID_PKG_CATEGORY])
                existing_units = publish_conduit.get_units(repoid, criteria)
                existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP],
                                         existing_units)
                existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY],
                                       existing_units)
                groups_xml_path = comps_util.write_comps_xml(repo_working_dir,
                                                             existing_groups, existing_cats)
                summary["num_package_groups_exported"] = len(existing_groups)
                summary["num_package_categories_exported"] = len(existing_cats)
                progress_status["packagegroups"]["state"] = "FINISHED"
            else:
                progress_status["packagegroups"]["state"] = "SKIPPED"
                _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)

            if self.canceled:
                return publish_conduit.build_failure_report(summary, details)

            # export errata units and associated rpms
            progress_status["errata"]["state"] = "STARTED"
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
            errata_units = publish_conduit.get_units(repoid, criteria=criteria)
            progress_status["errata"]["state"] = "IN_PROGRESS"
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            progress_status["errata"]["num_success"] = len(errata_units)
            progress_status["errata"]["state"] = "FINISHED"
            summary["num_errata_units_exported"] = len(errata_units)

            # export distributions
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
            distro_units = publish_conduit.get_units(repoid, criteria)
            distro_status, distro_errors = repo_exporter.export_distributions(
                distro_units, progress_callback=progress_callback)
            summary["num_distribution_units_attempted"] = len(distro_units)
            summary["num_distribution_units_exported"] = len(distro_units) - len(distro_errors)
            summary["num_distribution_units_errors"] = len(distro_errors)
            self.group_progress_status["repositories"][repoid] = progress_status
            self.set_progress("repositories", self.group_progress_status["repositories"],
                              group_progress_callback)
            details["errors"] = rpm_errors + distro_errors

        self.group_summary[repoid] = summary
        self.group_details[repoid] = details

        # generate metadata for the exported repo
        repo_scratchpad = publish_conduit.get_repo_scratchpad(repoid)
        metadata_status, metadata_errors = metadata.generate_yum_metadata(
            repo_working_dir, rpm_units, config, progress_callback,
            is_cancelled=self.canceled, group_xml_path=groups_xml_path,
            updateinfo_xml_path=updateinfo_xml_path, repo_scratchpad=repo_scratchpad)
        # details is the same dict stored in self.group_details above
        details["errors"] += metadata_errors

    # generate and publish isos
    self._publish_isos(repo_group, config, progress_callback=group_progress_callback)
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (self.group_summary,
                                                                    self.group_details))
    # remove exported content from working directory
    iso_util.cleanup_working_dir(self.group_working_dir)
    # Fail the publish if any repo accumulated errors; previously this tested
    # len() of a one-entry-per-repo list and could never detect real errors.
    if any(self.group_details[repoid]["errors"] for repoid in self.group_details.keys()):
        return publish_conduit.build_failure_report(self.group_summary, self.group_details)
    return publish_conduit.build_success_report(self.group_summary, self.group_details)