Example #1
    def test_yum_bug_worked_around(self, mock_open, mock_error):
        mock_open.return_value = StringIO()

        # The following will fail for yum version 3.4.3-111 without pulp's
        # workaround in place.
        updateinfo.updateinfo([self.unit], '/a/b/c')

        # Sadly, the only way we have to detect a failure is that it gets logged.
        self.assertEqual(mock_error.call_count, 0)

        # make sure some data was written
        self.assertTrue(mock_open.return_value.len > 0)
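
The patch decorators that supply mock_open and mock_error are cut off in the snippet above. Below is a minimal sketch of how such a test could be wired up with mock.patch; the patch targets, the TestCase scaffolding and the trimmed body are assumptions for illustration, not taken from the original test module.

import unittest

import mock
from StringIO import StringIO


class UpdateinfoTest(unittest.TestCase):

    # Hypothetical patch targets -- the real decorators are not shown above.
    # Stacked mock.patch decorators are applied bottom-up, so the patch closest
    # to the function (open) is passed first as mock_open, then mock_error.
    @mock.patch('pulp_rpm.yum_plugin.updateinfo._LOG.error')
    @mock.patch('__builtin__.open')
    def test_yum_bug_worked_around(self, mock_open, mock_error):
        # Replace the file handle with an in-memory buffer so the test can
        # inspect what updateinfo() wrote without touching the filesystem.
        mock_open.return_value = StringIO()
        # ...then call updateinfo.updateinfo(...) and assert as in the example above...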
Example #2
    def export_errata(self, errata_units, progress_callback=None):
        """
         This call looks up each errata unit and its associated rpms, exports
         the rpm units to the working directory and generates updateinfo xml for
         the exported errata metadata.

        @param errata_units: errata units to export
        @type  errata_units: list of AssociatedUnit

        @param progress_callback: callback to report progress info to publish_conduit
        @type  progress_callback: function

        @return: tuple of summary dict and list of error messages if any occurred
        @rtype:  ({}, [str])
        """
        summary = {}
        repo_working_dir = self.repo_working_dir
        errors = []
        errata_progress_status = self.init_progress()
        if not errata_units:
            errata_progress_status["state"] = "FINISHED"
            self.set_progress("errata", errata_progress_status, progress_callback)
            return summary, errors
        if 'errata' in self.skip:
            errata_progress_status["state"] = "SKIPPED"
            self.set_progress("errata", errata_progress_status, progress_callback)
            _LOG.info("errata unit type in skip list [%s]; skipping export" % self.skip)
            return summary, errors
        errata_progress_status["num_success"] = 0
        errata_progress_status["items_left"] = len(errata_units)
        errata_progress_status["items_total"] = len(errata_units)
        summary["num_errata_units_exported"] = 0
        try:
            errata_progress_status['state'] = "IN_PROGRESS"
            self.set_progress("errata", errata_progress_status, progress_callback)

            updateinfo_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            if updateinfo_path:
                repodata_dir = os.path.join(repo_working_dir, "repodata")
                if not os.path.exists(repodata_dir):
                    _LOG.error("Missing repodata; cannot run modifyrepo")
                    return summary, errors
                _LOG.debug("Modifying repo for updateinfo")
                metadata.modify_repo(repodata_dir,  updateinfo_path)
            errata_progress_status["num_success"] = len(errata_units)
            errata_progress_status["items_left"] = 0
        except metadata.ModifyRepoError, mre:
            details["errors"] = errors
            msg = "Unable to run modifyrepo to include updateinfo at target location %s; Error: %s" % (repo_working_dir, str(mre))
            errors.append(msg)
            _LOG.error(msg)
            errata_progress_status['state'] = "FAILED"
            errata_progress_status["num_success"] = 0
            errata_progress_status["items_left"] = len(errata_units)
            return summary, errors
Example #3
    def _export_errata(self, errata_units, repo_working_dir, progress_callback=None):
        """
         This call looks up each errata unit and its associated rpms, exports
         the rpm units to the working directory and generates updateinfo xml for
         the exported errata metadata.

        @param errata_units: errata units to export
        @type  errata_units: list of AssociatedUnit

        @param repo_working_dir: path of where we want the symlink and repodata to reside
        @type repo_working_dir str

        @param progress_callback: callback to report progress info to publish_conduit
        @type  progress_callback: function

        @return: tuple of success flag and list of error messages if any occurred
        @rtype:  (bool, [str])
        """
        errors = []
        errata_progress_status = self.init_progress()
        if not errata_units:
            return True, []
        errata_progress_status["num_success"] = 0
        errata_progress_status["items_left"] = len(errata_units)
        errata_progress_status["items_total"] = len(errata_units)
        try:
            errata_progress_status['state'] = "IN_PROGRESS"
            self.set_progress("errata", errata_progress_status, progress_callback)

            updateinfo_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            if updateinfo_path:
                repodata_dir = os.path.join(repo_working_dir, "repodata")
                if not os.path.exists(repodata_dir):
                    _LOG.error("Missing repodata; cannot run modifyrepo")
                    return False, []
                _LOG.debug("Modifying repo for updateinfo")
                metadata.modify_repo(repodata_dir,  updateinfo_path)
            errata_progress_status["num_success"] = len(errata_units)
            errata_progress_status["items_left"] = 0
        except metadata.ModifyRepoError, mre:
            msg = "Unable to run modifyrepo to include updateinfo at target location %s; Error: %s" % (repo_working_dir, str(mre))
            errors.append(msg)
            _LOG.error(msg)
            errata_progress_status['state'] = "FAILED"
            errata_progress_status["num_success"] = 0
            errata_progress_status["items_left"] = len(errata_units)
            return False, errors
Example #4
    def export_errata(self, errata_units, progress_callback=None):
        """
         This call looks up each errata unit and its associated rpms, exports
         the rpm units to the working directory and generates updateinfo xml for
         the exported errata metadata.

        @param errata_units: errata units to export
        @type  errata_units: list of AssociatedUnit

        @param progress_callback: callback to report progress info to publish_conduit
        @type  progress_callback: function

        @return: tuple of summary dict and list of error messages if any occurred
        @rtype:  ({}, [str])
        """
        summary = {}
        repo_working_dir = self.repo_working_dir
        errors = []
        summary["updateinfo_xml_path"] = None
        errata_progress_status = self.init_progress()
        if not errata_units:
            errata_progress_status["state"] = "FINISHED"
            self.set_progress("errata", errata_progress_status, progress_callback)
            return summary, errors
        if 'errata' in self.skip:
            errata_progress_status["state"] = "SKIPPED"
            self.set_progress("errata", errata_progress_status, progress_callback)
            _LOG.info("errata unit type in skip list [%s]; skipping export" % self.skip)
            return summary, errors
        errata_progress_status["num_success"] = 0
        errata_progress_status["items_left"] = len(errata_units)
        errata_progress_status["items_total"] = len(errata_units)
        summary["num_errata_units_exported"] = 0
        try:
            errata_progress_status['state'] = "IN_PROGRESS"
            self.set_progress("errata", errata_progress_status, progress_callback)
            updateinfo_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            summary["updateinfo_xml_path"] = updateinfo_path
            errata_progress_status["num_success"] = len(errata_units)
            errata_progress_status["items_left"] = 0
        except Exception, e:
            errors.append(str(e))
            errata_progress_status['state'] = "FAILED"
            errata_progress_status["num_success"] = 0
            errata_progress_status["items_left"] = len(errata_units)
            return summary, errors
Example #5
def export_errata(working_dir, errata_units, progress_callback=None):
    """
    This call writes the given errata units to an updateinfo.xml file in the working directory.
    This does not export any packages associated with the errata.

    :param working_dir:         The full path to the directory to export the content to
    :type  working_dir:         str
    :param errata_units:        the errata units to write to the updateinfo.xml file
    :type  errata_units:        list of pulp.plugins.model.AssociatedUnit
    :param progress_callback:   callback to report progress info to publish_conduit
    :type  progress_callback:   function

    :return: The updateinfo.xml file path, or None if there were no errata units to export
    :rtype:  str
    """
    progress_status = init_progress_report(len(errata_units))

    # If there are no errata units to export, quit.
    if not errata_units:
        progress_status[constants.PROGRESS_STATE_KEY] = constants.STATE_COMPLETE
        set_progress(ids.TYPE_ID_ERRATA, progress_status, progress_callback)
        return

    # Update the progress status
    progress_status[constants.PROGRESS_STATE_KEY] = constants.STATE_RUNNING
    set_progress(ids.TYPE_ID_ERRATA, progress_status, progress_callback)

    # Write the updateinfo.xml file to the working directory
    updateinfo_path = updateinfo.updateinfo(errata_units, working_dir)

    # Set the progress status, summary, and details
    progress_status[constants.PROGRESS_STATE_KEY] = constants.STATE_COMPLETE
    progress_status[constants.PROGRESS_NUM_SUCCESS_KEY] = len(errata_units)
    progress_status[constants.PROGRESS_ITEMS_LEFT_KEY] = 0
    set_progress(ids.TYPE_ID_ERRATA, progress_status, progress_callback)

    return updateinfo_path
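
The progress_callback that export_errata forwards to set_progress has the same two-argument (type_id, status) shape as the callbacks defined in the publish examples below. A minimal driving sketch, assuming export_errata from the example above is importable in the current namespace; the callback, the working directory path and the empty unit list are illustrative stand-ins.

def progress_callback(type_id, status):
    # Same (type_id, status) signature as the callbacks in the publish examples below.
    print('%s progress: %s' % (type_id, status))

# In a real publish the units come from publish_conduit.get_units(); an empty
# list simply exercises the early-return path of export_errata shown above.
errata_units = []
updateinfo_xml_path = export_errata('/tmp/export_working_dir', errata_units,
                                    progress_callback=progress_callback)
if updateinfo_xml_path:
    print('updateinfo.xml written to %s' % updateinfo_xml_path)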
Example #6
    def publish_repo(self, repo, publish_conduit, config):
        summary = {}
        details = {}
        progress_status = {
            "packages":           {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "metadata":           {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            "publish_https":      {"state": "NOT_STARTED"},
            }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo.working_dir

        if self.canceled:
            return publish_conduit.build_cancel_report(summary, details)
        skip_list = config.get('skip') or []
        # Determine Content in this repo
        pkg_units = []
        pkg_errors = []
        if 'rpm' not in skip_list:
            for type_id in [TYPE_ID_RPM, TYPE_ID_SRPM]:
                criteria = UnitAssociationCriteria(type_ids=type_id,
                    unit_fields=['id', 'name', 'version', 'release', 'arch', 'epoch', '_storage_path', "checksum", "checksumtype" ])
                pkg_units += publish_conduit.get_units(criteria=criteria)
            drpm_units = []
            if 'drpm' not in skip_list:
                criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
                drpm_units = publish_conduit.get_units(criteria=criteria)
            pkg_units += drpm_units
            # Create symlinks under repo.working_dir
            pkg_status, pkg_errors = self.handle_symlinks(pkg_units, repo.working_dir, progress_callback)
            if not pkg_status:
                _LOG.error("Unable to publish %s items" % (len(pkg_errors)))

        distro_errors = []
        distro_units =  []
        if 'distribution' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DISTRO)
            distro_units = publish_conduit.get_units(criteria=criteria)
            # symlink distribution files if any under repo.working_dir
            distro_status, distro_errors = self.symlink_distribution_unit_files(distro_units, repo.working_dir, publish_conduit, progress_callback)
            if not distro_status:
                _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

        updateinfo_xml_path = None
        if 'erratum' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=TYPE_ID_ERRATA)
            errata_units = publish_conduit.get_units(criteria=criteria)
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo.working_dir)

        if self.canceled:
            return publish_conduit.build_cancel_report(summary, details)
        groups_xml_path = None
        existing_cats = []
        existing_groups = []
        if 'packagegroup' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
            existing_units = publish_conduit.get_units(criteria)
            existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
            existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
            groups_xml_path = comps_util.write_comps_xml(repo.working_dir, existing_groups, existing_cats)
        metadata_start_time = time.time()
        # update/generate metadata for the published repo
        self.use_createrepo = config.get('use_createrepo')
        if self.use_createrepo:
            metadata_status, metadata_errors = metadata.generate_metadata(
                repo.working_dir, publish_conduit, config, progress_callback, groups_xml_path)
        else:
            metadata_status, metadata_errors = metadata.generate_yum_metadata(repo.id, repo.working_dir, publish_conduit, config,
                progress_callback, is_cancelled=self.canceled, group_xml_path=groups_xml_path, updateinfo_xml_path=updateinfo_xml_path,
                repo_scratchpad=publish_conduit.get_repo_scratchpad())

        metadata_end_time = time.time()
        relpath = self.get_repo_relative_path(repo, config)
        if relpath.startswith("/"):
            relpath = relpath[1:]

        # Build the https and http publishing paths
        https_publish_dir = self.get_https_publish_dir(config)
        https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
        http_publish_dir = self.get_http_publish_dir(config)
        http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')

        # Clean up the old publish directories, if they exist.
        scratchpad = publish_conduit.get_repo_scratchpad()
        if OLD_REL_PATH_KEYWORD in scratchpad:
            old_relative_path = scratchpad[OLD_REL_PATH_KEYWORD]
            old_https_repo_publish_dir = os.path.join(https_publish_dir, old_relative_path)
            if os.path.exists(old_https_repo_publish_dir):
                util.remove_repo_publish_dir(https_publish_dir, old_https_repo_publish_dir)
            old_http_repo_publish_dir = os.path.join(http_publish_dir, old_relative_path)
            if os.path.exists(old_http_repo_publish_dir):
                util.remove_repo_publish_dir(http_publish_dir, old_http_repo_publish_dir)

        # Now write the current publish relative path to the scratch pad. This way, if the relative path
        # changes before the next publish, we can clean up the old path.
        scratchpad[OLD_REL_PATH_KEYWORD] = relpath
        publish_conduit.set_repo_scratchpad(scratchpad)

        # Handle publish link for HTTPS
        if config.get("https"):
            # Publish for HTTPS
            self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
                util.create_symlink(repo.working_dir, https_repo_publish_dir)
                util.generate_listing_files(https_publish_dir, https_repo_publish_dir)
                summary["https_publish_dir"] = https_repo_publish_dir
                self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
            except:
                self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(https_repo_publish_dir):
                _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
                util.remove_repo_publish_dir(https_publish_dir, https_repo_publish_dir)

        # Handle publish link for HTTP
        if config.get("http"):
            # Publish for HTTP
            self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
                util.create_symlink(repo.working_dir, http_repo_publish_dir)
                util.generate_listing_files(http_publish_dir, http_repo_publish_dir)
                summary["http_publish_dir"] = http_repo_publish_dir
                self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
            except:
                self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(http_repo_publish_dir):
                _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
                util.remove_repo_publish_dir(http_publish_dir, http_repo_publish_dir)

        summary["num_package_units_attempted"] = len(pkg_units)
        summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
        summary["num_package_units_errors"] = len(pkg_errors)
        summary["num_distribution_units_attempted"] = len(distro_units)
        summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
        summary["num_distribution_units_errors"] = len(distro_errors)
        summary["num_package_groups_published"] = len(existing_groups)
        summary["num_package_categories_published"] = len(existing_cats)
        summary["relative_path"] = relpath
        if metadata_status is False and not len(metadata_errors):
            summary["skip_metadata_update"] = True
        else:
            summary["skip_metadata_update"] = False
        details["errors"] = pkg_errors + distro_errors # metadata_errors
        details['time_metadata_sec'] = metadata_end_time - metadata_start_time
        # metadata generate skipped vs run
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (summary, details))
        if details["errors"]:
            return publish_conduit.build_failure_report(summary, details)
        return publish_conduit.build_success_report(summary, details)
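
For reference, this publish path reads only a handful of configuration keys directly: skip, use_createrepo, https and http (get_repo_relative_path may consult others). A plain-dict sketch of values that would exercise the HTTPS branch; in Pulp the config argument is a plugin configuration object rather than a bare dict, so this is illustration only.

# Hypothetical values; only the keys read directly by the method above are shown.
config = {
    'skip': ['drpm', 'packagegroup'],  # unit types to leave out of this publish
    'use_createrepo': False,           # take the per-package yum metadata path instead
    'https': True,                     # symlink the repo under the HTTPS publish dir
    'http': False,                     # skip (and clean up) the HTTP publish dir
}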
Example #7
    def publish_repo(self, repo, publish_conduit, config):
        publish_start_time = time.time()
        _LOG.info("Start publish time %s" % publish_start_time)
        progress_status = {
            "rpms":               {"state": "NOT_STARTED"},
            "errata":             {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "metadata":           {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "isos":               {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            "publish_https":      {"state": "NOT_STARTED"},
            }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo_working_dir = repo.working_dir

        if self.cancelled:
            return publish_conduit.build_failure_report(self.summary, self.details)

        skip_types = config.get("skip") or []
        repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
        date_filter = repo_exporter.create_date_range_filter(config)
        groups_xml_path = None
        updateinfo_xml_path = None
        if date_filter:
            # export errata by date and associated rpm units
            progress_status["errata"]["state"] = "STARTED"
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA], unit_filters=date_filter)
            errata_units = publish_conduit.get_units(criteria=criteria)
            rpm_units = self.__get_rpm_units(publish_conduit)
            drpm_criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
            rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
            rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
            rpm_summary, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
            if self.cancelled:
                return publish_conduit.build_failure_report(self.summary, self.details)
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            progress_status["errata"]["num_success"] = len(errata_units)
            progress_status["errata"]["state"] = "FINISHED"
            self.summary = dict(self.summary.items() + rpm_summary.items())
            self.summary["num_errata_units_exported"] = len(errata_units)
            self.details["errors"] = rpm_errors
        else:
            # export everything
            # export rpms
            rpm_units = self.__get_rpm_units(publish_conduit)
            drpm_criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
            rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
            rpm_summary, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
            # export package groups
            groups_xml_path = None
            if "packagegroup" not in skip_types:
                progress_status["packagegroups"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
                existing_units = publish_conduit.get_units(criteria=criteria)
                existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
                existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
                groups_xml_path = comps_util.write_comps_xml(repo_working_dir, existing_groups, existing_cats)
                self.summary["num_package_groups_exported"] = len(existing_groups)
                self.summary["num_package_categories_exported"] = len(existing_cats)
                progress_status["packagegroups"]["state"] = "FINISHED"
            else:
                progress_status["packagegroups"]["state"] = "SKIPPED"
                _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)

            if self.cancelled:
                return publish_conduit.build_failure_report(self.summary, self.details)

            # export errata
            updateinfo_xml_path = None
            if 'erratum' not in skip_types:
                progress_status["errata"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
                errata_units = publish_conduit.get_units(criteria=criteria)
                progress_status["errata"]["state"] = "IN_PROGRESS"
                updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
                progress_status["errata"]["num_success"] = len(errata_units)
                progress_status["errata"]["state"] = "FINISHED"
                self.summary["num_errata_units_exported"] = len(errata_units)
            else:
                #errata_summary, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)
                progress_status["errata"]["state"] = "SKIPPED"
                _LOG.info("errata unit type in skip list [%s]; skipping export" % skip_types)
            # distro units
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
            distro_units = publish_conduit.get_units(criteria=criteria)
            distro_summary, distro_errors = repo_exporter.export_distributions(distro_units, progress_callback=progress_callback)
            # sum up summary and details
            self.details["errors"] = rpm_errors + distro_errors
            self.summary = dict(self.summary.items() + rpm_summary.items() + distro_summary.items())
        # generate metadata
        metadata_status, metadata_errors = metadata.generate_yum_metadata(
            repo_working_dir, publish_conduit, config, progress_callback, is_cancelled=self.cancelled,
            group_xml_path=groups_xml_path, updateinfo_xml_path=updateinfo_xml_path, repo_scratchpad=publish_conduit.get_repo_scratchpad())
        _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
        self.details["errors"] += metadata_errors
        # build iso and publish via HTTPS
        self._publish_isos(repo, config, progress_callback=progress_callback)
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (self.summary, self.details))
        # remove exported content from the working directory
        iso_util.cleanup_working_dir(self.repo_working_dir)
        if self.details["errors"]:
            return publish_conduit.build_failure_report(self.summary, self.details)
        return publish_conduit.build_success_report(self.summary, self.details)
Example #8
    def publish_repo(self, repo, publish_conduit, config):
        summary = {}
        details = {}
        progress_status = {
            "packages":           {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "metadata":           {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            "publish_https":      {"state": "NOT_STARTED"},
            }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo.working_dir

        if self.canceled:
            return publish_conduit.build_failure_report(summary, details)
        skip_list = config.get('skip') or []
        # Determine Content in this repo
        unfiltered_units = publish_conduit.get_units()
        # filter compatible units
        rpm_units = filter(lambda u : u.type_id in [TYPE_ID_RPM, TYPE_ID_SRPM], unfiltered_units)
        drpm_units = filter(lambda u : u.type_id == TYPE_ID_DRPM, unfiltered_units)
        rpm_errors = []
        if 'rpm' not in skip_list:
            _LOG.debug("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                    % (repo.id, len(unfiltered_units), len(rpm_units)))
            # Create symlinks under repo.working_dir
            rpm_status, rpm_errors = self.handle_symlinks(rpm_units, repo.working_dir, progress_callback)
            if not rpm_status:
                _LOG.error("Unable to publish %s items" % (len(rpm_errors)))
        drpm_errors = []
        if 'drpm' not in skip_list:
            _LOG.debug("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                    % (repo.id, len(unfiltered_units), len(drpm_units)))
            # Create symlinks under repo.working_dir
            drpm_status, drpm_errors = self.handle_symlinks(drpm_units, repo.working_dir, progress_callback)
            if not drpm_status:
                _LOG.error("Unable to publish %s items" % (len(drpm_errors)))
        pkg_errors = rpm_errors + drpm_errors
        pkg_units = rpm_units +  drpm_units
        distro_errors = []
        distro_units = filter(lambda u: u.type_id == TYPE_ID_DISTRO, unfiltered_units)
        if 'distribution' not in skip_list:
            # symlink distribution files if any under repo.working_dir
            distro_status, distro_errors = self.symlink_distribution_unit_files(distro_units, repo.working_dir, progress_callback)
            if not distro_status:
                _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

        updateinfo_xml_path = None
        if 'erratum' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=TYPE_ID_ERRATA)
            errata_units = publish_conduit.get_units(criteria=criteria)
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo.working_dir)

        if self.canceled:
            return publish_conduit.build_failure_report(summary, details)
        groups_xml_path = None
        existing_cats = []
        existing_groups = []
        if 'packagegroup' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
            existing_units = publish_conduit.get_units(criteria)
            existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
            existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
            groups_xml_path = comps_util.write_comps_xml(repo.working_dir, existing_groups, existing_cats)
        metadata_start_time = time.time()
        # update/generate metadata for the published repo
        self.use_createrepo = config.get('use_createrepo')
        if self.use_createrepo:
            metadata_status, metadata_errors = metadata.generate_metadata(
                repo.working_dir, publish_conduit, config, progress_callback, groups_xml_path)
        else:
            # default to per package metadata
            metadata_status, metadata_errors = metadata.generate_yum_metadata(repo.working_dir, rpm_units,
                config, progress_callback, is_cancelled=self.canceled, group_xml_path=groups_xml_path,
                updateinfo_xml_path=updateinfo_xml_path, repo_scratchpad=publish_conduit.get_repo_scratchpad())
        metadata_end_time = time.time()
        relpath = self.get_repo_relative_path(repo, config)
        if relpath.startswith("/"):
            relpath = relpath[1:]
        #
        # Handle publish link for HTTPS
        #
        https_publish_dir = self.get_https_publish_dir(config)
        https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
        if config.get("https"):
            # Publish for HTTPS
            self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
                util.create_symlink(repo.working_dir, https_repo_publish_dir)
                summary["https_publish_dir"] = https_repo_publish_dir
                self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
            except:
                self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(https_repo_publish_dir):
                _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
                util.remove_symlink(https_publish_dir, https_repo_publish_dir)
        #
        # Handle publish link for HTTP
        #
        http_publish_dir = self.get_http_publish_dir(config)
        http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')
        if config.get("http"):
            # Publish for HTTP
            self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
                util.create_symlink(repo.working_dir, http_repo_publish_dir)
                summary["http_publish_dir"] = http_repo_publish_dir
                self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
            except:
                self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(http_repo_publish_dir):
                _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
                util.remove_symlink(http_publish_dir, http_repo_publish_dir)

        summary["num_package_units_attempted"] = len(pkg_units)
        summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
        summary["num_package_units_errors"] = len(pkg_errors)
        summary["num_distribution_units_attempted"] = len(distro_units)
        summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
        summary["num_distribution_units_errors"] = len(distro_errors)
        summary["num_package_groups_published"] = len(existing_groups)
        summary["num_package_categories_published"] = len(existing_cats)
        summary["relative_path"] = relpath
        if metadata_status is False and not len(metadata_errors):
            summary["skip_metadata_update"] = True
        else:
            summary["skip_metadata_update"] = False
        details["errors"] = pkg_errors + distro_errors + metadata_errors
        details['time_metadata_sec'] = metadata_end_time - metadata_start_time
        # metadata generate skipped vs run
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (summary, details))
        if details["errors"]:
            return publish_conduit.build_failure_report(summary, details)
        return publish_conduit.build_success_report(summary, details)
Example #9
    def publish_group(self, repo_group, publish_conduit, config):
        """
        see parent class for doc string
        """
        self.group_working_dir = group_working_dir = repo_group.working_dir
        skip_types = config.get("skip") or []
        self.init_group_progress()
        self.group_progress_status["group-id"] = repo_group.id

        # progress callback for group status
        def group_progress_callback(type_id, status):
            self.group_progress_status[type_id] = status
            publish_conduit.set_progress(self.group_progress_status)

        # loop through each repo in the group and perform exports
        for repoid in repo_group.repo_ids:
            _LOG.info("Exporting repo %s " % repoid)
            summary = {}
            details = {}
            progress_status = {
                                    "rpms":               {"state": "NOT_STARTED"},
                                    "errata":             {"state": "NOT_STARTED"},
                                    "distribution":       {"state": "NOT_STARTED"},
                                    "packagegroups":      {"state": "NOT_STARTED"},}
            def progress_callback(type_id, status):
                progress_status[type_id] = status
                publish_conduit.set_progress(progress_status)
            repo_working_dir = "%s/%s" % (group_working_dir, repoid)
            repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
            # check if any datefilter is set on the distributor
            date_filter = repo_exporter.create_date_range_filter(config)
            _LOG.debug("repo working dir %s" % repo_working_dir)
            groups_xml_path = None
            updateinfo_xml_path = None
            if date_filter:
                # If a date range is specified, we only export the errata within that range
                # and associated rpm units. This might change once we have dates associated
                # to other units.
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA], unit_filters=date_filter)
                errata_units = publish_conduit.get_units(repoid, criteria=criteria)
                # we only include binary and source; drpms are not associated to errata
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM])
                rpm_units = publish_conduit.get_units(repoid, criteria=criteria)
                rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
                rpm_status, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
                if self.canceled:
                    return publish_conduit.build_failure_report(summary, details)
                # export errata and generate updateinfo xml
                updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
                progress_status["errata"]["num_success"] = len(errata_units)
                progress_status["errata"]["state"] = "FINISHED"
                summary["num_package_units_attempted"] = len(rpm_units)
                summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
                summary["num_package_units_errors"] = len(rpm_errors)
                summary["num_errata_units_exported"] = len(errata_units)
                details["errors"] = rpm_errors
            else:

                # export rpm units(this includes binary, source and delta)
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM, TYPE_ID_DRPM])
                rpm_units = publish_conduit.get_units(repoid, criteria)
                rpm_status, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
                summary["num_package_units_attempted"] = len(rpm_units)
                summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
                summary["num_package_units_errors"] = len(rpm_errors)

                # export package groups information and generate comps.xml
                groups_xml_path = None
                if "packagegroup" not in skip_types:
                    progress_status["packagegroups"]["state"] = "STARTED"
                    criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
                    existing_units = publish_conduit.get_units(repoid, criteria)
                    existing_groups = filter(lambda u : u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
                    existing_cats = filter(lambda u : u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
                    groups_xml_path = comps_util.write_comps_xml(repo_working_dir, existing_groups, existing_cats)
                    summary["num_package_groups_exported"] = len(existing_groups)
                    summary["num_package_categories_exported"] = len(existing_cats)
                    progress_status["packagegroups"]["state"] = "FINISHED"
                else:
                    progress_status["packagegroups"]["state"] = "SKIPPED"
                    _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)

                if self.canceled:
                    return publish_conduit.build_failure_report(summary, details)

                # export errata units and associated rpms
                progress_status["errata"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
                errata_units = publish_conduit.get_units(repoid, criteria=criteria)
                progress_status["errata"]["state"] = "IN_PROGRESS"
                updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
                progress_status["errata"]["num_success"] = len(errata_units)
                progress_status["errata"]["state"] = "FINISHED"
                summary["num_errata_units_exported"] = len(errata_units)

                # export distributions
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
                distro_units = publish_conduit.get_units(repoid, criteria)
                distro_status, distro_errors = repo_exporter.export_distributions(distro_units, progress_callback=progress_callback)
                summary["num_distribution_units_attempted"] = len(distro_units)
                summary["num_distribution_units_exported"] = len(distro_units) - len(distro_errors)
                summary["num_distribution_units_errors"] = len(distro_errors)

                self.group_progress_status["repositories"][repoid] = progress_status
                self.set_progress("repositories", self.group_progress_status["repositories"], group_progress_callback)

                details["errors"] = rpm_errors + distro_errors
                self.group_summary[repoid] = summary
                self.group_details[repoid] = details

            # generate metadata for the exported repo
            repo_scratchpad = publish_conduit.get_repo_scratchpad(repoid)
            metadata_status, metadata_errors = metadata.generate_yum_metadata(
                repo_working_dir, rpm_units, config, progress_callback, is_cancelled=self.canceled,
                group_xml_path=groups_xml_path, updateinfo_xml_path=updateinfo_xml_path, repo_scratchpad=repo_scratchpad)
            details["errors"] += metadata_errors
        # generate and publish isos
        self._publish_isos(repo_group, config, progress_callback=group_progress_callback)
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (self.group_summary, self.group_details))

        # remove exported content from the working directory
        iso_util.cleanup_working_dir(self.group_working_dir)

        # check for any errors across the exported repos
        if any(self.group_details[repoid]["errors"] for repoid in self.group_details):
            return publish_conduit.build_failure_report(self.group_summary, self.group_details)

        return publish_conduit.build_success_report(self.group_summary, self.group_details)