Example #1
def export_package_groups_and_cats(working_dir, units, progress_callback=None):
    """
    Exports the given package groups and package categories to the given working directory.
    Because both package groups and categories are needed to write the groups xml file, they
    are both exported here.

    :param working_dir:         The full path to the directory to export the content to
    :type  working_dir:         str
    :param units:               The package groups and package categories to export.
    :type  units:               list of AssociatedUnit
    :param progress_callback:   The progress callback function
    :type  progress_callback:   function

    :return: a tuple consisting of the groups_xml_path and the summary, in that order
    :rtype:  (str, dict)
    """
    set_progress(models.PackageGroup.TYPE, {constants.PROGRESS_STATE_KEY: constants.STATE_RUNNING}, progress_callback)
    set_progress(
        models.PackageCategory.TYPE, {constants.PROGRESS_STATE_KEY: constants.STATE_RUNNING}, progress_callback
    )
    summary = {}

    # Collect the existing groups and categories
    existing_groups = filter(lambda u: u.type_id == models.PackageGroup.TYPE, units)
    existing_cats = filter(lambda u: u.type_id == models.PackageCategory.TYPE, units)
    groups_xml_path = comps_util.write_comps_xml(working_dir, existing_groups, existing_cats)
    summary["num_package_groups_exported"] = len(existing_groups)
    summary["num_package_categories_exported"] = len(existing_cats)

    set_progress(models.PackageGroup.TYPE, {constants.PROGRESS_STATE_KEY: constants.STATE_COMPLETE}, progress_callback)
    set_progress(
        models.PackageCategory.TYPE, {constants.PROGRESS_STATE_KEY: constants.STATE_COMPLETE}, progress_callback
    )

    return groups_xml_path, summary
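
A minimal usage sketch for the function above, assuming units came from a conduit
query spanning both unit types; the console callback here is purely illustrative:

def print_progress(type_id, status):
    # Matches the callback signature set_progress() expects:
    # the unit type id plus that type's status dict.
    print("%s -> %s" % (type_id, status))

# 'units' is assumed to be the result of something like:
#   publish_conduit.get_units(UnitAssociationCriteria(
#       type_ids=[models.PackageGroup.TYPE, models.PackageCategory.TYPE]))
groups_xml_path, summary = export_package_groups_and_cats(
    "/tmp/export", units, progress_callback=print_progress)
print(groups_xml_path)                            # <working_dir>/comps.xml
print(summary["num_package_groups_exported"])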
Example #2
    def test_write_comps_with_centos6_comps_xml(self):
        repo = mock.Mock(spec=Repository)
        repo.id = "test_write_comps_with_i18n_data"
        repo.working_dir = self.working_dir
        sync_conduit = importer_mocks.get_sync_conduit()
        repo_src_dir = os.path.join(self.data_dir, "test_comps_import_with_dots_in_pkg_names")
        # Simulate a sync with CentOS 6 comps.xml data
        # The test data contains issues such as:
        #  1) conditional_package_names that contain a '.' in the key name
        #     InvalidDocument: key 'openoffice.org-langpack-en' must not contain '.'
        #  2) unicode strings which are not being encoded correctly during write
        #     UnicodeEncodeError: 'ascii' codec can't encode characters in position 334-341: ordinal not in range(128)
        avail_groups, avail_cats = comps.get_available(repo_src_dir)
        groups, group_units = comps.get_new_group_units(avail_groups, {}, sync_conduit, repo)
        cats, cat_units = comps.get_new_category_units(avail_cats, {}, sync_conduit, repo)
        yum_distributor = YumDistributor()
        comps_xml_out_path = comps_util.write_comps_xml(repo, group_units.values(), cat_units.values())
        self.assertEqual(comps_xml_out_path, os.path.join(repo.working_dir, "comps.xml"))
        yc = yum.comps.Comps()
        yc.add(comps_xml_out_path)
        # assertEqual, not assertTrue: the original two-argument assertTrue
        # treated the second value as a message and never compared the counts
        self.assertEqual(len(group_units), len(yc.groups))
        self.assertEqual(len(cat_units), len(yc.categories))
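
A minimal sketch of the encoding concern this test exercises: writing the comps
document through a UTF-8 codec instead of the default ascii codec. The helper name,
and the assumption that the XML arrives as a unicode string, are illustrative only:

import codecs
import os

def write_comps_xml_utf8_sketch(working_dir, comps_xml_unicode):
    # codecs.open() encodes on write, so non-ASCII translated group names
    # no longer trigger UnicodeEncodeError from the ascii codec.
    out_path = os.path.join(working_dir, "comps.xml")
    f = codecs.open(out_path, "w", encoding="utf-8")
    try:
        f.write(comps_xml_unicode)
    finally:
        f.close()
    return out_path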
Example #3
    def publish_repo(self, repo, publish_conduit, config):
        summary = {}
        details = {}
        progress_status = {
            "packages":           {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "metadata":           {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            "publish_https":      {"state": "NOT_STARTED"},
            }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo.working_dir

        if self.canceled:
            return publish_conduit.build_cancel_report(summary, details)
        skip_list = config.get('skip') or []
        # Determine Content in this repo
        pkg_units = []
        pkg_errors = []
        if 'rpm' not in skip_list:
            for type_id in [TYPE_ID_RPM, TYPE_ID_SRPM]:
                criteria = UnitAssociationCriteria(
                    type_ids=type_id,
                    unit_fields=['id', 'name', 'version', 'release', 'arch', 'epoch',
                                 '_storage_path', 'checksum', 'checksumtype'])
                pkg_units += publish_conduit.get_units(criteria=criteria)
            drpm_units = []
            if 'drpm' not in skip_list:
                criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
                drpm_units = publish_conduit.get_units(criteria=criteria)
            pkg_units += drpm_units
            # Create symlinks under repo.working_dir
            pkg_status, pkg_errors = self.handle_symlinks(pkg_units, repo.working_dir, progress_callback)
            if not pkg_status:
                _LOG.error("Unable to publish %s items" % (len(pkg_errors)))

        distro_errors = []
        distro_units = []
        if 'distribution' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DISTRO)
            distro_units = publish_conduit.get_units(criteria=criteria)
            # symlink distribution files if any under repo.working_dir
            distro_status, distro_errors = self.symlink_distribution_unit_files(distro_units, repo.working_dir, publish_conduit, progress_callback)
            if not distro_status:
                _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

        updateinfo_xml_path = None
        if 'erratum' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=TYPE_ID_ERRATA)
            errata_units = publish_conduit.get_units(criteria=criteria)
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo.working_dir)

        if self.canceled:
            return publish_conduit.build_cancel_report(summary, details)
        groups_xml_path = None
        existing_cats = []
        existing_groups = []
        if 'packagegroup' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
            existing_units = publish_conduit.get_units(criteria)
            existing_groups = filter(lambda u: u.type_id == TYPE_ID_PKG_GROUP, existing_units)
            existing_cats = filter(lambda u: u.type_id == TYPE_ID_PKG_CATEGORY, existing_units)
            groups_xml_path = comps_util.write_comps_xml(repo.working_dir, existing_groups, existing_cats)
        metadata_start_time = time.time()
        # update/generate metadata for the published repo
        self.use_createrepo = config.get('use_createrepo')
        if self.use_createrepo:
            metadata_status, metadata_errors = metadata.generate_metadata(
                repo.working_dir, publish_conduit, config, progress_callback, groups_xml_path)
        else:
            metadata_status, metadata_errors = metadata.generate_yum_metadata(repo.id, repo.working_dir, publish_conduit, config,
                progress_callback, is_cancelled=self.canceled, group_xml_path=groups_xml_path, updateinfo_xml_path=updateinfo_xml_path,
                repo_scratchpad=publish_conduit.get_repo_scratchpad())

        metadata_end_time = time.time()
        relpath = self.get_repo_relative_path(repo, config)
        if relpath.startswith("/"):
            relpath = relpath[1:]

        # Build the https and http publishing paths
        https_publish_dir = self.get_https_publish_dir(config)
        https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
        http_publish_dir = self.get_http_publish_dir(config)
        http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')

        # Clean up the old publish directories, if they exist.
        scratchpad = publish_conduit.get_repo_scratchpad()
        if OLD_REL_PATH_KEYWORD in scratchpad:
            old_relative_path = scratchpad[OLD_REL_PATH_KEYWORD]
            old_https_repo_publish_dir = os.path.join(https_publish_dir, old_relative_path)
            if os.path.exists(old_https_repo_publish_dir):
                util.remove_repo_publish_dir(https_publish_dir, old_https_repo_publish_dir)
            old_http_repo_publish_dir = os.path.join(http_publish_dir, old_relative_path)
            if os.path.exists(old_http_repo_publish_dir):
                util.remove_repo_publish_dir(http_publish_dir, old_http_repo_publish_dir)

        # Now write the current publish relative path to the scratch pad. This way, if the relative path
        # changes before the next publish, we can clean up the old path.
        scratchpad[OLD_REL_PATH_KEYWORD] = relpath
        publish_conduit.set_repo_scratchpad(scratchpad)

        # Handle publish link for HTTPS
        if config.get("https"):
            # Publish for HTTPS
            self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
                util.create_symlink(repo.working_dir, https_repo_publish_dir)
                util.generate_listing_files(https_publish_dir, https_repo_publish_dir)
                summary["https_publish_dir"] = https_repo_publish_dir
                self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
            except Exception:
                self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(https_repo_publish_dir):
                _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
                util.remove_repo_publish_dir(https_publish_dir, https_repo_publish_dir)

        # Handle publish link for HTTP
        if config.get("http"):
            # Publish for HTTP
            self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
                util.create_symlink(repo.working_dir, http_repo_publish_dir)
                util.generate_listing_files(http_publish_dir, http_repo_publish_dir)
                summary["http_publish_dir"] = http_repo_publish_dir
                self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
            except Exception:
                self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(http_repo_publish_dir):
                _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
                util.remove_repo_publish_dir(http_publish_dir, http_repo_publish_dir)

        summary["num_package_units_attempted"] = len(pkg_units)
        summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
        summary["num_package_units_errors"] = len(pkg_errors)
        summary["num_distribution_units_attempted"] = len(distro_units)
        summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
        summary["num_distribution_units_errors"] = len(distro_errors)
        summary["num_package_groups_published"] = len(existing_groups)
        summary["num_package_categories_published"] = len(existing_cats)
        summary["relative_path"] = relpath
        if metadata_status is False and not len(metadata_errors):
            summary["skip_metadata_update"] = True
        else:
            summary["skip_metadata_update"] = False
        details["errors"] = pkg_errors + distro_errors # metadata_errors
        details['time_metadata_sec'] = metadata_end_time - metadata_start_time
        # metadata generate skipped vs run
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (summary, details))
        if details["errors"]:
            return publish_conduit.build_failure_report(summary, details)
        return publish_conduit.build_success_report(summary, details)
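
The nested progress_callback above is the whole reporting protocol: each phase
overwrites its own key in a shared status dict, and the full snapshot is re-sent
through the conduit on every update. A standalone sketch with a stubbed conduit
(FakeConduit is hypothetical):

class FakeConduit(object):
    # Stand-in for publish_conduit; a real conduit persists the report.
    def set_progress(self, status):
        print("progress: %s" % status)

progress_status = {"packages": {"state": "NOT_STARTED"}}
conduit = FakeConduit()

def progress_callback(type_id, status):
    # Replace one phase's sub-status, then re-publish the whole dict.
    progress_status[type_id] = status
    conduit.set_progress(progress_status)

progress_callback("packages", {"state": "IN_PROGRESS", "num_success": 10})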
Example #4
    def publish_repo(self, repo, publish_conduit, config):
        publish_start_time = time.time()
        _LOG.info("Start publish time %s" % publish_start_time)
        progress_status = {
            "rpms":               {"state": "NOT_STARTED"},
            "errata":             {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "metadata":           {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "isos":               {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            "publish_https":      {"state": "NOT_STARTED"},
            }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo_working_dir = repo.working_dir

        if self.cancelled:
            return publish_conduit.build_failure_report(self.summary, self.details)

        skip_types = config.get("skip") or []
        repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
        date_filter = repo_exporter.create_date_range_filter(config)
        groups_xml_path = None
        updateinfo_xml_path = None
        if date_filter:
            # export errata by date and associated rpm units
            progress_status["errata"]["state"] = "STARTED"
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA], unit_filters=date_filter)
            errata_units = publish_conduit.get_units(criteria=criteria)
            rpm_units = self.__get_rpm_units(publish_conduit)
            drpm_criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
            rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
            rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
            rpm_summary, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
            if self.cancelled:
                return publish_conduit.build_failure_report(self.summary, self.details)
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            progress_status["errata"]["num_success"] = len(errata_units)
            progress_status["errata"]["state"] = "FINISHED"
            self.summary = dict(self.summary.items() + rpm_summary.items())
            self.summary["num_errata_units_exported"] = len(errata_units)
            self.details["errors"] = rpm_errors
        else:
            # export everything
            # export rpms
            rpm_units = self.__get_rpm_units(publish_conduit)
            drpm_criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
            rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
            rpm_summary, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
            # export package groups
            groups_xml_path = None
            if "packagegroup" not in skip_types:
                progress_status["packagegroups"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
                existing_units = publish_conduit.get_units(criteria=criteria)
                existing_groups = filter(lambda u: u.type_id == TYPE_ID_PKG_GROUP, existing_units)
                existing_cats = filter(lambda u: u.type_id == TYPE_ID_PKG_CATEGORY, existing_units)
                groups_xml_path = comps_util.write_comps_xml(repo_working_dir, existing_groups, existing_cats)
                self.summary["num_package_groups_exported"] = len(existing_groups)
                self.summary["num_package_categories_exported"] = len(existing_cats)
                progress_status["packagegroups"]["state"] = "FINISHED"
            else:
                progress_status["packagegroups"]["state"] = "SKIPPED"
                _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)

            if self.cancelled:
                return publish_conduit.build_failure_report(self.summary, self.details)

            # export errata
            updateinfo_xml_path = None
            if 'erratum' not in skip_types:
                progress_status["errata"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
                errata_units = publish_conduit.get_units(criteria=criteria)
                progress_status["errata"]["state"] = "IN_PROGRESS"
                updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
                progress_status["errata"]["num_success"] = len(errata_units)
                progress_status["errata"]["state"] = "FINISHED"
                self.summary["num_errata_units_exported"] = len(errata_units)
            else:
                #errata_summary, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)
                progress_status["errata"]["state"] = "SKIPPED"
                _LOG.info("errata unit type in skip list [%s]; skipping export" % skip_types)
            # distro units
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
            distro_units = publish_conduit.get_units(criteria=criteria)
            distro_summary, distro_errors = repo_exporter.export_distributions(distro_units, progress_callback=progress_callback)
            # sum up summary and details
            self.details["errors"] = rpm_errors + distro_errors
            self.summary = dict(self.summary.items() + rpm_summary.items() + distro_summary.items())
        # generate metadata
        metadata_status, metadata_errors = metadata.generate_yum_metadata(
            repo_working_dir, publish_conduit, config, progress_callback, is_cancelled=self.cancelled,
            group_xml_path=groups_xml_path, updateinfo_xml_path=updateinfo_xml_path, repo_scratchpad=publish_conduit.get_repo_scratchpad())
        _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
        self.details["errors"] += metadata_errors
        # build iso and publish via HTTPS
        self._publish_isos(repo, config, progress_callback=progress_callback)
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (self.summary, self.details))
        # remove exported content from working directory
        iso_util.cleanup_working_dir(self.repo_working_dir)
        if self.details["errors"]:
            return publish_conduit.build_failure_report(self.summary, self.details)
        return publish_conduit.build_success_report(self.summary, self.details)
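
One portability note on this example: dict(self.summary.items() + rpm_summary.items())
only works on Python 2, where items() returns concatenable lists. A sketch of an
equivalent merge that behaves the same on either Python line:

rpm_summary = {"num_rpm_units_exported": 10}
distro_summary = {"num_distribution_units_exported": 2}

summary = dict(rpm_summary)
summary.update(distro_summary)   # later dicts win on key collisions
# On Python 3.5+ this can also be written as:
#   summary = {**rpm_summary, **distro_summary}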
Example #5
    def publish_repo(self, repo, publish_conduit, config):
        summary = {}
        details = {}
        progress_status = {
            "packages":           {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "metadata":           {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            "publish_https":      {"state": "NOT_STARTED"},
            }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo.working_dir

        if self.canceled:
            return publish_conduit.build_failure_report(summary, details)
        skip_list = config.get('skip') or []
        # Determine Content in this repo
        unfiltered_units = publish_conduit.get_units()
        # filter compatible units
        rpm_units = filter(lambda u: u.type_id in [TYPE_ID_RPM, TYPE_ID_SRPM], unfiltered_units)
        drpm_units = filter(lambda u: u.type_id == TYPE_ID_DRPM, unfiltered_units)
        rpm_errors = []
        if 'rpm' not in skip_list:
            _LOG.info("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                    % (repo.id, len(unfiltered_units), len(rpm_units)))
            # Create symlinks under repo.working_dir
            rpm_status, rpm_errors = self.handle_symlinks(rpm_units, repo.working_dir, progress_callback)
            if not rpm_status:
                _LOG.error("Unable to publish %s items" % (len(rpm_errors)))
        drpm_errors = []
        if 'drpm' not in skip_list:
            _LOG.info("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                    % (repo.id, len(unfiltered_units), len(drpm_units)))
            # Create symlinks under repo.working_dir
            drpm_status, drpm_errors = self.handle_symlinks(drpm_units, repo.working_dir, progress_callback)
            if not drpm_status:
                _LOG.error("Unable to publish %s items" % (len(drpm_errors)))
        pkg_errors = rpm_errors + drpm_errors
        pkg_units = rpm_units + drpm_units
        distro_errors = []
        distro_units = filter(lambda u: u.type_id == TYPE_ID_DISTRO, unfiltered_units)
        if 'distribution' not in skip_list:
            # symlink distribution files if any under repo.working_dir
            distro_status, distro_errors = self.symlink_distribution_unit_files(distro_units, repo.working_dir, progress_callback)
            if not distro_status:
                _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))
        # update/generate metadata for the published repo
        repo_scratchpad = publish_conduit.get_repo_scratchpad()
        src_working_dir = ''
        if "importer_working_dir" in repo_scratchpad:
            src_working_dir = repo_scratchpad["importer_working_dir"]

        if self.canceled:
            return publish_conduit.build_failure_report(summary, details)
        groups_xml_path = None
        existing_cats = []
        existing_groups = []
        if 'packagegroup' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
            existing_units = publish_conduit.get_units(criteria)
            existing_groups = filter(lambda u: u.type_id == TYPE_ID_PKG_GROUP, existing_units)
            existing_cats = filter(lambda u: u.type_id == TYPE_ID_PKG_CATEGORY, existing_units)
            groups_xml_path = comps_util.write_comps_xml(repo, existing_groups, existing_cats)
        metadata_start_time = time.time()
        self.copy_importer_repodata(src_working_dir, repo.working_dir)
        metadata_status, metadata_errors = metadata.generate_metadata(
                repo, publish_conduit, config, progress_callback, groups_xml_path)
        metadata_end_time = time.time()
        relpath = self.get_repo_relative_path(repo, config)
        if relpath.startswith("/"):
            relpath = relpath[1:]
        #
        # Handle publish link for HTTPS
        #
        https_publish_dir = self.get_https_publish_dir(config)
        https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
        if config.get("https"):
            # Publish for HTTPS
            self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
                util.create_symlink(repo.working_dir, https_repo_publish_dir)
                summary["https_publish_dir"] = https_repo_publish_dir
                self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
            except Exception:
                self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(https_repo_publish_dir):
                _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
                util.remove_symlink(https_publish_dir, https_repo_publish_dir)
        #
        # Handle publish link for HTTP
        #
        http_publish_dir = self.get_http_publish_dir(config)
        http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')
        if config.get("http"):
            # Publish for HTTP
            self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
                util.create_symlink(repo.working_dir, http_repo_publish_dir)
                summary["http_publish_dir"] = http_repo_publish_dir
                self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
            except Exception:
                self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(http_repo_publish_dir):
                _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
                util.remove_symlink(http_publish_dir, http_repo_publish_dir)

        summary["num_package_units_attempted"] = len(pkg_units)
        summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
        summary["num_package_units_errors"] = len(pkg_errors)
        summary["num_distribution_units_attempted"] = len(distro_units)
        summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
        summary["num_distribution_units_errors"] = len(distro_errors)
        summary["num_package_groups_published"] = len(existing_groups)
        summary["num_package_categories_published"] = len(existing_cats)
        summary["relative_path"] = relpath
        if metadata_status is False and not len(metadata_errors):
            summary["skip_metadata_update"] = True
        else:
            summary["skip_metadata_update"] = False
        details["errors"] = pkg_errors + distro_errors + metadata_errors
        details['time_metadata_sec'] = metadata_end_time - metadata_start_time
        # metadata generate skipped vs run
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (summary, details))
        if details["errors"]:
            return publish_conduit.build_failure_report(summary, details)
        return publish_conduit.build_success_report(summary, details)
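
A minimal sketch of what the util.create_symlink(repo.working_dir, https_repo_publish_dir)
step amounts to, under the simplest assumptions (replace any stale link, then point the
publish path at the working directory); the real helper performs more validation:

import os

def create_symlink_sketch(source_path, link_path):
    # Ensure the parent directory of the link exists.
    parent = os.path.dirname(link_path)
    if parent and not os.path.exists(parent):
        os.makedirs(parent)
    # lexists() also catches dangling links left behind by an old publish.
    if os.path.lexists(link_path):
        os.unlink(link_path)
    os.symlink(source_path, link_path)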
Example #6
    def publish_repo(self, repo, publish_conduit, config):
        publish_start_time = time.time()
        _LOG.info("Start publish time %s" % publish_start_time)
        summary = {}
        details = {}
        progress_status = {
            "rpms":               {"state": "NOT_STARTED"},
            "errata":             {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "isos":               {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo_working_dir = repo.working_dir

        if self.canceled:
            return publish_conduit.build_failure_report(summary, details)

        skip_types = config.get("skip") or []
        date_filter = self.create_date_range_filter(config)
        if date_filter:
            # export errata by date and associated rpm units
            if "errata" not in skip_types:
                progress_status["errata"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA], unit_filters=date_filter)
                errata_units = publish_conduit.get_units(criteria)
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM, TYPE_ID_DRPM])
                rpm_units = publish_conduit.get_units(criteria)
                rpm_units = self._get_errata_rpms(errata_units, rpm_units)
                rpm_status, rpm_errors = self._export_rpms(rpm_units, repo_working_dir, progress_callback=progress_callback)
                progress_status["rpms"]["state"] = "FINISHED"
                if self.canceled:
                    return publish_conduit.build_failure_report(summary, details)
                # generate metadata
                metadata_status, metadata_errors = metadata.generate_metadata(
                        repo, publish_conduit, config, progress_callback)
                _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
                errata_status, errata_errors = self._export_errata(errata_units, repo_working_dir, progress_callback=progress_callback)
                progress_status["errata"]["state"] = "FINISHED"

                summary["num_package_units_attempted"] = len(rpm_units)
                summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
                summary["num_package_units_errors"] = len(rpm_errors)
                details["errors"] = rpm_errors +  errata_errors + metadata_errors
            else:
                progress_status["errata"]["state"] = "SKIPPED"
                _LOG.info("erratum unit type in skip list [%s]; skipping export" % skip_types)
        else:
            # export everything
            rpm_units = []
            rpm_errors = []
            if "rpm" not in skip_types:
                progress_status["rpms"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM, TYPE_ID_DRPM])
                rpm_units = publish_conduit.get_units(criteria)
                rpm_status, rpm_errors = self._export_rpms(rpm_units, repo_working_dir, progress_callback=progress_callback)
                progress_status["rpms"]["state"] = "FINISHED"
                summary["num_package_units_attempted"] = len(rpm_units)
                summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
                summary["num_package_units_errors"] = len(rpm_errors)
            else:
                progress_status["rpms"]["state"] = "SKIPPED"
                _LOG.info("rpm unit type in skip list [%s]; skipping export" % skip_types)
            # package groups
            groups_xml_path = None
            if "packagegroup" not in skip_types:
                progress_status["packagegroups"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
                existing_units = publish_conduit.get_units(criteria)
                existing_groups = filter(lambda u: u.type_id == TYPE_ID_PKG_GROUP, existing_units)
                existing_cats = filter(lambda u: u.type_id == TYPE_ID_PKG_CATEGORY, existing_units)
                groups_xml_path = comps_util.write_comps_xml(repo, existing_groups, existing_cats)
                summary["num_package_groups_exported"] = len(existing_groups)
                summary["num_package_categories_exported"] = len(existing_cats)
                progress_status["packagegroups"]["state"] = "FINISHED"
            else:
                progress_status["packagegroups"]["state"] = "SKIPPED"
                _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)

            if self.canceled:
                return publish_conduit.build_failure_report(summary, details)
            # generate metadata
            metadata_status, metadata_errors = metadata.generate_metadata(
                    repo, publish_conduit, config, progress_callback, groups_xml_path)
            _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
            errata_errors = []
            if "errata" not in skip_types:
                progress_status["errata"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
                errata_units = publish_conduit.get_units(criteria)
                rpm_units = self._get_errata_rpms(errata_units, rpm_units)
                self._export_rpms(rpm_units, repo_working_dir, progress_callback=progress_callback)
                errata_status, errata_errors = self._export_errata(errata_units, repo_working_dir, progress_callback=progress_callback)
                summary["num_errata_units_exported"] = len(errata_units)
                progress_status["errata"]["state"] = "FINISHED"
            else:
                progress_status["errata"]["state"] = "SKIPPED"
                _LOG.info("erratum unit type in skip list [%s]; skipping export" % skip_types)

            distro_errors = []
            if "distribution" not in skip_types:
                # distro units
                progress_status["distribution"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
                distro_units = publish_conduit.get_units(criteria)
                distro_status, distro_errors = self._export_distributions(distro_units, repo_working_dir, progress_callback=progress_callback)
                progress_status["distribution"]["state"] = "FINISHED"

                summary["num_distribution_units_attempted"] = len(distro_units)
                summary["num_distribution_units_exported"] = len(distro_units) - len(distro_errors)
                summary["num_distribution_units_errors"] = len(distro_errors)
            else:
                progress_status["distribution"]["state"] = "SKIPPED"
                _LOG.info("distribution unit type in skip list [%s]; skipping export" % skip_types)

            details["errors"] = rpm_errors + distro_errors + errata_errors + metadata_errors
        # build iso and publish via HTTPS
        https_publish_dir = self.get_https_publish_iso_dir(config)
        https_repo_publish_dir = os.path.join(https_publish_dir, repo.id).rstrip('/')
        prefix = config.get('iso_prefix') or repo.id
        if config.get("https"):
            # Publish for HTTPS
            self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
                iso_status, iso_errors = self.generate_isos(repo_working_dir, https_repo_publish_dir, prefix=prefix, progress_callback=progress_callback)
                summary["https_publish_dir"] = https_repo_publish_dir
                self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
                progress_status["isos"]["state"] = "FINISHED"
            except Exception:
                self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(https_repo_publish_dir):
                _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
                shutil.rmtree(https_repo_publish_dir)

        # Handle publish link for HTTP
        # build iso and publish via HTTP
        http_publish_dir = self.get_http_publish_iso_dir(config)
        http_repo_publish_dir = os.path.join(http_publish_dir, repo.id).rstrip('/')
        if config.get("http"):
            # Publish for HTTP
            self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
                iso_status, iso_errors = self.generate_isos(repo_working_dir, http_repo_publish_dir, prefix=prefix, progress_callback=progress_callback)
                _LOG.debug("generate_isos status: %s, errors: %s" % (iso_status, iso_errors))
                summary["http_publish_dir"] = http_repo_publish_dir
                self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
                progress_status["isos"]["state"] = "FINISHED"
            except Exception:
                self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(http_repo_publish_dir):
                _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
                shutil.rmtree(http_repo_publish_dir)
        # metadata generate skipped vs run
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (summary, details))
        # remove exported content from working directory
        self.cleanup()
        if details["errors"]:
            return publish_conduit.build_failure_report(summary, details)
        return publish_conduit.build_success_report(summary, details)
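
This example branches on create_date_range_filter(config). A sketch of the shape such
a helper might return as unit_filters for UnitAssociationCriteria; the Mongo-style
operators and the start_date/end_date config keys are assumptions, not taken from
this snippet:

def create_date_range_filter_sketch(config):
    # Returns a filter dict limiting errata by issued date, or None when
    # no bounds are configured (callers then fall back to a full export).
    start = config.get("start_date")
    end = config.get("end_date")
    if start and end:
        return {"issued": {"$gte": start, "$lte": end}}
    if start:
        return {"issued": {"$gte": start}}
    if end:
        return {"issued": {"$lte": end}}
    return None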
Example #7
    def publish_group(self, repo_group, publish_conduit, config):
        """
        see parent class for doc string
        """
        self.group_working_dir = group_working_dir = repo_group.working_dir
        skip_types = config.get("skip") or []
        self.init_group_progress()
        self.group_progress_status["group-id"] = repo_group.id

        # progress callback for group status
        def group_progress_callback(type_id, status):
            self.group_progress_status[type_id] = status
            publish_conduit.set_progress(self.group_progress_status)

        # loop through each repo in the group and perform exports
        for repoid in repo_group.repo_ids:
            _LOG.info("Exporting repo %s " % repoid)
            summary = {}
            details = {}
            progress_status = {
                "rpms":               {"state": "NOT_STARTED"},
                "errata":             {"state": "NOT_STARTED"},
                "distribution":       {"state": "NOT_STARTED"},
                "packagegroups":      {"state": "NOT_STARTED"},
                }
            def progress_callback(type_id, status):
                progress_status[type_id] = status
                publish_conduit.set_progress(progress_status)
            repo_working_dir = "%s/%s" % (group_working_dir, repoid)
            repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
            # check if any datefilter is set on the distributor
            date_filter = repo_exporter.create_date_range_filter(config)
            _LOG.info("repo working dir %s" % repo_working_dir)
            if date_filter:
                # If a date range is specified, we only export the errata within that range
                # and associated rpm units. This might change once we have dates associated
                # to other units.
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA], unit_filters=date_filter)
                errata_units = publish_conduit.get_units(repoid, criteria=criteria)
                # we only include binary and source; drpms are not associated to errata
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM])
                rpm_units = publish_conduit.get_units(repoid, criteria=criteria)
                rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
                rpm_status, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
                if self.canceled:
                    return publish_conduit.build_failure_report(summary, details)

                # generate metadata
                metadata_status, metadata_errors = metadata.generate_metadata(
                        repo_working_dir, publish_conduit, config, progress_callback)
                _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
                # export errata and generate updateinfo xml
                errata_status, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)

                summary["num_package_units_attempted"] = len(rpm_units)
                summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
                summary["num_package_units_errors"] = len(rpm_errors)
                details["errors"] = rpm_errors +  errata_errors + metadata_errors
            else:

                # export rpm units(this includes binary, source and delta)
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM, TYPE_ID_DRPM])
                rpm_units = publish_conduit.get_units(repoid, criteria)
                rpm_status, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
                summary["num_package_units_attempted"] = len(rpm_units)
                summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
                summary["num_package_units_errors"] = len(rpm_errors)

                # export package groups information and generate comps.xml
                groups_xml_path = None
                if "packagegroup" not in skip_types:
                    progress_status["packagegroups"]["state"] = "STARTED"
                    criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
                    existing_units = publish_conduit.get_units(repoid, criteria)
                    existing_groups = filter(lambda u: u.type_id == TYPE_ID_PKG_GROUP, existing_units)
                    existing_cats = filter(lambda u: u.type_id == TYPE_ID_PKG_CATEGORY, existing_units)
                    groups_xml_path = comps_util.write_comps_xml(repo_working_dir, existing_groups, existing_cats)
                    summary["num_package_groups_exported"] = len(existing_groups)
                    summary["num_package_categories_exported"] = len(existing_cats)
                    progress_status["packagegroups"]["state"] = "FINISHED"
                else:
                    progress_status["packagegroups"]["state"] = "SKIPPED"
                    _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)

                if self.canceled:
                    return publish_conduit.build_failure_report(summary, details)

                # generate metadata
                metadata_status, metadata_errors = metadata.generate_metadata(
                        repo_working_dir, publish_conduit, config, progress_callback, groups_xml_path)
                _LOG.info("metadata generation complete at target location %s" % repo_working_dir)

                # export errata units and associated rpms
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
                errata_units = publish_conduit.get_units(repoid, criteria)
                errata_status, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)
                summary["num_errata_units_exported"] = len(errata_units)

                # export distributions
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
                distro_units = publish_conduit.get_units(repoid, criteria)
                distro_status, distro_errors = repo_exporter.export_distributions(distro_units, progress_callback=progress_callback)
                summary["num_distribution_units_attempted"] = len(distro_units)
                summary["num_distribution_units_exported"] = len(distro_units) - len(distro_errors)
                summary["num_distribution_units_errors"] = len(distro_errors)

                self.group_progress_status["repositories"][repoid] = progress_status
                self.set_progress("repositories", self.group_progress_status["repositories"], group_progress_callback)

                details["errors"] = rpm_errors + distro_errors + errata_errors + metadata_errors
                self.group_summary[repoid] = summary
                self.group_details[repoid] = details

        # generate and publish isos
        self._publish_isos(repo_group, config, progress_callback=group_progress_callback)
        _LOG.info("Publish complete:  summary = <%s>, details = <%s>" % (self.group_summary, self.group_details))

        # remove exported content from working directory
        iso_util.cleanup_working_dir(self.group_working_dir)

        # check for any errors
        if any(self.group_details[repoid]["errors"] for repoid in self.group_details):
            return publish_conduit.build_failure_report(self.group_summary, self.group_details)

        return publish_conduit.build_success_report(self.group_summary, self.group_details)
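
The corrected final check above aggregates per-repo error lists with any(); a
standalone sketch of the same pattern:

group_details = {
    "repo-a": {"errors": []},
    "repo-b": {"errors": ["symlink failed"]},
}

# any() short-circuits on the first repo with a non-empty error list.
has_errors = any(group_details[repoid]["errors"] for repoid in group_details)
print(has_errors)  # True, because repo-b reported an error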