def publish_repo(self, repo, publish_conduit, config):
        publish_start_time = time.time()
        _LOG.info("Start publish time %s" % publish_start_time)

        source_repo_id = config.get('source_repo_id')
        destination_dist_config = self.find_yum_distributor(repo.id)['config']
        source_working_dir = self.source_working_dir(source_repo_id)
        working_dir = self.full_working_dir(repo.id, publish_start_time)

        os.makedirs(working_dir)
        # Copy contents from the source's working directory to the destination
        if not self.copy_directory(source_working_dir, working_dir):
            publish_conduit.set_progress(self.summary)
            raise Exception("Failed to copy metadata.  See errors for more details.")

        # Symlink the destination's publish directories
        if destination_dist_config['http']:
            http_publish_dir = os.path.join(HTTP_PUBLISH_DIR, destination_dist_config["relative_url"]).rstrip('/')
            self.link_directory(working_dir, http_publish_dir)
            util.generate_listing_files(HTTP_PUBLISH_DIR, http_publish_dir)
            self.update_repomd(http_publish_dir)

        if destination_dist_config['https']:
            https_publish_dir = os.path.join(HTTPS_PUBLISH_DIR, destination_dist_config["relative_url"]).rstrip('/')
            self.link_directory(working_dir, https_publish_dir)
            util.generate_listing_files(HTTPS_PUBLISH_DIR, https_publish_dir)
            self.update_repomd(https_publish_dir)

        self.clean_path(self.base_working_dir(repo.id), str(publish_start_time))

        publish_conduit.set_progress(self.summary)
        if self.summary["errors"]:
            raise Exception("Failed to link metadata. See errors for more details.")
        else:
            return publish_conduit.build_success_report(self.summary, self.details)
Example #2
    def clean_simple_hosting_directories(self, start_location, containing_dir):
        """
        Remove any orphaned directory structure left by the removal of a repo.

        :param start_location: path of the symlink of the repo that was removed.
        :type  start_location: str
        :param containing_dir: path of the base directory containing the hosted repos
        :type  containing_dir: str
        """
        up_dir = os.path.dirname(start_location)

        # This function is potentially dangerous so it is important to restrict it to the
        # publish directories.
        if not os.path.dirname(up_dir).startswith(containing_dir):
            return

        # If the only file is the listing file, it is safe to delete the file and containing dir.
        if os.listdir(up_dir) == ['listing']:
            os.remove(os.path.join(up_dir, 'listing'))
            try:
                os.rmdir(up_dir)
            except OSError:
                # If this happened, there was a concurrency issue and it is no longer safe to
                # remove the directory. It is possible that the concurrent operation created the
                # listing file before this operation deleted it, so to be safe, we need to
                # regenerate the listing file.
                util.generate_listing_files(up_dir, up_dir)
                return

        self.clean_simple_hosting_directories(up_dir, containing_dir)
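For illustration, a hypothetical caller might invoke this right after unlinking a repo's publish symlink. The publisher instance and the paths below are assumptions, not values taken from the examples:

    import os

    # Hypothetical paths: the base directory that hosts published repos, and
    # the repo symlink that was just removed three levels below it.
    containing_dir = '/var/lib/pulp/published/yum/https'
    removed_link = os.path.join(containing_dir, 'repos/fedora/20/x86_64')

    os.unlink(removed_link)
    # Walks upward from the removed link, deleting directories that hold
    # nothing but a 'listing' file, and stops once it leaves containing_dir.
    publisher.clean_simple_hosting_directories(removed_link, containing_dir)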
Example #3
    def test_all(self):
        tmp_dir = tempfile.mkdtemp()
        try:
            # set up a directory structure and define the expected listing file values
            publish_dir = os.path.join(tmp_dir, 'a/b/c')
            os.makedirs(publish_dir)
            os.makedirs(os.path.join(tmp_dir, 'a/d'))
            os.makedirs(os.path.join(tmp_dir, 'a/b/e'))
            expected = ['a', 'b\nd', 'c\ne']

            # run it
            util.generate_listing_files(tmp_dir, publish_dir)

            # ensure that each listing file exists and has the correct contents
            current_path = tmp_dir
            for next_dir, expected_listing in zip(['a', 'b', 'c'], expected):
                file_path = os.path.join(current_path, 'listing')
                with open(file_path) as open_file:
                    self.assertEqual(open_file.read(), expected_listing)
                current_path = os.path.join(current_path, next_dir)

            # make sure there is not a listing file inside the repo's publish dir
            self.assertFalse(
                os.path.exists(os.path.join(publish_dir, 'listing')))

        finally:
            shutil.rmtree(tmp_dir, ignore_errors=True)
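Every example in this collection ultimately calls util.generate_listing_files(root_dir, target_dir), and the test above pins down the expected behavior. A minimal sketch that satisfies it, inferred from the test rather than taken from Pulp's actual implementation, could look like:

    import os

    def generate_listing_files(root_dir, target_dir):
        # For each directory from target_dir's parent up to and including
        # root_dir, write a 'listing' file naming that directory's
        # subdirectories, sorted, one per line. No listing file is written
        # inside target_dir itself. Assumes target_dir lies beneath root_dir.
        root_dir = root_dir.rstrip('/')
        current = target_dir.rstrip('/')
        while current != root_dir:
            current = os.path.dirname(current)
            subdirs = sorted(entry for entry in os.listdir(current)
                             if os.path.isdir(os.path.join(current, entry)))
            with open(os.path.join(current, 'listing'), 'w') as listing:
                listing.write('\n'.join(subdirs))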
Example #4
    def process_unit(self, root_publish_dir):
        """
        Publish a directory from the repo to a target directory.
        """

        # Find the location of the master repository tree structure
        master_publish_dir = os.path.join(configuration.get_master_publish_dir(self.parent.repo),
                                          self.parent.timestamp)

        # Find the location of the published repository tree structure
        repo_relative_dir = configuration.get_repo_relative_path(self.parent.repo, self.parent.config)
        repo_publish_dir = os.path.join(root_publish_dir, repo_relative_dir)
        # Strip the trailing '/', if any
        repo_publish_dir = repo_publish_dir.rstrip('/')

        # Create the parent directory of the published repository tree, if needed
        repo_publish_dir_parent = os.path.dirname(repo_publish_dir)
        if not os.path.exists(repo_publish_dir_parent):
            os.makedirs(repo_publish_dir_parent, 0750)

        # Create a temporary symlink in the parent of the published directory tree
        tmp_link_name = os.path.join(repo_publish_dir_parent, self.parent.timestamp)
        os.symlink(master_publish_dir, tmp_link_name)

        # Rename the symlink to the official published repository directory name.
        # This has two desirable effects:
        # 1. it will overwrite an existing link, if it's there
        # 2. the operation is atomic, instantly changing the published directory
        # NOTE: a symlink's target cannot be edited in place, so the link is replaced
        os.rename(tmp_link_name, repo_publish_dir)

        # (Re)generate the listing files
        util.generate_listing_files(root_publish_dir, repo_publish_dir)
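The symlink-then-rename idiom in this step is worth isolating. Here is a minimal standalone sketch of the same technique; the '.tmp' suffix is an illustrative choice, where the step above uses its publish timestamp instead:

    import os

    def atomic_symlink(target, link_path):
        # Build the new symlink under a temporary name in the same directory,
        # then rename it over the final name. On POSIX, rename() replaces an
        # existing symlink atomically, so readers never observe a missing or
        # half-updated link. Note: this fails if link_path is a real directory
        # rather than a symlink.
        tmp_link = link_path + '.tmp'
        os.symlink(target, tmp_link)
        os.rename(tmp_link, link_path)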
Example #5
    def publish_repo(self, repo, publish_conduit, config):
        publish_start_time = time.time()
        _LOG.info("Start publish time %s" % publish_start_time)

        source_repo_id = config.get('source_repo_id')
        destination_dist_config = self.find_yum_distributor(repo.id)['config']
        source_working_dir = self.source_working_dir(source_repo_id)
        working_dir = self.full_working_dir(repo.id, publish_start_time)

        self.safe_makedirs(working_dir)
        # Copy contents from the source's working directory to the destination
        if not self.copy_directory(source_working_dir, working_dir):
            publish_conduit.set_progress(self.summary)
            raise Exception(
                "Failed to copy metadata. See errors for more details.")

        # Symlink the destination's publish directories
        if destination_dist_config['http']:
            http_publish_dir = os.path.join(
                HTTP_PUBLISH_DIR,
                destination_dist_config["relative_url"]).rstrip('/')
            self.link_directory(working_dir, http_publish_dir)
            util.generate_listing_files(HTTP_PUBLISH_DIR, http_publish_dir)
            self.update_repomd(http_publish_dir)

        if destination_dist_config['https']:
            https_publish_dir = os.path.join(
                HTTPS_PUBLISH_DIR,
                destination_dist_config["relative_url"]).rstrip('/')
            self.link_directory(working_dir, https_publish_dir)
            util.generate_listing_files(HTTPS_PUBLISH_DIR, https_publish_dir)
            self.update_repomd(https_publish_dir)

        self.clean_path(self.base_working_dir(repo.id),
                        str(publish_start_time))

        publish_conduit.set_progress(self.summary)
        if self.summary["errors"]:
            raise Exception(
                "Failed to link metadata. See errors for more details.")
        else:
            return publish_conduit.build_success_report(
                self.summary, self.details)
Example #6
    def process_main(self, item=None):
        util.generate_listing_files(self.root_dir, self.target_dir)
Example #7
    def process_main(self):
        util.generate_listing_files(self.root_dir, self.target_dir)
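Both one-method snippets above drop generate_listing_files into a publish-step pipeline. A hedged sketch of the minimal class they imply follows; the PublishStep base class, its constructor, and the step id are assumptions about Pulp's step framework, and only root_dir and target_dir come from the snippets:

    class GenerateListingFilesStep(PublishStep):
        def __init__(self, root_dir, target_dir):
            # The step id is illustrative; the base class is assumed to handle
            # progress reporting around process_main().
            super(GenerateListingFilesStep, self).__init__('generate_listing_files')
            self.root_dir = root_dir
            self.target_dir = target_dir

        def process_main(self, item=None):
            util.generate_listing_files(self.root_dir, self.target_dir)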
Example #8
    def publish_group(self, repo_group, publish_conduit, config):
        """
        Publishes the given repository group.

        :param repo_group:      metadata describing the repository group
        :type  repo_group:      pulp.plugins.model.RepositoryGroup
        :param publish_conduit: provides access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoGroupPublishConduit
        :param config:          plugin configuration
        :type  config:          pulp.plugins.config.PluginConfiguration
        :return:                report describing the publish run
        :rtype:                 pulp.plugins.model.PublishReport
        """
        # First, validate the configuration because there may be override config options, and currently,
        # validate_config is not called prior to publishing by the manager.
        valid_config, msg = export_utils.validate_export_config(config)
        if not valid_config:
            raise PulpDataException(msg)

        _logger.info("Beginning export of the following repository group: [%s]" % repo_group.id)

        # The progress report for a group publish
        progress_status = {
            constants.PROGRESS_REPOS_KEYWORD: {constants.PROGRESS_STATE_KEY: constants.STATE_NOT_STARTED},
            constants.PROGRESS_ISOS_KEYWORD: {constants.PROGRESS_STATE_KEY: constants.STATE_NOT_STARTED},
            constants.PROGRESS_PUBLISH_HTTP: {constants.PROGRESS_STATE_KEY: constants.STATE_NOT_STARTED},
            constants.PROGRESS_PUBLISH_HTTPS: {constants.PROGRESS_STATE_KEY: constants.STATE_NOT_STARTED},
        }

        def progress_callback(progress_keyword, status):
            """
            Progress callback used to update the progress report for the publish conduit

            :param progress_keyword:    The keyword to assign the status to in the progress report dict
            :type  progress_keyword:    str
            :param status:              The status to assign to the keyword.
            :type  status:              dict
            """
            progress_status[progress_keyword] = status
            publish_conduit.set_progress(progress_status)

        # Before starting, clean out the working directory to remove ISOs from the last publish
        shutil.rmtree(repo_group.working_dir, ignore_errors=True)
        os.makedirs(repo_group.working_dir)

        # Retrieve the configuration for each repository, the skip types, and the date filter
        packed_config = export_utils.retrieve_group_export_config(repo_group, config)
        rpm_repositories, self.date_filter = packed_config

        # Update the progress for the repositories section
        repos_progress = export_utils.init_progress_report(len(rpm_repositories))
        progress_callback(constants.PROGRESS_REPOS_KEYWORD, repos_progress)

        # For every repository, extract the requested types to the working directory
        for repo_id, working_dir in rpm_repositories:
            # Create a repo conduit, which makes sharing code with the export and yum distributors easier
            repo_conduit = RepoPublishConduit(repo_id, publish_conduit.distributor_id)

            # If there is a date filter, perform an incremental export; otherwise export everything
            if self.date_filter:
                result = export_utils.export_incremental_content(working_dir, repo_conduit, self.date_filter)
            else:
                result = export_utils.export_complete_repo(repo_id, working_dir, repo_conduit, config)
            if not config.get(constants.EXPORT_DIRECTORY_KEYWORD):
                util.generate_listing_files(repo_group.working_dir, working_dir)
            else:
                export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
                util.generate_listing_files(export_dir, working_dir)

            self.summary[repo_id] = result[0]
            self.details[repo_id] = result[1]

            repos_progress[constants.PROGRESS_ITEMS_LEFT_KEY] -= 1
            repos_progress[constants.PROGRESS_NUM_SUCCESS_KEY] += 1
            progress_callback(constants.PROGRESS_REPOS_KEYWORD, repos_progress)

        repos_progress[constants.PROGRESS_STATE_KEY] = constants.STATE_COMPLETE
        progress_callback(constants.PROGRESS_REPOS_KEYWORD, repos_progress)

        # If there was no export directory, publish via ISOs
        if not config.get(constants.EXPORT_DIRECTORY_KEYWORD):
            self._publish_isos(repo_group, config, progress_callback)

        for repo_id, repo_dir in rpm_repositories:
            if repo_id in self.details and self.details[repo_id]["errors"]:
                return publish_conduit.build_failure_report(self.summary, self.details)

        self.summary["repositories_exported"] = len(rpm_repositories)
        self.summary["repositories_skipped"] = len(repo_group.repo_ids) - len(rpm_repositories)

        return publish_conduit.build_success_report(self.summary, self.details)
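For illustration, a later phase could reuse the same callback to mark the ISO step complete; the keyword and state constants below are the ones already used in the snippet above:

    progress_callback(constants.PROGRESS_ISOS_KEYWORD,
                      {constants.PROGRESS_STATE_KEY: constants.STATE_COMPLETE})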
Example #9
    def publish_repo(self, repo, publish_conduit, config):
        summary = {}
        details = {}
        progress_status = {
            "packages":           {"state": "NOT_STARTED"},
            "distribution":       {"state": "NOT_STARTED"},
            "metadata":           {"state": "NOT_STARTED"},
            "packagegroups":      {"state": "NOT_STARTED"},
            "publish_http":       {"state": "NOT_STARTED"},
            "publish_https":      {"state": "NOT_STARTED"},
        }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        self.repo_working_dir = repo.working_dir

        if self.canceled:
            return publish_conduit.build_cancel_report(summary, details)
        skip_list = config.get('skip') or []
        # Determine Content in this repo
        pkg_units = []
        pkg_errors = []
        if 'rpm' not in skip_list:
            for type_id in [TYPE_ID_RPM, TYPE_ID_SRPM]:
                criteria = UnitAssociationCriteria(
                    type_ids=type_id,
                    unit_fields=['id', 'name', 'version', 'release', 'arch', 'epoch',
                                 '_storage_path', 'checksum', 'checksumtype'])
                pkg_units += publish_conduit.get_units(criteria=criteria)
            drpm_units = []
            if 'drpm' not in skip_list:
                criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
                drpm_units = publish_conduit.get_units(criteria=criteria)
            pkg_units += drpm_units
            # Create symlinks under repo.working_dir
            pkg_status, pkg_errors = self.handle_symlinks(pkg_units, repo.working_dir, progress_callback)
            if not pkg_status:
                _LOG.error("Unable to publish %s items" % (len(pkg_errors)))

        distro_errors = []
        distro_units = []
        if 'distribution' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DISTRO)
            distro_units = publish_conduit.get_units(criteria=criteria)
            # symlink distribution files if any under repo.working_dir
            distro_status, distro_errors = self.symlink_distribution_unit_files(distro_units, repo.working_dir, publish_conduit, progress_callback)
            if not distro_status:
                _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

        updateinfo_xml_path = None
        if 'erratum' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=TYPE_ID_ERRATA)
            errata_units = publish_conduit.get_units(criteria=criteria)
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo.working_dir)

        if self.canceled:
            return publish_conduit.build_cancel_report(summary, details)
        groups_xml_path = None
        existing_cats = []
        existing_groups = []
        if 'packagegroup' not in skip_list:
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
            existing_units = publish_conduit.get_units(criteria)
            existing_groups = filter(lambda u: u.type_id == TYPE_ID_PKG_GROUP, existing_units)
            existing_cats = filter(lambda u: u.type_id == TYPE_ID_PKG_CATEGORY, existing_units)
            groups_xml_path = comps_util.write_comps_xml(repo.working_dir, existing_groups, existing_cats)
        metadata_start_time = time.time()
        # update/generate metadata for the published repo
        self.use_createrepo = config.get('use_createrepo')
        if self.use_createrepo:
            metadata_status, metadata_errors = metadata.generate_metadata(
                repo.working_dir, publish_conduit, config, progress_callback, groups_xml_path)
        else:
            metadata_status, metadata_errors = metadata.generate_yum_metadata(
                repo.id, repo.working_dir, publish_conduit, config, progress_callback,
                is_cancelled=self.canceled, group_xml_path=groups_xml_path,
                updateinfo_xml_path=updateinfo_xml_path,
                repo_scratchpad=publish_conduit.get_repo_scratchpad())

        metadata_end_time = time.time()
        relpath = self.get_repo_relative_path(repo, config)
        if relpath.startswith("/"):
            relpath = relpath[1:]

        # Build the https and http publishing paths
        https_publish_dir = self.get_https_publish_dir(config)
        https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
        http_publish_dir = self.get_http_publish_dir(config)
        http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')

        # Clean up the old publish directories, if they exist.
        scratchpad = publish_conduit.get_repo_scratchpad()
        if OLD_REL_PATH_KEYWORD in scratchpad:
            old_relative_path = scratchpad[OLD_REL_PATH_KEYWORD]
            old_https_repo_publish_dir = os.path.join(https_publish_dir, old_relative_path)
            if os.path.exists(old_https_repo_publish_dir):
                util.remove_repo_publish_dir(https_publish_dir, old_https_repo_publish_dir)
            old_http_repo_publish_dir = os.path.join(http_publish_dir, old_relative_path)
            if os.path.exists(old_http_repo_publish_dir):
                util.remove_repo_publish_dir(http_publish_dir, old_http_repo_publish_dir)

        # Now write the current publish relative path to the scratch pad. This way, if the relative path
        # changes before the next publish, we can clean up the old path.
        scratchpad[OLD_REL_PATH_KEYWORD] = relpath
        publish_conduit.set_repo_scratchpad(scratchpad)

        # Handle publish link for HTTPS
        if config.get("https"):
            # Publish for HTTPS
            self.set_progress("publish_https", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
                util.create_symlink(repo.working_dir, https_repo_publish_dir)
                util.generate_listing_files(https_publish_dir, https_repo_publish_dir)
                summary["https_publish_dir"] = https_repo_publish_dir
                self.set_progress("publish_https", {"state" : "FINISHED"}, progress_callback)
            except Exception:
                _LOG.exception("HTTPS publish failed for repo <%s>" % repo.id)
                self.set_progress("publish_https", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_https", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(https_repo_publish_dir):
                _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
                util.remove_repo_publish_dir(https_publish_dir, https_repo_publish_dir)

        # Handle publish link for HTTP
        if config.get("http"):
            # Publish for HTTP
            self.set_progress("publish_http", {"state" : "IN_PROGRESS"}, progress_callback)
            try:
                _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
                util.create_symlink(repo.working_dir, http_repo_publish_dir)
                util.generate_listing_files(http_publish_dir, http_repo_publish_dir)
                summary["http_publish_dir"] = http_repo_publish_dir
                self.set_progress("publish_http", {"state" : "FINISHED"}, progress_callback)
            except Exception:
                _LOG.exception("HTTP publish failed for repo <%s>" % repo.id)
                self.set_progress("publish_http", {"state" : "FAILED"}, progress_callback)
        else:
            self.set_progress("publish_http", {"state" : "SKIPPED"}, progress_callback)
            if os.path.lexists(http_repo_publish_dir):
                _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
                util.remove_repo_publish_dir(http_publish_dir, http_repo_publish_dir)

        summary["num_package_units_attempted"] = len(pkg_units)
        summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
        summary["num_package_units_errors"] = len(pkg_errors)
        summary["num_distribution_units_attempted"] = len(distro_units)
        summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
        summary["num_distribution_units_errors"] = len(distro_errors)
        summary["num_package_groups_published"] = len(existing_groups)
        summary["num_package_categories_published"] = len(existing_cats)
        summary["relative_path"] = relpath
        if metadata_status is False and not metadata_errors:
            summary["skip_metadata_update"] = True
        else:
            summary["skip_metadata_update"] = False
        details["errors"] = pkg_errors + distro_errors # metadata_errors
        details['time_metadata_sec'] = metadata_end_time - metadata_start_time
        # Note: metadata generation may have been skipped rather than run (see skip_metadata_update)
        _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (summary, details))
        if details["errors"]:
            return publish_conduit.build_failure_report(summary, details)
        return publish_conduit.build_success_report(summary, details)
Example #10
    def publish_repo(self, repo, publish_conduit, config):
        """
        Export a yum repository to a given directory, or to ISO images

        :param repo:            metadata describing the repository
        :type  repo:            pulp.plugins.model.Repository
        :param publish_conduit: provides access to relevant Pulp functionality
        :type  publish_conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
        :param config:          plugin configuration
        :type  config:          pulp.plugins.config.PluginConfiguration

        :return: report describing the publish run
        :rtype:  pulp.plugins.model.PublishReport
        """
        # First, validate the configuration because there may be override config options, and
        # currently, validate_config is not called prior to publishing by the manager.
        valid_config, msg = export_utils.validate_export_config(config)
        if not valid_config:
            raise PulpDataException(msg)

        _logger.info('Starting export of [%s]' % repo.id)

        progress_status = {
            ids.TYPE_ID_RPM: {'state': constants.STATE_NOT_STARTED},
            ids.TYPE_ID_ERRATA: {'state': constants.STATE_NOT_STARTED},
            ids.TYPE_ID_DISTRO: {'state': constants.STATE_NOT_STARTED},
            ids.TYPE_ID_PKG_CATEGORY: {'state': constants.STATE_NOT_STARTED},
            ids.TYPE_ID_PKG_GROUP: {'state': constants.STATE_NOT_STARTED},
            'metadata': {'state': constants.STATE_NOT_STARTED},
            'isos': {'state': constants.STATE_NOT_STARTED},
            'publish_http': {'state': constants.STATE_NOT_STARTED},
            'publish_https': {'state': constants.STATE_NOT_STARTED},
        }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        # Retrieve a config tuple and unpack it for use
        config_settings = export_utils.retrieve_repo_config(repo, config)
        self.working_dir, self.date_filter = config_settings

        # Before starting, clean out the working directory to remove ISOs from the last publish
        shutil.rmtree(repo.working_dir, ignore_errors=True)
        os.makedirs(repo.working_dir)

        # If a date filter is not present, do a complete export. If it is, do an incremental export.
        if self.date_filter:
            result = export_utils.export_incremental_content(self.working_dir, publish_conduit,
                                                             self.date_filter, progress_callback)
        else:
            result = export_utils.export_complete_repo(repo.id, self.working_dir, publish_conduit,
                                                       config, progress_callback)

        self.summary = result[0]
        self.details = result[1]

        if not config.get(constants.EXPORT_DIRECTORY_KEYWORD):
            util.generate_listing_files(repo.working_dir, self.working_dir)
            # build iso and publish via HTTPS
            self._publish_isos(repo, config, progress_callback)
        else:
            export_dir = config.get(constants.EXPORT_DIRECTORY_KEYWORD)
            util.generate_listing_files(export_dir, self.working_dir)

        if self.details['errors']:
            return publish_conduit.build_failure_report(self.summary, self.details)
        return publish_conduit.build_success_report(self.summary, self.details)
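Note how both export examples above pick the listing root the same way: the configured export directory when one is present, otherwise the repo's working directory. A hypothetical direct-export call would therefore look like this; the paths are illustrative only:

    # Hypothetical paths for an export to a mounted directory; listing files
    # are generated from the export root down to the repo's directory.
    export_dir = '/mnt/exports'
    repo_export_dir = os.path.join(export_dir, 'rhel/6/x86_64')
    util.generate_listing_files(export_dir, repo_export_dir)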