def test_remove_symlink(self):
    """remove_symlink should delete the link and prune now-empty parent dirs.

    Builds a deep directory chain under the http publish dir, drops a
    symlink at the bottom, removes it via util.remove_symlink, and then
    verifies the whole chain was cleaned up back to the publish root.
    """
    publish_root = self.http_publish_dir
    nested_dir = os.path.join(publish_root, "a", "b", "c", "d", "e")
    os.makedirs(nested_dir)
    symlink_path = os.path.join(nested_dir, "temp_link").rstrip('/')
    os.symlink(self.https_publish_dir, symlink_path)
    self.assertTrue(os.path.exists(symlink_path))
    util.remove_symlink(publish_root, symlink_path)
    # the link itself and every now-empty intermediate directory are gone
    self.assertFalse(os.path.exists(symlink_path))
    self.assertEqual(len(os.listdir(publish_root)), 0)
def _handle_orphaned_distributions(self, units, repo_working_dir, scratchpad):
    """Remove published symlinks for distributions no longer in the repo.

    :param units: distribution units currently associated with the repo
    :param repo_working_dir: publish root the symlinks live under
    :param scratchpad: repo scratchpad dict; its
        PUBLISHED_DISTRIBUTION_FILES_KEY entry maps distro id -> list of
        published symlink paths
    :return: the scratchpad with entries for orphaned distros removed
    """
    distro_unit_ids = set(u.id for u in units)
    # default to an empty dict (not a list) since values are accessed by key
    published_distro_units = scratchpad.get(constants.PUBLISHED_DISTRIBUTION_FILES_KEY, {})
    # Iterate over a snapshot of the keys: deleting entries from the dict
    # while iterating it directly raises "dictionary changed size during
    # iteration" (scratchpad[...] and published_distro_units are the same
    # dict object).
    for distroid in list(published_distro_units):
        if distroid not in distro_unit_ids:
            # distro id on scratchpad not in the repo; remove the associated symlinks
            for orphaned_path in published_distro_units[distroid]:
                if os.path.islink(orphaned_path):
                    _LOG.debug("cleaning up orphaned distribution path %s" % orphaned_path)
                    util.remove_symlink(repo_working_dir, orphaned_path)
            # remove the cleaned up distroid from scratchpad
            del scratchpad[constants.PUBLISHED_DISTRIBUTION_FILES_KEY][distroid]
    return scratchpad
def publish_repo(self, repo, publish_conduit, config):
    """Publish the repo: symlink content, generate metadata, expose over http/https.

    :param repo: repository being published (provides id and working_dir)
    :param publish_conduit: conduit used to fetch units, report progress and
        build the final report
    :param config: publish call configuration (skip list, http/https flags, ...)
    :return: success report, or failure report when canceled or any errors occurred
    """
    summary = {}
    details = {}
    progress_status = {
        "packages": {"state": "NOT_STARTED"},
        "distribution": {"state": "NOT_STARTED"},
        "metadata": {"state": "NOT_STARTED"},
        "packagegroups": {"state": "NOT_STARTED"},
        "publish_http": {"state": "NOT_STARTED"},
        "publish_https": {"state": "NOT_STARTED"},
        }

    def progress_callback(type_id, status):
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    self.repo_working_dir = repo.working_dir

    if self.canceled:
        return publish_conduit.build_failure_report(summary, details)

    skip_list = config.get('skip') or []
    # Determine Content in this repo
    unfiltered_units = publish_conduit.get_units()
    # filter compatible units (comprehensions instead of filter(); same result)
    rpm_units = [u for u in unfiltered_units if u.type_id in [TYPE_ID_RPM, TYPE_ID_SRPM]]
    drpm_units = [u for u in unfiltered_units if u.type_id == TYPE_ID_DRPM]

    rpm_errors = []
    if 'rpm' not in skip_list:
        _LOG.info("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                  % (repo.id, len(unfiltered_units), len(rpm_units)))
        # Create symlinks under repo.working_dir
        rpm_status, rpm_errors = self.handle_symlinks(rpm_units, repo.working_dir, progress_callback)
        if not rpm_status:
            _LOG.error("Unable to publish %s items" % (len(rpm_errors)))

    drpm_errors = []
    if 'drpm' not in skip_list:
        _LOG.info("Publish on %s invoked. %s existing units, %s of which are supported to be published." \
                  % (repo.id, len(unfiltered_units), len(drpm_units)))
        # Create symlinks under repo.working_dir
        drpm_status, drpm_errors = self.handle_symlinks(drpm_units, repo.working_dir, progress_callback)
        if not drpm_status:
            _LOG.error("Unable to publish %s items" % (len(drpm_errors)))

    pkg_errors = rpm_errors + drpm_errors
    pkg_units = rpm_units + drpm_units

    distro_errors = []
    distro_units = [u for u in unfiltered_units if u.type_id == TYPE_ID_DISTRO]
    if 'distribution' not in skip_list:
        # symlink distribution files if any under repo.working_dir
        distro_status, distro_errors = self.symlink_distribution_unit_files(distro_units, repo.working_dir, progress_callback)
        if not distro_status:
            _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

    # update/generate metadata for the published repo
    repo_scratchpad = publish_conduit.get_repo_scratchpad()
    src_working_dir = ''
    # has_key() is deprecated; 'in' is the equivalent membership test
    if "importer_working_dir" in repo_scratchpad:
        src_working_dir = repo_scratchpad['importer_working_dir']

    if self.canceled:
        return publish_conduit.build_failure_report(summary, details)

    groups_xml_path = None
    existing_cats = []
    existing_groups = []
    if 'packagegroup' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
        existing_units = publish_conduit.get_units(criteria)
        existing_groups = [u for u in existing_units if u.type_id in [TYPE_ID_PKG_GROUP]]
        existing_cats = [u for u in existing_units if u.type_id in [TYPE_ID_PKG_CATEGORY]]
        groups_xml_path = comps_util.write_comps_xml(repo, existing_groups, existing_cats)

    metadata_start_time = time.time()
    self.copy_importer_repodata(src_working_dir, repo.working_dir)
    metadata_status, metadata_errors = metadata.generate_metadata(
        repo, publish_conduit, config, progress_callback, groups_xml_path)
    metadata_end_time = time.time()

    relpath = self.get_repo_relative_path(repo, config)
    if relpath.startswith("/"):
        relpath = relpath[1:]

    #
    # Handle publish link for HTTPS
    #
    https_publish_dir = self.get_https_publish_dir(config)
    https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
    if config.get("https"):
        # Publish for HTTPS
        self.set_progress("publish_https", {"state": "IN_PROGRESS"}, progress_callback)
        try:
            _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
            util.create_symlink(repo.working_dir, https_repo_publish_dir)
            summary["https_publish_dir"] = https_repo_publish_dir
            self.set_progress("publish_https", {"state": "FINISHED"}, progress_callback)
        except Exception:
            # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # propagate; log the cause instead of silently marking FAILED
            _LOG.exception("Unable to publish repo <%s> over https" % repo.id)
            self.set_progress("publish_https", {"state": "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_https", {"state": "SKIPPED"}, progress_callback)
        if os.path.lexists(https_repo_publish_dir):
            _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
            util.remove_symlink(https_publish_dir, https_repo_publish_dir)

    #
    # Handle publish link for HTTP
    #
    http_publish_dir = self.get_http_publish_dir(config)
    http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')
    if config.get("http"):
        # Publish for HTTP
        self.set_progress("publish_http", {"state": "IN_PROGRESS"}, progress_callback)
        try:
            _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
            util.create_symlink(repo.working_dir, http_repo_publish_dir)
            summary["http_publish_dir"] = http_repo_publish_dir
            self.set_progress("publish_http", {"state": "FINISHED"}, progress_callback)
        except Exception:
            # narrowed from a bare 'except:'; see https branch above
            _LOG.exception("Unable to publish repo <%s> over http" % repo.id)
            self.set_progress("publish_http", {"state": "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_http", {"state": "SKIPPED"}, progress_callback)
        if os.path.lexists(http_repo_publish_dir):
            _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
            util.remove_symlink(http_publish_dir, http_repo_publish_dir)

    summary["num_package_units_attempted"] = len(pkg_units)
    summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
    summary["num_package_units_errors"] = len(pkg_errors)
    summary["num_distribution_units_attempted"] = len(distro_units)
    summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
    summary["num_distribution_units_errors"] = len(distro_errors)
    summary["num_package_groups_published"] = len(existing_groups)
    summary["num_package_categories_published"] = len(existing_cats)
    summary["relative_path"] = relpath
    if metadata_status is False and not len(metadata_errors):
        summary["skip_metadata_update"] = True
    else:
        summary["skip_metadata_update"] = False
    details["errors"] = pkg_errors + distro_errors + metadata_errors
    details['time_metadata_sec'] = metadata_end_time - metadata_start_time
    # metadata generate skipped vs run
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (summary, details))
    if details["errors"]:
        return publish_conduit.build_failure_report(summary, details)
    return publish_conduit.build_success_report(summary, details)
def publish_repo(self, repo, publish_conduit, config):
    """Publish the repo: symlink content, generate yum metadata, expose over http/https.

    :param repo: repository being published (provides id and working_dir)
    :param publish_conduit: conduit used to fetch units, report progress and
        build the final report
    :param config: publish call configuration (skip list, http/https flags,
        use_createrepo, ...)
    :return: success report, or failure report when canceled or any errors occurred
    """
    summary = {}
    details = {}
    progress_status = {
        "packages": {"state": "NOT_STARTED"},
        "distribution": {"state": "NOT_STARTED"},
        "metadata": {"state": "NOT_STARTED"},
        "packagegroups": {"state": "NOT_STARTED"},
        "publish_http": {"state": "NOT_STARTED"},
        "publish_https": {"state": "NOT_STARTED"},
        }

    def progress_callback(type_id, status):
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    self.repo_working_dir = repo.working_dir

    if self.canceled:
        return publish_conduit.build_failure_report(summary, details)

    skip_list = config.get('skip') or []
    # Determine Content in this repo
    pkg_units = []
    pkg_errors = []
    if 'rpm' not in skip_list:
        # fetch only the fields needed to build symlinks and repo metadata
        for type_id in [TYPE_ID_RPM, TYPE_ID_SRPM]:
            criteria = UnitAssociationCriteria(type_ids=type_id,
                    unit_fields=['id', 'name', 'version', 'release', 'arch', 'epoch',
                                 '_storage_path', "checksum", "checksumtype"])
            pkg_units += publish_conduit.get_units(criteria=criteria)

    drpm_units = []
    if 'drpm' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DRPM)
        drpm_units = publish_conduit.get_units(criteria=criteria)
    pkg_units += drpm_units

    # Create symlinks under repo.working_dir
    pkg_status, pkg_errors = self.handle_symlinks(pkg_units, repo.working_dir, progress_callback)
    if not pkg_status:
        _LOG.error("Unable to publish %s items" % (len(pkg_errors)))

    distro_errors = []
    distro_units = []
    if 'distribution' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=TYPE_ID_DISTRO)
        distro_units = publish_conduit.get_units(criteria=criteria)
        # symlink distribution files if any under repo.working_dir
        distro_status, distro_errors = self.symlink_distribution_unit_files(distro_units, repo.working_dir, publish_conduit, progress_callback)
        if not distro_status:
            _LOG.error("Unable to publish distribution tree %s items" % (len(distro_errors)))

    updateinfo_xml_path = None
    if 'erratum' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=TYPE_ID_ERRATA)
        errata_units = publish_conduit.get_units(criteria=criteria)
        updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo.working_dir)

    if self.canceled:
        return publish_conduit.build_failure_report(summary, details)

    groups_xml_path = None
    existing_cats = []
    existing_groups = []
    if 'packagegroup' not in skip_list:
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
        existing_units = publish_conduit.get_units(criteria)
        existing_groups = [u for u in existing_units if u.type_id in [TYPE_ID_PKG_GROUP]]
        existing_cats = [u for u in existing_units if u.type_id in [TYPE_ID_PKG_CATEGORY]]
        groups_xml_path = comps_util.write_comps_xml(repo.working_dir, existing_groups, existing_cats)

    metadata_start_time = time.time()
    # update/generate metadata for the published repo
    self.use_createrepo = config.get('use_createrepo')
    if self.use_createrepo:
        metadata_status, metadata_errors = metadata.generate_metadata(
            repo.working_dir, publish_conduit, config, progress_callback, groups_xml_path)
    else:
        metadata_status, metadata_errors = metadata.generate_yum_metadata(
            repo.working_dir, publish_conduit, config, progress_callback,
            is_cancelled=self.canceled, group_xml_path=groups_xml_path,
            updateinfo_xml_path=updateinfo_xml_path,
            repo_scratchpad=publish_conduit.get_repo_scratchpad())
    metadata_end_time = time.time()

    relpath = self.get_repo_relative_path(repo, config)
    if relpath.startswith("/"):
        relpath = relpath[1:]

    #
    # Handle publish link for HTTPS
    #
    https_publish_dir = self.get_https_publish_dir(config)
    https_repo_publish_dir = os.path.join(https_publish_dir, relpath).rstrip('/')
    if config.get("https"):
        # Publish for HTTPS
        self.set_progress("publish_https", {"state": "IN_PROGRESS"}, progress_callback)
        try:
            _LOG.info("HTTPS Publishing repo <%s> to <%s>" % (repo.id, https_repo_publish_dir))
            util.create_symlink(repo.working_dir, https_repo_publish_dir)
            summary["https_publish_dir"] = https_repo_publish_dir
            self.set_progress("publish_https", {"state": "FINISHED"}, progress_callback)
        except Exception:
            # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # propagate; log the cause instead of silently marking FAILED
            _LOG.exception("Unable to publish repo <%s> over https" % repo.id)
            self.set_progress("publish_https", {"state": "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_https", {"state": "SKIPPED"}, progress_callback)
        if os.path.lexists(https_repo_publish_dir):
            _LOG.debug("Removing link for %s since https is not set" % https_repo_publish_dir)
            util.remove_symlink(https_publish_dir, https_repo_publish_dir)

    #
    # Handle publish link for HTTP
    #
    http_publish_dir = self.get_http_publish_dir(config)
    http_repo_publish_dir = os.path.join(http_publish_dir, relpath).rstrip('/')
    if config.get("http"):
        # Publish for HTTP
        self.set_progress("publish_http", {"state": "IN_PROGRESS"}, progress_callback)
        try:
            _LOG.info("HTTP Publishing repo <%s> to <%s>" % (repo.id, http_repo_publish_dir))
            util.create_symlink(repo.working_dir, http_repo_publish_dir)
            summary["http_publish_dir"] = http_repo_publish_dir
            self.set_progress("publish_http", {"state": "FINISHED"}, progress_callback)
        except Exception:
            # narrowed from a bare 'except:'; see https branch above
            _LOG.exception("Unable to publish repo <%s> over http" % repo.id)
            self.set_progress("publish_http", {"state": "FAILED"}, progress_callback)
    else:
        self.set_progress("publish_http", {"state": "SKIPPED"}, progress_callback)
        if os.path.lexists(http_repo_publish_dir):
            _LOG.debug("Removing link for %s since http is not set" % http_repo_publish_dir)
            util.remove_symlink(http_publish_dir, http_repo_publish_dir)

    summary["num_package_units_attempted"] = len(pkg_units)
    summary["num_package_units_published"] = len(pkg_units) - len(pkg_errors)
    summary["num_package_units_errors"] = len(pkg_errors)
    summary["num_distribution_units_attempted"] = len(distro_units)
    summary["num_distribution_units_published"] = len(distro_units) - len(distro_errors)
    summary["num_distribution_units_errors"] = len(distro_errors)
    summary["num_package_groups_published"] = len(existing_groups)
    summary["num_package_categories_published"] = len(existing_cats)
    summary["relative_path"] = relpath
    if metadata_status is False and not len(metadata_errors):
        summary["skip_metadata_update"] = True
    else:
        summary["skip_metadata_update"] = False
    # include metadata_errors: the original dropped them (leftover
    # '# metadata_errors' comment), so metadata failures never produced a
    # failure report even though summary still inspected them
    details["errors"] = pkg_errors + distro_errors + metadata_errors
    details['time_metadata_sec'] = metadata_end_time - metadata_start_time
    # metadata generate skipped vs run
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (summary, details))
    if details["errors"]:
        return publish_conduit.build_failure_report(summary, details)
    return publish_conduit.build_success_report(summary, details)