def test_distribution_exports(self):
    feed_url = "file://%s/pulp_unittest/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "pulp_unittest"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    dunit_key = {}
    dunit_key['id'] = "ks-TestFamily-TestVariant-16-x86_64"
    dunit_key['version'] = "16"
    dunit_key['arch'] = "x86_64"
    dunit_key['family'] = "TestFamily"
    dunit_key['variant'] = "TestVariant"
    metadata = {"files": [
        {"checksumtype": "sha256",
         "relativepath": "images/fileA.txt",
         "fileName": "fileA.txt",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileA.txt",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.repo_working_dir,
         "checksum": "22603a94360ee24b7034c74fa13d70dd122aa8c4be2010fc1361e1e6b0b410ab",
         "filename": "fileA.txt",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0},
        {"checksumtype": "sha256",
         "relativepath": "images/fileB.txt",
         "fileName": "fileB.txt",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileB.txt",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.repo_working_dir,
         "checksum": "8dc89e9883c098443f6616e60a8e489254bf239eeade6e4b4943b7c8c0c345a4",
         "filename": "fileB.txt",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0},
        {"checksumtype": "sha256",
         "relativepath": "images/fileC.iso",
         "fileName": "fileC.iso",
         "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileC.iso",
         "item_type": "tree_file",
         "savepath": "%s/testr1/images" % self.repo_working_dir,
         "checksum": "099f2bafd533e97dcfee778bc24138c40f114323785ac1987a0db66e07086f74",
         "filename": "fileC.iso",
         "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
         "size": 0}]}
    distro_unit = Unit(distribution.TYPE_ID_DISTRO, dunit_key, metadata, '')
    distro_unit.storage_path = "%s/ks-TestFamily-TestVariant-16-x86_64" % self.pkg_dir
    symlink_dir = "%s/%s" % (self.repo_working_dir, "isos")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=[distro_unit], pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir, http=False, https=True)
    repo_exporter = RepoExporter(symlink_dir)
    # status, errors = iso_distributor._export_distributions([distro_unit], symlink_dir)
    status, errors = repo_exporter.export_distributions([distro_unit])
    print status, errors
    self.assertTrue(status)
    for file_info in metadata['files']:
        print os.path.isfile("%s/%s" % (symlink_dir, file_info['relativepath']))
        self.assertTrue(os.path.isfile("%s/%s" % (symlink_dir, file_info['relativepath'])))
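# Not the real RepoExporter.export_distributions (which lives in the exporter
# module); a minimal sketch of the symlink layout the assertions above expect:
# each tree file is linked from its pkgpath into symlink_dir/<relativepath>.
# The helper name export_tree_files is hypothetical.
def export_tree_files(distro_units, symlink_dir):
    errors = []
    for unit in distro_units:
        for file_info in unit.metadata['files']:
            source = os.path.join(file_info['pkgpath'], file_info['filename'])
            target = os.path.join(symlink_dir, file_info['relativepath'])
            # create the parent directory and link the file into place
            if not os.path.isdir(os.path.dirname(target)):
                os.makedirs(os.path.dirname(target))
            if not os.path.lexists(target):
                os.symlink(source, target)
            if not os.path.exists(target):
                errors.append("missing %s" % target)
    return len(errors) == 0, errors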
def test_export_rpm(self):
    feed_url = "file://%s/test_repo_for_export/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_repo_for_export"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    self.assertTrue(status)
    unit_key_a = {'id': '', 'name': 'pulp-dot-2.0-test', 'version': '0.1.2', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
                  'type_id': 'rpm'}
    unit_a = Unit(TYPE_ID_RPM, unit_key_a, {}, '')
    unit_a.storage_path = ("%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/"
                           "435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/"
                           "pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm") % self.pkg_dir
    unit_key_b = {'id': '', 'name': 'pulp-test-package', 'version': '0.2.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7',
                  'type_id': 'rpm'}
    unit_b = Unit(TYPE_ID_RPM, unit_key_b, {}, '')
    unit_b.storage_path = ("%s/pulp-test-package/0.2.1/1.fc11/x86_64/"
                           "4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/"
                           "pulp-test-package-0.2.1-1.fc11.x86_64.rpm") % self.pkg_dir
    unit_key_c = {'id': '', 'name': 'pulp-test-package', 'version': '0.3.1', 'release': '1.fc11',
                  'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                  'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
                  'type_id': 'rpm'}
    unit_c = Unit(TYPE_ID_RPM, unit_key_c, {}, '')
    unit_c.storage_path = ("%s/pulp-test-package/0.3.1/1.fc11/x86_64/"
                           "6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/"
                           "pulp-test-package-0.3.1-1.fc11.x86_64.rpm") % self.pkg_dir
    existing_units = [unit_a, unit_b, unit_c]
    symlink_dir = "%s/%s" % (self.repo_working_dir, "isos")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir, http=False, https=True)
    print symlink_dir
    # status, errors = iso_distributor._export_rpms(existing_units, symlink_dir)
    repo_exporter = RepoExporter(symlink_dir)
    status, errors = repo_exporter.export_rpms(existing_units)
    print status, errors
    self.assertTrue(status)
    self.assertEquals(len(os.listdir(symlink_dir)), 3)
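# The storage_path values above all follow the same layout:
# <pkg_dir>/<name>/<version>/<release>/<arch>/<checksum>/<name-version-release.arch>.rpm.
# A hypothetical helper (not part of the code under test) that derives such a
# path from a unit key, handy when adding more fixtures:
def rpm_storage_path(pkg_dir, unit_key):
    rpm_name = "%s-%s-%s.%s.rpm" % (unit_key['name'], unit_key['version'],
                                    unit_key['release'], unit_key['arch'])
    return os.path.join(pkg_dir, unit_key['name'], unit_key['version'],
                        unit_key['release'], unit_key['arch'],
                        unit_key['checksum'], rpm_name)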
def test_errata_export(self):
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {'updated': '2010-03-30 08:07:30'}
    unit_key_a = {'id': '', 'name': 'patb', 'version': '0.1', 'release': '2', 'epoch': '0',
                  'arch': 'x86_64', 'checksumtype': 'md5',
                  'checksum': 'f3c197a29d9b66c5b65c5d62b25db5b4'}
    unit_key_b = {'id': '', 'name': 'emoticons', 'version': '0.1', 'release': '2', 'epoch': '0',
                  'arch': 'x86_64', 'checksumtype': 'md5',
                  'checksum': '366bb5e73a5905eacb82c96e0578f92b'}
    existing_units = []
    for unit in [unit_key_a, unit_key_b]:
        existing_units.append(Unit(TYPE_ID_RPM, unit, metadata, ''))
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    unit_key = dict()
    unit_key['id'] = "RHEA-2010:9999"
    mdata = {
        'description': "test",
        'from_str': '*****@*****.**',
        'issued': '2010-03-30 08:07:30',
        'pkglist': [{
            'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
            'packages': [
                {'arch': 'x86_64',
                 'epoch': '0',
                 'filename': 'patb-0.1-2.x86_64.rpm',
                 'name': 'patb',
                 'release': '2',
                 'src': '',
                 'sum': ('md5', 'f3c197a29d9b66c5b65c5d62b25db5b4'),
                 'version': '0.1'},
                {'arch': 'x86_64',
                 'epoch': '0',
                 'filename': 'emoticons-0.1-2.x86_64.rpm',
                 'name': 'emoticons',
                 'release': '2',
                 'src': '',
                 'sum': ('md5', '366bb5e73a5905eacb82c96e0578f92b'),
                 'version': '0.1'}],
            'short': 'rhel-i386-server-vt-5'}],
        'pushcount': 1,
        'reboot_suggested': False,
        'references': [],
        'release': '',
        'rights': '',
        'status': 'final',
        'summary': '',
        'title': 'emoticons enhancement fix and enhancement update',
        'updated': '2010-03-30 08:07:30',
        'version': '1',
        'type': 'enhancement',
        'severity': 'Low',
        'solution': ''}
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, '')]
    symlink_dir = "%s/%s" % (self.repo_working_dir, repo.id)
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir, http=False, https=True)
    print symlink_dir
    repo_exporter = RepoExporter(symlink_dir)
    # rpm_units = iso_distributor._get_errata_rpms(errata_unit, existing_units)
    rpm_units = repo_exporter.get_errata_rpms(errata_unit, existing_units)
    print "RPMS in ERRATA", rpm_units
    # iso_distributor._export_rpms(rpm_units, self.repo_working_dir)
    repo_exporter.export_rpms(rpm_units)
    status, errors = repo_exporter.export_errata(errata_unit)
    # status, errors = iso_distributor._export_errata(errata_unit, symlink_dir)
    self.assertTrue(os.path.exists("%s/%s" % (symlink_dir, "updateinfo.xml")))
    self.assertTrue(status)
    ftypes = util.get_repomd_filetypes("%s/%s" % (symlink_dir, "repodata/repomd.xml"))
    print ftypes
    self.assertTrue("updateinfo" in ftypes)
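# get_errata_rpms is expected to narrow existing_units down to the packages
# referenced in the errata pkglists. A simplified sketch of that matching,
# keyed on name/epoch/version/release/arch (the real implementation may also
# compare checksums); match_errata_rpms is a hypothetical name:
def match_errata_rpms(errata_units, existing_units):
    wanted = set()
    for erratum in errata_units:
        for pkglist in erratum.metadata.get('pkglist', []):
            for pkg in pkglist.get('packages', []):
                wanted.add((pkg['name'], pkg['epoch'], pkg['version'],
                            pkg['release'], pkg['arch']))
    # keep only the units whose key fields appear in some erratum pkglist
    return [u for u in existing_units
            if (u.unit_key['name'], u.unit_key['epoch'], u.unit_key['version'],
                u.unit_key['release'], u.unit_key['arch']) in wanted]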
def publish_repo(self, repo, publish_conduit, config):
    publish_start_time = time.time()
    _LOG.info("Start publish time %s" % publish_start_time)
    progress_status = {
        "rpms": {"state": "NOT_STARTED"},
        "errata": {"state": "NOT_STARTED"},
        "distribution": {"state": "NOT_STARTED"},
        "metadata": {"state": "NOT_STARTED"},
        "packagegroups": {"state": "NOT_STARTED"},
        "isos": {"state": "NOT_STARTED"},
        "publish_http": {"state": "NOT_STARTED"},
        "publish_https": {"state": "NOT_STARTED"},
    }

    def progress_callback(type_id, status):
        progress_status[type_id] = status
        publish_conduit.set_progress(progress_status)

    self.repo_working_dir = repo_working_dir = repo.working_dir
    if self.cancelled:
        return publish_conduit.build_failure_report(self.summary, self.details)
    skip_types = config.get("skip") or []
    repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
    date_filter = repo_exporter.create_date_range_filter(config)
    groups_xml_path = None
    updateinfo_xml_path = None
    if date_filter:
        # export only the errata within the date range and their associated rpm units
        progress_status["errata"]["state"] = "STARTED"
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA], unit_filters=date_filter)
        errata_units = publish_conduit.get_units(criteria=criteria)
        rpm_units = self.__get_rpm_units(publish_conduit)
        drpm_criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DRPM])
        rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
        rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
        rpm_summary, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
        if self.cancelled:
            return publish_conduit.build_failure_report(self.summary, self.details)
        updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
        progress_status["errata"]["num_success"] = len(errata_units)
        progress_status["errata"]["state"] = "FINISHED"
        self.summary = dict(self.summary.items() + rpm_summary.items())
        self.summary["num_errata_units_exported"] = len(errata_units)
        self.details["errors"] = rpm_errors
    else:
        # no date filter; export everything
        # export rpms (binary, source and delta)
        rpm_units = self.__get_rpm_units(publish_conduit)
        drpm_criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DRPM])
        rpm_units += publish_conduit.get_units(criteria=drpm_criteria)
        rpm_summary, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
        # export package groups
        if "packagegroup" not in skip_types:
            progress_status["packagegroups"]["state"] = "STARTED"
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
            existing_units = publish_conduit.get_units(criteria=criteria)
            existing_groups = filter(lambda u: u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
            existing_cats = filter(lambda u: u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
            groups_xml_path = comps_util.write_comps_xml(repo_working_dir, existing_groups, existing_cats)
            self.summary["num_package_groups_exported"] = len(existing_groups)
            self.summary["num_package_categories_exported"] = len(existing_cats)
            progress_status["packagegroups"]["state"] = "FINISHED"
        else:
            progress_status["packagegroups"]["state"] = "SKIPPED"
            _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)
        if self.cancelled:
            return publish_conduit.build_failure_report(self.summary, self.details)
        # export errata
        if 'erratum' not in skip_types:
            progress_status["errata"]["state"] = "STARTED"
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
            errata_units = publish_conduit.get_units(criteria=criteria)
            progress_status["errata"]["state"] = "IN_PROGRESS"
            updateinfo_xml_path = updateinfo.updateinfo(errata_units, repo_working_dir)
            progress_status["errata"]["num_success"] = len(errata_units)
            progress_status["errata"]["state"] = "FINISHED"
            self.summary["num_errata_units_exported"] = len(errata_units)
        else:
            # errata_summary, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)
            progress_status["errata"]["state"] = "SKIPPED"
            _LOG.info("errata unit type in skip list [%s]; skipping export" % skip_types)
        # export distribution units
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
        distro_units = publish_conduit.get_units(criteria=criteria)
        distro_summary, distro_errors = repo_exporter.export_distributions(distro_units, progress_callback=progress_callback)
        # sum up summary and details
        self.details["errors"] = rpm_errors + distro_errors
        self.summary = dict(self.summary.items() + rpm_summary.items() + distro_summary.items())
    # generate metadata
    metadata_status, metadata_errors = metadata.generate_yum_metadata(
        repo_working_dir, publish_conduit, config, progress_callback,
        is_cancelled=self.cancelled, group_xml_path=groups_xml_path,
        updateinfo_xml_path=updateinfo_xml_path,
        repo_scratchpad=publish_conduit.get_repo_scratchpad())
    _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
    self.details["errors"] += metadata_errors
    # build iso and publish via HTTPS
    self._publish_isos(repo, config, progress_callback=progress_callback)
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (self.summary, self.details))
    # remove exported content from the working directory
    iso_util.cleanup_working_dir(self.repo_working_dir)
    if self.details["errors"]:
        return publish_conduit.build_failure_report(self.summary, self.details)
    return publish_conduit.build_success_report(self.summary, self.details)
def publish_group(self, repo_group, publish_conduit, config):
    """
    see parent class for doc string
    """
    self.group_working_dir = group_working_dir = repo_group.working_dir
    skip_types = config.get("skip") or []
    self.init_group_progress()
    self.group_progress_status["group-id"] = repo_group.id

    # progress callback for group status
    def group_progress_callback(type_id, status):
        self.group_progress_status[type_id] = status
        publish_conduit.set_progress(self.group_progress_status)

    # loop through each repo in the group and perform exports
    for repoid in repo_group.repo_ids:
        _LOG.info("Exporting repo %s " % repoid)
        summary = {}
        details = {}
        progress_status = {
            "rpms": {"state": "NOT_STARTED"},
            "errata": {"state": "NOT_STARTED"},
            "distribution": {"state": "NOT_STARTED"},
            "packagegroups": {"state": "NOT_STARTED"},
        }

        def progress_callback(type_id, status):
            progress_status[type_id] = status
            publish_conduit.set_progress(progress_status)

        repo_working_dir = "%s/%s" % (group_working_dir, repoid)
        repo_exporter = RepoExporter(repo_working_dir, skip=skip_types)
        # check if any date filter is set on the distributor
        date_filter = repo_exporter.create_date_range_filter(config)
        _LOG.info("repo working dir %s" % repo_working_dir)
        if date_filter:
            # If a date range is specified, we only export the errata within that range
            # and the associated rpm units. This might change once we have dates
            # associated with other unit types.
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA], unit_filters=date_filter)
            errata_units = publish_conduit.get_units(repoid, criteria=criteria)
            # we only include binary and source rpms; drpms are not associated with errata
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM])
            rpm_units = publish_conduit.get_units(repoid, criteria=criteria)
            rpm_units = repo_exporter.get_errata_rpms(errata_units, rpm_units)
            rpm_status, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
            if self.canceled:
                return publish_conduit.build_failure_report(summary, details)
            # generate metadata
            metadata_status, metadata_errors = metadata.generate_metadata(
                repo_working_dir, publish_conduit, config, progress_callback)
            _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
            # export errata and generate updateinfo xml
            errata_status, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)
            summary["num_package_units_attempted"] = len(rpm_units)
            summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
            summary["num_package_units_errors"] = len(rpm_errors)
            details["errors"] = rpm_errors + errata_errors + metadata_errors
        else:
            # export rpm units (this includes binary, source and delta)
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM, TYPE_ID_DRPM])
            rpm_units = publish_conduit.get_units(repoid, criteria)
            rpm_status, rpm_errors = repo_exporter.export_rpms(rpm_units, progress_callback=progress_callback)
            summary["num_package_units_attempted"] = len(rpm_units)
            summary["num_package_units_exported"] = len(rpm_units) - len(rpm_errors)
            summary["num_package_units_errors"] = len(rpm_errors)
            # export package group information and generate comps.xml
            groups_xml_path = None
            if "packagegroup" not in skip_types:
                progress_status["packagegroups"]["state"] = "STARTED"
                criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_PKG_GROUP, TYPE_ID_PKG_CATEGORY])
                existing_units = publish_conduit.get_units(repoid, criteria)
                existing_groups = filter(lambda u: u.type_id in [TYPE_ID_PKG_GROUP], existing_units)
                existing_cats = filter(lambda u: u.type_id in [TYPE_ID_PKG_CATEGORY], existing_units)
                groups_xml_path = comps_util.write_comps_xml(repo_working_dir, existing_groups, existing_cats)
                summary["num_package_groups_exported"] = len(existing_groups)
                summary["num_package_categories_exported"] = len(existing_cats)
                progress_status["packagegroups"]["state"] = "FINISHED"
            else:
                progress_status["packagegroups"]["state"] = "SKIPPED"
                _LOG.info("packagegroup unit type in skip list [%s]; skipping export" % skip_types)
            if self.canceled:
                return publish_conduit.build_failure_report(summary, details)
            # generate metadata
            metadata_status, metadata_errors = metadata.generate_metadata(
                repo_working_dir, publish_conduit, config, progress_callback, groups_xml_path)
            _LOG.info("metadata generation complete at target location %s" % repo_working_dir)
            # export errata units and associated rpms
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_ERRATA])
            errata_units = publish_conduit.get_units(repoid, criteria)
            errata_status, errata_errors = repo_exporter.export_errata(errata_units, progress_callback=progress_callback)
            summary["num_errata_units_exported"] = len(errata_units)
            # export distributions
            criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_DISTRO])
            distro_units = publish_conduit.get_units(repoid, criteria)
            distro_status, distro_errors = repo_exporter.export_distributions(distro_units, progress_callback=progress_callback)
            summary["num_distribution_units_attempted"] = len(distro_units)
            summary["num_distribution_units_exported"] = len(distro_units) - len(distro_errors)
            summary["num_distribution_units_errors"] = len(distro_errors)
            self.group_progress_status["repositories"][repoid] = progress_status
            self.set_progress("repositories", self.group_progress_status["repositories"], group_progress_callback)
            details["errors"] = rpm_errors + distro_errors + errata_errors + metadata_errors
        self.group_summary[repoid] = summary
        self.group_details[repoid] = details
    # generate and publish isos
    self._publish_isos(repo_group, config, progress_callback=group_progress_callback)
    _LOG.info("Publish complete: summary = <%s>, details = <%s>" % (self.group_summary, self.group_details))
    # remove exported content from the working directory
    iso_util.cleanup_working_dir(self.group_working_dir)
    # build a failure report if any repo in the group reported errors
    if any(self.group_details[repoid]["errors"] for repoid in self.group_details):
        return publish_conduit.build_failure_report(self.group_summary, self.group_details)
    return publish_conduit.build_success_report(self.group_summary, self.group_details)
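# Both publish paths branch on RepoExporter.create_date_range_filter, which is
# not shown in this excerpt. A minimal sketch, assuming the distributor config
# carries optional start_date/end_date values (both key names are assumptions)
# and that the filter is a mongo-style query against the erratum 'issued'
# timestamp, matching how it is passed to UnitAssociationCriteria above:
def create_date_range_filter(config):
    start_date = config.get('start_date')
    end_date = config.get('end_date')
    if not (start_date or end_date):
        # no range configured; callers fall through to the full export path
        return None
    issued_range = {}
    if start_date:
        issued_range['$gte'] = start_date
    if end_date:
        issued_range['$lte'] = end_date
    return {'issued': issued_range}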