def test_yum_plugin_generate_yum_metadata_checksum_default(self, mock_YumMetadataGenerator, mock_distributor_manager):
    """
    When neither the config nor the conduit supplies a checksum type,
    metadata generation must fall back to metadata.DEFAULT_CHECKSUM and
    must not touch the distributor manager.
    """
    mock_repo = mock.Mock(spec=Repository)
    mock_repo.working_dir = self.repo_working_dir
    mock_repo.id = "test_publish"
    unit_count = 10
    rel_url = "rel_a/rel_b/rel_c/"
    units = self.get_units(count=unit_count)
    conduit = distributor_mocks.get_publish_conduit(type_id="rpm", existing_units=units,
                                                    checksum_type=None, pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                relative_url=rel_url, http=False, https=True)
    distributor = YumDistributor()
    distributor.process_repo_auth_certificate_bundle = mock.Mock()
    config_conduit = mock.Mock(spec=RepoConfigConduit)
    config_conduit.get_repo_distributors_by_relative_url.return_value = MockCursor([])
    metadata.generate_yum_metadata(mock_repo.id, mock_repo.working_dir, conduit, config)
    # The generator must be constructed with the library default checksum type.
    mock_YumMetadataGenerator.assert_called_with(ANY, checksum_type=metadata.DEFAULT_CHECKSUM,
                                                 skip_metadata_types=ANY, is_cancelled=ANY,
                                                 group_xml_path=ANY, updateinfo_xml_path=ANY,
                                                 custom_metadata_dict=ANY)
    # No checksum conversion occurred, so the distributor manager is never used.
    self.assertFalse(mock_distributor_manager.called)
def test_yum_generate_metadata(self):
    """Successful yum metadata generation reports a FINISHED 'metadata' progress state."""
    global metadata_progress_status
    metadata_progress_status = {}

    def set_progress(progress):
        # Conduit side effect: capture the most recent full progress report.
        global metadata_progress_status
        metadata_progress_status = progress

    def progress_callback(type_id, status):
        # Merge the per-type status and push the whole report through the conduit.
        # NOTE: relies on late binding — mock_publish_conduit is defined further
        # down, but this callback only fires after it exists.
        metadata_progress_status[type_id] = status
        mock_publish_conduit.set_progress(metadata_progress_status)

    mock_repo = mock.Mock(spec=Repository)
    mock_repo.id = "test_repo"
    repo_scratchpad = {"checksum_type" : "sha", "repodata" : {}}
    mock_repo.working_dir = os.path.join(self.temp_dir, "test_yum_repo_metadata")
    # Confirm required and optional are successful
    # NOTE(review): units_to_write is built below but never passed to the call
    # under test — it appears to be a vestigial fixture; confirm before removing.
    units_to_write = mock.Mock()
    units_to_write.metadata = {}
    units_to_write.metadata["repodata"] = {}
    units_to_write.metadata["repodata"]["primary"] = """<package type="rpm"><name>feedless</name><arch>noarch</arch><version epoch="0" ver="1.0" rel="1"/><checksum type="sha" pkgid="YES">c1181097439ae4c69793c91febd8513475fb7ed6</checksum><summary>dummy testing pkg</summary><description>A dumb 1Mb pkg.</description><packager/><url/><time file="1299184404" build="1299168170"/><size package="1050973" installed="2097152" archive="1048976"/><location href="feedless-1.0-1.noarch.rpm"/><format><rpm:license>GPLv2</rpm:license><rpm:vendor/><rpm:group>Application</rpm:group><rpm:buildhost>pulp-qe-rhel5.usersys.redhat.com</rpm:buildhost><rpm:sourcerpm>feedless-1.0-1.src.rpm</rpm:sourcerpm><rpm:header-range start="456" end="1846"/><rpm:provides><rpm:entry name="feedless" flags="EQ" epoch="0" ver="1.0" rel="1"/></rpm:provides><rpm:requires><rpm:entry name="rpmlib(CompressedFileNames)" flags="LE" epoch="0" ver="3.0.4" rel="1" pre="1"/><rpm:entry name="rpmlib(PayloadFilesHavePrefix)" flags="LE" epoch="0" ver="4.0" rel="1" pre="1"/></rpm:requires></format></package>"""
    units_to_write.metadata["repodata"]["filelists"] = """<package pkgid="c1181097439ae4c69793c91febd8513475fb7ed6" name="feedless" arch="noarch"><version epoch="0" ver="1.0" rel="1"/><file>/tmp/rpm_test/feedless/key</file><file 
type="dir">/tmp/rpm_test/feedless</file></package>"""
    units_to_write.metadata["repodata"]["other"] = """<package pkgid="c1181097439ae4c69793c91febd8513475fb7ed6" name="feedless" arch="noarch"><version epoch="0" ver="1.0" rel="1"/></package>"""
    optional_kwargs = {"use_createrepo" : False}
    config = distributor_mocks.get_basic_config(**optional_kwargs)
    mock_publish_conduit = distributor_mocks.get_publish_conduit()
    # Route conduit progress reports into the module-level capture above.
    mock_publish_conduit.set_progress = mock.Mock()
    mock_publish_conduit.set_progress.side_effect = set_progress
    status, errors = metadata.generate_yum_metadata(mock_repo.working_dir, mock_publish_conduit, config,
                                                    progress_callback=progress_callback,
                                                    repo_scratchpad=repo_scratchpad)
    self.assertEquals(status, True)
    self.assertEquals(metadata_progress_status['metadata']['state'], "FINISHED")
def test_publish(self):
    """
    Publish to https only, verify counts/symlinks, then flip to http only
    and verify the https content is cleaned up.
    """
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_publish"
    num_units = 10
    relative_url = "rel_a/rel_b/rel_c/"
    existing_units = self.get_units(count=num_units)
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                relative_url=relative_url, http=False, https=True)
    distributor = YumDistributor()
    distributor.process_repo_auth_certificate_bundle = mock.Mock()
    status, msg = distributor.validate_config(repo, config, None)
    self.assertTrue(status)
    report = distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(report.success_flag)
    summary = report.summary
    # Every unit should have been attempted and published with no errors.
    self.assertEqual(summary["num_package_units_attempted"], num_units)
    self.assertEqual(summary["num_package_units_published"], num_units)
    self.assertEqual(summary["num_package_units_errors"], 0)
    # Verify we did not attempt to publish to http
    expected_repo_http_publish_dir = os.path.join(self.http_publish_dir, relative_url)
    self.assertFalse(os.path.exists(expected_repo_http_publish_dir))
    expected_repo_https_publish_dir = os.path.join(self.https_publish_dir, relative_url).rstrip('/')
    self.assertEqual(summary["https_publish_dir"], expected_repo_https_publish_dir)
    self.assertTrue(os.path.exists(expected_repo_https_publish_dir))
    details = report.details
    self.assertEqual(len(details["errors"]), 0)
    #
    # Add a verification of the publish directory
    #
    # The publish dir itself is a symlink back to the repo working dir.
    self.assertTrue(os.path.exists(summary["https_publish_dir"]))
    self.assertTrue(os.path.islink(summary["https_publish_dir"].rstrip("/")))
    source_of_link = os.readlink(expected_repo_https_publish_dir.rstrip("/"))
    self.assertEquals(source_of_link, repo.working_dir)
    #
    # Verify the expected units
    #
    # Each unit is exposed at its relative path as a symlink to its storage path.
    for u in existing_units:
        expected_link = os.path.join(expected_repo_https_publish_dir, u.metadata["relativepath"])
        self.assertTrue(os.path.exists(expected_link))
        actual_target = os.readlink(expected_link)
        expected_target = u.storage_path
        self.assertEqual(actual_target, expected_target)
    #
    # Now test flipping so https is disabled and http is enabled
    #
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                relative_url=relative_url, http=True, https=False)
    report = distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(report.success_flag)
    # Verify we did publish to http
    self.assertTrue(os.path.exists(expected_repo_http_publish_dir))
    # Verify we did not publish to https
    self.assertFalse(os.path.exists(expected_repo_https_publish_dir))
    # Verify we cleaned up the misc dirs under the https dir
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
def test_distributor_removed(self, delete_protected_repo, mock_factory):
    """
    distributor_removed() must delete the published http/https trees and
    drop repo protection via delete_protected_repo.
    """
    # Publish a small repo to both the http and https locations first.
    repo = mock.Mock(spec=Repository)
    repo.id = 'about_to_be_removed'
    repo.working_dir = self.repo_working_dir
    units = self.get_units(count=5)
    conduit = distributor_mocks.get_publish_conduit(type_id="rpm", existing_units=units,
                                                    pkg_dir=self.pkg_dir)
    conduit.repo_id = repo.id
    conduit.distributor_id = 'foo'
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=True, https=True)
    distributor = YumDistributor()
    distributor.publish_repo(repo, conduit, config)

    published_dirs = [os.path.join(base, repo.id)
                      for base in (self.http_publish_dir, self.https_publish_dir)]
    # Sanity check: both publish locations were created.
    self.assertTrue(all(os.path.exists(d) for d in published_dirs))

    # Ignore any calls made during publish; we only care about removal.
    delete_protected_repo.reset_mock()
    distributor.distributor_removed(repo, config)

    # The published locations are gone and repo protection was removed once.
    self.assertFalse(all(os.path.exists(d) for d in published_dirs))
    delete_protected_repo.assert_called_once_with(repo.id)
def test_yum_plugin_generate_yum_metadata_checksum_from_conduit_sha1_conversion(self, mock_YumMetadataGenerator, mock_distributor_manager):
    """
    A legacy 'sha' checksum type on the repo scratchpad must be converted to
    'sha1' for the generator, and the converted value written back to the
    distributor config.
    """
    mock_repo = mock.Mock(spec=Repository)
    mock_repo.working_dir = self.repo_working_dir
    mock_repo.id = "test_publish"
    unit_count = 10
    rel_url = "rel_a/rel_b/rel_c/"
    units = self.get_units(count=unit_count)
    conduit = distributor_mocks.get_publish_conduit(type_id="rpm", existing_units=units,
                                                    pkg_dir=self.pkg_dir)
    conduit.repo_id = 'foo'
    conduit.distributor_id = TYPE_ID_DISTRIBUTOR_YUM
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                relative_url=rel_url, http=False, https=True)
    distributor = YumDistributor()
    distributor.process_repo_auth_certificate_bundle = mock.Mock()
    config_conduit = mock.Mock(spec=RepoConfigConduit)
    config_conduit.get_repo_distributors_by_relative_url.return_value = MockCursor([])
    metadata.generate_yum_metadata(mock_repo.id, mock_repo.working_dir, conduit, config,
                                   repo_scratchpad={'checksum_type': 'sha'})
    # The generator must be handed the converted 'sha1' type ...
    mock_YumMetadataGenerator.assert_called_with(ANY, checksum_type='sha1',
                                                 skip_metadata_types=ANY, is_cancelled=ANY,
                                                 group_xml_path=ANY, updateinfo_xml_path=ANY,
                                                 custom_metadata_dict=ANY)
    # ... and the distributor config must be updated with the same conversion.
    mock_distributor_manager.return_value.update_distributor_config.\
        assert_called_with(ANY, ANY, {'checksum_type': 'sha1'})
def test_generate_metadata(self):
    """generate_metadata honors its flag: 1 generates (True), 0 is a no-op (False)."""
    mock_repo = mock.Mock(spec=Repository)
    mock_repo.id = "test_repo"
    mock_repo.scratchpad = {"checksum_type": "sha"}
    mock_repo.working_dir = os.path.join(self.data_dir, "test_repo_metadata")
    # Run the same call once with the flag enabled and once disabled.
    for flag, expected_status in ((1, True), (0, False)):
        config = distributor_mocks.get_basic_config(generate_metadata=flag)
        conduit = distributor_mocks.get_publish_conduit()
        status, errors = metadata.generate_metadata(mock_repo, conduit, config)
        self.assertEquals(status, expected_status)
def test_cancel_generate_repodata(self):
    """Generation invoked with is_cancelled=True fails and reports a CANCELED state."""
    global metadata_progress_status
    metadata_progress_status = {}

    def set_progress(progress):
        # Conduit side effect: capture the most recent full progress report.
        global metadata_progress_status
        metadata_progress_status = progress

    def progress_callback(type_id, status):
        # Merge the per-type status and push the whole report through the conduit.
        # NOTE: relies on late binding of mock_publish_conduit (defined below).
        metadata_progress_status[type_id] = status
        mock_publish_conduit.set_progress(metadata_progress_status)

    mock_repo = mock.Mock(spec=Repository)
    mock_repo.id = "test_repo"
    mock_repo.scratchpad = {"checksum_type" : "sha"}
    mock_repo.working_dir = os.path.join(self.temp_dir, "test_yum_repo_metadata")
    # Confirm required and optional are successful
    # NOTE(review): units_to_write is never passed to the call under test;
    # it looks like a vestigial fixture — confirm before removing.
    units_to_write = mock.Mock()
    units_to_write.metadata = {}
    units_to_write.metadata["repodata"] = {}
    repo_scratchpad = {"checksum_type" : "sha", "repodata" : {}}
    optional_kwargs = {}
    config = distributor_mocks.get_basic_config(**optional_kwargs)
    mock_publish_conduit = distributor_mocks.get_publish_conduit()
    mock_publish_conduit.set_progress = mock.Mock()
    mock_publish_conduit.set_progress.side_effect = set_progress
    # is_cancelled=True should short-circuit generation.
    status, errors = metadata.generate_yum_metadata(mock_repo.working_dir, mock_publish_conduit, config,
                                                    progress_callback=progress_callback,
                                                    is_cancelled=True,
                                                    repo_scratchpad=repo_scratchpad)
    self.assertEquals(status, False)
    self.assertEquals(metadata_progress_status['metadata']['state'], "CANCELED")
def test_skip_types_export(self): repo = mock.Mock(spec=Repository) repo.id = "testrepo" repo.working_dir = self.repo_working_dir distributor = ISODistributor() config = distributor_mocks.get_basic_config(http=True, https=False, skip=["rpm", "errata", "packagegroup"]) publish_conduit = distributor_mocks.get_publish_conduit(existing_units=[], pkg_dir=self.pkg_dir) report = distributor.publish_repo(repo, publish_conduit, config) print report.summary summary_keys = { "rpm": ["num_package_units_attempted", "num_package_units_exported", "num_package_units_errors"], "distribution": [ "num_distribution_units_attempted", "num_distribution_units_exported", "num_distribution_units_errors", ], "packagegroup": ["num_package_groups_exported", "num_package_categories_exported"], "erratum": ["num_errata_units_exported"], } # check rpm,packagegroup,erratum info is skipped for key in summary_keys["rpm"] + summary_keys["packagegroup"] + summary_keys["erratum"]: self.assertTrue(key not in report.summary) # check distro info is present and not skipped for key in summary_keys["distribution"]: self.assertTrue(key in report.summary)
def test_publish_repo(self):
    """
    Publishing should symlink every ISO and write a correct PULP_MANIFEST
    under both the http and https roots.
    """
    repo = MagicMock(spec=Repository)
    repo.id = 'lebowski'
    repo.working_dir = self.temp_dir
    conduit = distributor_mocks.get_publish_conduit(existing_units=self.existing_units)
    config = distributor_mocks.get_basic_config(**{constants.CONFIG_SERVE_HTTP: True,
                                                   constants.CONFIG_SERVE_HTTPS: True})
    # We haven't implemented reporting yet, so we don't yet assert anything
    # about the report here.
    report = self.iso_distributor.publish_repo(repo, conduit, config)
    expected_manifest_rows = [['test.iso', 'sum1', '1'], ['test2.iso', 'sum2', '2'],
                              ['test3.iso', 'sum3', '3']]
    for root in (constants.ISO_HTTP_DIR, constants.ISO_HTTPS_DIR):
        publish_path = os.path.join(root, 'lebowski')
        # Each unit must be exposed as a symlink resolving to its storage path.
        for unit in self.existing_units:
            link = os.path.join(publish_path, unit.unit_key['name'])
            self.assertTrue(os.path.islink(link))
            self.assertEqual(os.path.realpath(link),
                             os.path.join('/', 'path', unit.unit_key['name']))
        # The generated PULP_MANIFEST must list every ISO with checksum and size.
        manifest_path = os.path.join(publish_path, constants.ISO_MANIFEST_FILENAME)
        with open(manifest_path) as manifest_file:
            manifest_rows = [row for row in csv.reader(manifest_file)]
        self.assertEqual(manifest_rows, expected_manifest_rows)
def test_repo_export_isos(self):
    """
    End-to-end ISO export: sync a fixture repo, export it over https then
    http, and verify the generated ISO file names (repo-id default vs. a
    configured iso_prefix).
    """
    feed_url = "file://%s/pulp_unittest/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "pulp_unittest"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    # Three known rpm units expected to be picked up by the export.
    unit_key_a = {'id' : '','name' :'pulp-dot-2.0-test', 'version' :'0.1.2', 'release' : '1.fc11',
                  'epoch':'0', 'arch' : 'x86_64', 'checksumtype' : 'sha256',
                  'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
                  'type_id' : 'rpm'}
    unit_a = Unit(TYPE_ID_RPM, unit_key_a, {}, '')
    unit_a.storage_path = "%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_b = {'id' : '', 'name' :'pulp-test-package', 'version' :'0.2.1', 'release' :'1.fc11',
                  'epoch':'0','arch' : 'x86_64', 'checksumtype' :'sha256',
                  'checksum': '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7',
                  'type_id' : 'rpm', }
    unit_b = Unit(TYPE_ID_RPM, unit_key_b, {}, '')
    unit_b.storage_path = "%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_c = {'id' : '', 'name' :'pulp-test-package', 'version' :'0.3.1', 'release' :'1.fc11',
                  'epoch':'0','arch' : 'x86_64', 'checksumtype' :'sha256',
                  'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
                  'type_id' : 'rpm', }
    unit_c = Unit(TYPE_ID_RPM, unit_key_c, {}, '')
    unit_c.storage_path = "%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm" % self.pkg_dir
    existing_units = []
    for unit in [unit_a, unit_b, unit_c]:
        existing_units.append(unit)
    # Re-sync with the known units present, then layer errata on top.
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    importerErrata.sync(repo, sync_conduit, config)
    repo.working_dir = "%s/%s" % (self.repo_working_dir, "export")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    # test https publish
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=False, https=True, generate_metadata=True)
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    print report
    self.assertTrue(os.path.exists("%s/%s" % (self.https_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.http_publish_dir)), 0)
    # test http publish
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=True, https=False)
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name defaults to repoid
    self.assertTrue( isos_list[-1].startswith(repo.id))
    # test isoprefix:
    iso_prefix = "mock-iso-prefix"
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir,
                                                http=True, https=False, iso_prefix=iso_prefix)
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    # A second ISO is now present alongside the first publish's output.
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 2)
    print isos_list
    # make sure the iso name uses the prefix
    self.assertTrue( isos_list[-1].startswith(iso_prefix))
def __init__(self, working_dir, pkg_dir, config):
    """Prepare a mocked repo/conduit pair so run() can publish in the background."""
    threading.Thread.__init__(self)
    self.config = config
    self.repo = mock.Mock(spec=Repository)
    self.repo.id = "test_cancel_publish"
    self.repo.working_dir = working_dir
    self.publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=pkg_dir)
    # Route conduit progress reports through the module-level set_progress hook.
    self.publish_conduit.set_progress = mock.Mock()
    self.publish_conduit.set_progress.side_effect = set_progress
    self.distributor = YumDistributor()
def test_publish_progress(self):
    """
    Group ISO publish: every per-repo section (rpms/distribution/errata) and
    the iso/publish sections must report the expected terminal states.
    """
    global progress_status
    progress_status = None
    # NOTE(review): group_progress_status is assigned but never read here —
    # looks vestigial; confirm before removing.
    group_progress_status = None

    def set_progress(progress):
        # Conduit side effect: capture the most recent full progress report.
        global progress_status
        progress_status = progress

    PROGRESS_FIELDS = ["num_success", "num_error", "items_left", "items_total", "error_details"]
    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True, https=False)
    distributor = GroupISODistributor()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_progress_sync"
    repo_group = mock.Mock(spec=RepositoryGroup)
    repo_group.id = "test_group"
    repo_group.repo_ids = [repo.id,]
    repo_group.working_dir = self.group_working_dir
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    distributor.publish_group(repo_group, publish_conduit, config)
    print distributor.group_progress_status
    self.assertTrue(progress_status is not None)
    self.assertEqual(progress_status['group-id'], repo_group.id)
    # rpms section: FINISHED and carrying the standard counter fields.
    self.assertTrue("rpms" in progress_status['repositories'][repo.id])
    self.assertTrue(progress_status['repositories'][repo.id]["rpms"].has_key("state"))
    self.assertEqual(progress_status['repositories'][repo.id]["rpms"]["state"], "FINISHED")
    for field in PROGRESS_FIELDS:
        self.assertTrue(field in progress_status['repositories'][repo.id]["rpms"])
    # distribution section: same shape as rpms.
    self.assertTrue("distribution" in progress_status['repositories'][repo.id])
    self.assertTrue(progress_status['repositories'][repo.id]["distribution"].has_key("state"))
    self.assertEqual(progress_status['repositories'][repo.id]["distribution"]["state"], "FINISHED")
    for field in PROGRESS_FIELDS:
        self.assertTrue(field in progress_status['repositories'][repo.id]["distribution"])
    # errata section: only the state is asserted.
    self.assertTrue("errata" in progress_status['repositories'][repo.id])
    self.assertTrue(progress_status['repositories'][repo.id]["errata"].has_key("state"))
    self.assertEqual(progress_status['repositories'][repo.id]["errata"]["state"], "FINISHED")
    # iso generation section carries extra file/size bookkeeping fields.
    self.assertTrue("isos" in progress_status)
    self.assertTrue(progress_status["isos"].has_key("state"))
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    ISO_PROGRESS_FIELDS = ["num_success", "num_error", "items_left", "items_total",
                           "error_details", "written_files", "current_file",
                           "size_total", "size_left"]
    for field in ISO_PROGRESS_FIELDS:
        self.assertTrue( field in progress_status["isos"])
    # http was enabled, https disabled — states must reflect that.
    self.assertTrue("publish_http" in progress_status)
    self.assertEqual(progress_status["publish_http"]["state"], "FINISHED")
    self.assertTrue("publish_https" in progress_status)
    self.assertEqual(progress_status["publish_https"]["state"], "SKIPPED")
def test_distribution_exports(self):
    """
    Exporting a distribution unit must create every file listed in its
    tree-file metadata under the export symlink directory.
    """
    feed_url = "file://%s/pulp_unittest/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "pulp_unittest"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[],
                                                   pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    # Unit key identifying the distribution (kickstart tree) fixture.
    dunit_key = {}
    dunit_key['id'] = "ks-TestFamily-TestVariant-16-x86_64"
    dunit_key['version'] = "16"
    dunit_key['arch'] = "x86_64"
    dunit_key['family'] = "TestFamily"
    dunit_key['variant'] = "TestVariant"
    # NOTE(review): this local deliberately(?) shadows the imported `metadata`
    # module for the rest of the method — confirm that is intended.
    metadata = {
        "files" : [{"checksumtype" : "sha256", "relativepath" : "images/fileA.txt",
                    "fileName" : "fileA.txt",
                    "downloadurl" : "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileA.txt",
                    "item_type" : "tree_file",
                    "savepath" : "%s/testr1/images" % self.repo_working_dir,
                    "checksum" : "22603a94360ee24b7034c74fa13d70dd122aa8c4be2010fc1361e1e6b0b410ab",
                    "filename" : "fileA.txt",
                    "pkgpath" : "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
                    "size" : 0 },
                   { "checksumtype" : "sha256", "relativepath" : "images/fileB.txt",
                    "fileName" : "fileB.txt",
                    "downloadurl" : "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileB.txt",
                    "item_type" : "tree_file",
                    "savepath" : "%s/testr1/images" % self.repo_working_dir,
                    "checksum" : "8dc89e9883c098443f6616e60a8e489254bf239eeade6e4b4943b7c8c0c345a4",
                    "filename" : "fileB.txt",
                    "pkgpath" : "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
                    "size" : 0 },
                   { "checksumtype" : "sha256", "relativepath" : "images/fileC.iso",
                    "fileName" : "fileC.iso",
                    "downloadurl" : "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileC.iso",
                    "item_type" : "tree_file",
                    "savepath" : "%s/testr1/images" % self.repo_working_dir,
                    "checksum" : "099f2bafd533e97dcfee778bc24138c40f114323785ac1987a0db66e07086f74",
                    "filename" : "fileC.iso",
                    "pkgpath" : "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
                    "size" : 0 } ],}
    distro_unit = Unit(distribution.TYPE_ID_DISTRO, dunit_key, metadata, '')
    distro_unit.storage_path = "%s/ks-TestFamily-TestVariant-16-x86_64" % self.pkg_dir
    symlink_dir = "%s/%s" % (self.repo_working_dir, "isos")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=[distro_unit],
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http=False, https=True)
    repo_exporter = RepoExporter(symlink_dir)
    # status, errors = iso_distributor._export_distributions([distro_unit], symlink_dir)
    status, errors = repo_exporter.export_distributions([distro_unit])
    print status, errors
    self.assertTrue(status)
    # Every tree file must have been exported at its relative path.
    for file in metadata['files']:
        print os.path.isfile("%s/%s" % (symlink_dir, file['relativepath']))
        self.assertTrue(os.path.isfile("%s/%s" % (symlink_dir, file['relativepath'])))
def test_get_checksum_type(self): mock_repo = mock.Mock(spec=Repository) mock_repo.id = "test_repo" mock_repo.working_dir = os.path.join(self.data_dir, "pulp_unittest") optional_kwargs = {"checksum_type" : "sha512"} config = distributor_mocks.get_basic_config(**optional_kwargs) mock_publish_conduit = distributor_mocks.get_publish_conduit() _checksum_type_value = metadata.get_repo_checksum_type(mock_publish_conduit, config) print _checksum_type_value self.assertEquals(_checksum_type_value, optional_kwargs['checksum_type']) optional_kwargs = {} config = distributor_mocks.get_basic_config(**optional_kwargs) _checksum_type_value = metadata.get_repo_checksum_type(mock_publish_conduit, config) print _checksum_type_value self.assertEquals(_checksum_type_value, "sha") mock_repo.scratchpad = None optional_kwargs = {} config = distributor_mocks.get_basic_config(**optional_kwargs) mock_publish_conduit = distributor_mocks.get_publish_conduit(checksum_type=None) _checksum_type_value = metadata.get_repo_checksum_type(mock_publish_conduit, config) print _checksum_type_value self.assertEquals(_checksum_type_value, "sha256")
def test_generate_isos(self):
    """
    Publish once, then call generate_isos() directly and verify the iso
    progress bookkeeping and that the output file uses the given prefix.
    """
    repo = mock.Mock(spec=Repository)
    repo.id = "test_repo_for_export"
    repo.working_dir = self.repo_iso_working_dir
    global progress_status
    progress_status = None

    def set_progress(progress):
        # Conduit side effect: capture the most recent full progress report.
        global progress_status
        progress_status = progress

    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True, https=False,
                                                prefix="test-isos")
    distributor = ISODistributor()
    def cleanup():
        return
    # Disable working-dir cleanup so generate_isos() can re-read the publish output.
    distributor.cleanup = mock.Mock()
    distributor.cleanup.side_effect = cleanup
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    progress_status = distributor.init_progress()
    distributor.publish_repo(repo, publish_conduit, config)
    # Switch the id so generate_isos() writes into a fresh target directory.
    repo.id = "test_mock_iso_prefix"
    distributor.generate_isos(repo.working_dir, "%s/%s" % (self.http_publish_dir, repo.id),
                              prefix="test-isos")
    self.assertTrue("isos" in progress_status)
    self.assertTrue(progress_status["isos"].has_key("state"))
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    self.assertEqual(progress_status["isos"]["num_success"], 1)
    self.assertTrue(progress_status["isos"]["size_total"] is not None)
    self.assertEqual(progress_status["isos"]["size_left"], 0)
    self.assertEqual(progress_status["isos"]["items_total"], 1)
    self.assertEqual(progress_status["isos"]["items_left"], 0)
    print progress_status
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    print isos_list
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name defaults to repoid
    self.assertTrue(isos_list[0].startswith("test-isos"))
def test_generate_isos(self):
    """
    Full publish_repo() run with iso_prefix set: the iso progress section
    must finish cleanly and the single generated ISO must carry the prefix.
    """
    repo = mock.Mock(spec=Repository)
    repo.id = "test_repo_for_export"
    repo.working_dir = self.repo_iso_working_dir + "/" + repo.id
    # Three known rpm units supplied to the conduit as the repo content.
    unit_key_a = {'id' : '','name' :'pulp-dot-2.0-test', 'version' :'0.1.2', 'release' : '1.fc11',
                  'epoch':'0', 'arch' : 'x86_64', 'checksumtype' : 'sha256',
                  'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979',
                  'type_id' : 'rpm'}
    unit_a = Unit(TYPE_ID_RPM, unit_key_a, {'updated' : ''}, '')
    unit_a.storage_path = "%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_b = {'id' : '', 'name' :'pulp-test-package', 'version' :'0.2.1', 'release' :'1.fc11',
                  'epoch':'0','arch' : 'x86_64', 'checksumtype' :'sha256',
                  'checksum': '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7',
                  'type_id' : 'rpm', }
    unit_b = Unit(TYPE_ID_RPM, unit_key_b, {'updated' : ''}, '')
    unit_b.storage_path = "%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm" % self.pkg_dir
    unit_key_c = {'id' : '', 'name' :'pulp-test-package', 'version' :'0.3.1', 'release' :'1.fc11',
                  'epoch':'0','arch' : 'x86_64', 'checksumtype' :'sha256',
                  'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f',
                  'type_id' : 'rpm', }
    unit_c = Unit(TYPE_ID_RPM, unit_key_c, {'updated' : ''}, '')
    unit_c.storage_path = "%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm" % self.pkg_dir
    existing_units = []
    for unit in [unit_a, unit_b, unit_c]:
        existing_units.append(unit)
    global progress_status
    progress_status = None

    def set_progress(progress):
        # Conduit side effect: capture the most recent full progress report.
        global progress_status
        progress_status = progress

    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir,
                                                            existing_units=existing_units)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True, https=False,
                                                iso_prefix="test-isos")
    distributor = ISODistributor()
    def cleanup(repo_working_dir):
        return
    # NOTE(review): this monkeypatches the module-level helper and never
    # restores it — it leaks into later tests; confirm whether that matters.
    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    progress_status = distributor.init_progress()
    distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue("isos" in progress_status)
    self.assertTrue(progress_status["isos"].has_key("state"))
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    self.assertEqual(progress_status["isos"]["num_success"], 1)
    self.assertTrue(progress_status["isos"]["size_total"] is not None)
    self.assertEqual(progress_status["isos"]["size_left"], 0)
    self.assertEqual(progress_status["isos"]["items_total"], 1)
    self.assertEqual(progress_status["isos"]["items_left"], 0)
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name defaults to repoid
    self.assertTrue( isos_list[0].startswith("test-isos"))
def setUp(self):
    """Create the ISO fixture units and redirect the publish roots into a temp dir."""
    iso_specs = (("test.iso", 1, "sum1"), ("test2.iso", 2, "sum2"), ("test3.iso", 3, "sum3"))
    self.existing_units = [
        Unit(ids.TYPE_ID_ISO,
             {"name": name, "size": size, "checksum": checksum},
             {}, "/path/%s" % name)
        for name, size, checksum in iso_specs]
    self.publish_conduit = distributor_mocks.get_publish_conduit(
        existing_units=self.existing_units)
    self.temp_dir = tempfile.mkdtemp()
    # Monkeypatch the publishing location so we don't try to write to /var;
    # the originals are stashed so tearDown can restore them.
    self._original_iso_http_dir = constants.ISO_HTTP_DIR
    self._original_iso_https_dir = constants.ISO_HTTPS_DIR
    constants.ISO_HTTP_DIR = os.path.join(self.temp_dir, "published", "http", "isos")
    constants.ISO_HTTPS_DIR = os.path.join(self.temp_dir, "published", "https", "isos")
def __init__(self, working_dir):
    """Set up a mocked repo and conduit so run() can drive metadata generation."""
    threading.Thread.__init__(self)
    self.mock_repo = mock.Mock(spec=Repository)
    self.mock_repo.id = "test_cancel_metadata_generation"
    self.mock_repo.scratchpad = {"checksum_type" : "sha"}
    self.mock_repo.working_dir = working_dir
    self.config = distributor_mocks.get_basic_config(generate_metadata=1)
    self.mock_publish_conduit = distributor_mocks.get_publish_conduit()
    # Route conduit progress reports through the module-level set_progress hook.
    self.mock_publish_conduit.set_progress = mock.Mock()
    self.mock_publish_conduit.set_progress.side_effect = set_progress
    # Outcome captured by run() once generation finishes or is cancelled.
    self.status = None
    self.errors = None
    self.finished = False
def test_publish_progress(self):
    """Publish a repo through the ISO distributor and verify the progress
    callback reports FINISHED for every enabled section and SKIPPED for
    the disabled https publish step.
    """
    global progress_status
    progress_status = None

    def set_progress(progress):
        # Capture the most recent progress report handed to the conduit.
        global progress_status
        progress_status = progress

    PROGRESS_FIELDS = ["num_success", "num_error", "items_left", "items_total", "error_details"]
    publish_conduit = distributor_mocks.get_publish_conduit(pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(
        https_publish_dir=self.https_publish_dir,
        http_publish_dir=self.http_publish_dir,
        generate_metadata=True,
        http=True,
        https=False,
    )
    distributor = ISODistributor()
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_progress_sync"
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(progress_status is not None)
    # Sections that must finish AND carry the full set of counters.
    # (was dict.has_key(), which is deprecated; use the `in` operator)
    for section in ("rpms", "distribution"):
        self.assertTrue(section in progress_status)
        self.assertTrue("state" in progress_status[section])
        self.assertEqual(progress_status[section]["state"], "FINISHED")
        for field in PROGRESS_FIELDS:
            self.assertTrue(field in progress_status[section])
    # Sections that must finish; per-item counters are not checked here.
    for section in ("errata", "isos"):
        self.assertTrue(section in progress_status)
        self.assertTrue("state" in progress_status[section])
        self.assertEqual(progress_status[section]["state"], "FINISHED")
    self.assertTrue("publish_http" in progress_status)
    self.assertEqual(progress_status["publish_http"]["state"], "FINISHED")
    # https=False in the config, so the https publish step must be skipped.
    self.assertTrue("publish_https" in progress_status)
    self.assertEqual(progress_status["publish_https"]["state"], "SKIPPED")
def test_empty_publish(self):
    """Publishing a repository with no units succeeds and reports zero
    package counts plus the expected per-repo publish directories.
    """
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_empty_publish"
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=[],
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                http=True, https=True)
    distributor = YumDistributor()
    report = distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(report.success_flag)
    summary = report.summary
    # Nothing was attempted, published, or errored.
    for counter in ("num_package_units_attempted",
                    "num_package_units_published",
                    "num_package_units_errors"):
        self.assertEqual(summary[counter], 0)
    # The summary reports <publish_dir>/<repo_id> for both protocols.
    expected_https = os.path.join(self.https_publish_dir, repo.id).rstrip('/')
    expected_http = os.path.join(self.http_publish_dir, repo.id).rstrip('/')
    self.assertEqual(summary["https_publish_dir"], expected_https)
    self.assertEqual(summary["http_publish_dir"], expected_http)
    details = report.details
    self.assertEqual(len(details["errors"]), 0)
def test_export_rpm(self):
    """Sync a small test repo, then export its RPM units as symlinks with
    RepoExporter and verify one link per unit appears in the symlink dir.
    """
    feed_url = "file://%s/test_repo_for_export/" % (self.data_dir)
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_repo_for_export"
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    self.assertTrue(summary is not None)
    self.assertTrue(details is not None)
    self.assertTrue(status)
    # (name, version, checksum) for the three units known to be in the feed.
    # All share release/arch/checksumtype, so build them in one loop instead
    # of repeating the unit-key boilerplate three times.
    rpm_specs = [
        ('pulp-dot-2.0-test', '0.1.2',
         '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979'),
        ('pulp-test-package', '0.2.1',
         '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7'),
        ('pulp-test-package', '0.3.1',
         '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f'),
    ]
    existing_units = []
    for name, version, checksum in rpm_specs:
        unit_key = {'id': '', 'name': name, 'version': version, 'release': '1.fc11',
                    'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                    'checksum': checksum, 'type_id': 'rpm'}
        unit = Unit(TYPE_ID_RPM, unit_key, {}, '')
        unit.storage_path = "%s/%s/%s/1.fc11/x86_64/%s/%s-%s-1.fc11.x86_64.rpm" % (
            self.pkg_dir, name, version, checksum, name, version)
        existing_units.append(unit)
    symlink_dir = "%s/%s" % (self.repo_working_dir, "isos")
    repo_exporter = RepoExporter(symlink_dir)
    status, errors = repo_exporter.export_rpms(existing_units)
    self.assertTrue(status)
    # One symlink per exported unit.
    self.assertEquals(len(os.listdir(symlink_dir)), 3)
def test_publish_comps(self):
    """Publish a repo containing package groups/categories and verify the
    generated comps metadata round-trips through yum's comps parser.
    """
    repo = mock.Mock(spec=Repository)
    repo.id = "test_publish_comps"
    repo.working_dir = self.working_dir
    # Create 2 pkg groups
    grp_a = self.create_dummy_pkg_group_unit(repo.id, "group_a")
    grp_b = self.create_dummy_pkg_group_unit(repo.id, "group_b")
    # Create 2 pkg categories
    cat_a = self.create_dummy_pkg_category_unit(repo.id, "cat_a", ["group_a"])
    cat_b = self.create_dummy_pkg_category_unit(repo.id, "cat_b", ["group_b"])
    # Add the grps/cats to the publish_conduit
    publish_conduit = distributor_mocks.get_publish_conduit(
        existing_units=[grp_a, grp_b, cat_a, cat_b])
    config = distributor_mocks.get_basic_config(relative_url=repo.id,
                                                http=True, https=True, generate_metadata=True)
    # Publish the repo, be sure 'generate_metadata' is True
    yum_distributor = YumDistributor()
    report = yum_distributor.publish_repo(repo, publish_conduit, config)
    self.assertTrue(report.success_flag)
    self.assertEqual(report.summary["num_package_groups_published"], 2)
    self.assertEqual(report.summary["num_package_categories_published"], 2)
    expected_comps_xml = os.path.join(repo.working_dir, "comps.xml")
    self.assertTrue(os.path.exists(expected_comps_xml))
    #
    # Find the path that createrepo added the comps.xml as
    #
    repomd_xml = os.path.join(repo.working_dir, "repodata", "repomd.xml")
    self.assertTrue(os.path.exists(repomd_xml))
    md_types = util.get_repomd_filetypes(repomd_xml)
    self.assertTrue('group' in md_types)
    # 'group' entry in repomd.xml points at the published comps data.
    groups_path = util.get_repomd_filetype_path(repomd_xml, "group")
    self.assertTrue(groups_path)
    groups_path = os.path.join(repo.working_dir, groups_path)
    self.assertTrue(os.path.exists(groups_path))
    #
    # Use yum to read the repodata and verify the info written matches
    # our dummy data
    #
    yc = yum.comps.Comps()
    yc.add(groups_path)
    self.assertEqual(len(yc.groups), 2)
    self.assertEqual(len(yc.categories), 2)
    for g in yc.groups:
        # Map each parsed group back to the dummy unit it came from.
        eg = None
        if g.groupid == "group_a":
            eg = grp_a
        elif g.groupid == "group_b":
            eg = grp_b
        else:
            # Unexpected error
            self.assertTrue(False)
        self.assertEqual(g.name, eg.metadata["name"])
        self.assertEqual(g.description, eg.metadata["description"])
        self.assertEqual(g.user_visible, eg.metadata["user_visible"])
        self.assertEqual(g.display_order, eg.metadata["display_order"])
        self.assertEqual(g.default, eg.metadata["default"])
        self.assertEqual(g.langonly, eg.metadata["langonly"])
        for pkg_name in g.mandatory_packages:
            self.assertTrue(pkg_name in eg.metadata["mandatory_package_names"])
        for pkg_name in g.optional_packages:
            self.assertTrue(pkg_name in eg.metadata["optional_package_names"])
        for pkg_name in g.default_packages:
            self.assertTrue(pkg_name in eg.metadata["default_package_names"])
        #
        # Below is related to pymongo not liking dots in a pkg_name
        # We are storing conditional_package_names as a list of tuples, (name, values)
        # convert to a dictionary to make it easier to compare against yum's data
        #
        cond_lookup = {}
        for expected_name, expected_values in eg.metadata["conditional_package_names"]:
            cond_lookup[expected_name] = expected_values
        for pkg_name in g.conditional_packages:
            # We are converting our expected value to a str below to match the behavior
            # we see from yum
            self.assertEqual(g.conditional_packages[pkg_name], str(cond_lookup[pkg_name]))
    for c in yc.categories:
        # Map each parsed category back to the dummy unit it came from.
        ec = None
        if c.categoryid == "cat_a":
            ec = cat_a
        elif c.categoryid == "cat_b":
            ec = cat_b
        else:
            # Unexpected error
            self.assertTrue(False)
        self.assertEqual(c.name, ec.metadata["name"])
        self.assertEqual(c.description, ec.metadata["description"])
        self.assertEqual(c.display_order, ec.metadata["display_order"])
        self.assertEqual(len(c._groups), len(ec.metadata["packagegroupids"]))
        for grpid in c._groups:
            self.assertTrue(grpid in ec.metadata["packagegroupids"])
def test_group_publish_isos(self):
    """Sync two repos, publish them together as a repository group to ISO,
    and verify exactly one prefixed ISO lands in the http publish dir.
    """
    feed_url = "file://%s/pulp_unittest/" % self.data_dir
    repo_1 = mock.Mock(spec=Repository)
    repo_1.id = "test_repo_for_export_1"
    repo_1.working_dir = self.repo_working_dir
    repo_1.checksumtype = 'sha'
    repo_2 = mock.Mock(spec=Repository)
    repo_2.id = "test_repo_for_export_2"
    repo_2.working_dir = self.repo_working_dir
    repo_2.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo_1, sync_conduit, config)
    status, summary, details = importerRPM.sync(repo_2, sync_conduit, config)
    # (name, version, checksum) for the units known to be in the test feed;
    # release/arch/checksumtype are shared, so build the units in one loop.
    rpm_specs = [
        ('pulp-dot-2.0-test', '0.1.2',
         '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979'),
        ('pulp-test-package', '0.2.1',
         '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7'),
        ('pulp-test-package', '0.3.1',
         '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f'),
    ]
    existing_units = []
    for name, version, checksum in rpm_specs:
        unit_key = {'id': '', 'name': name, 'version': version, 'release': '1.fc11',
                    'epoch': '0', 'arch': 'x86_64', 'checksumtype': 'sha256',
                    'checksum': checksum, 'type_id': 'rpm'}
        unit = Unit(TYPE_ID_RPM, unit_key, {}, '')
        unit.storage_path = "%s/%s/%s/1.fc11/x86_64/%s/%s-%s-1.fc11.x86_64.rpm" % (
            self.pkg_dir, name, version, checksum, name, version)
        existing_units.append(unit)
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM,
                                                   existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    importerErrata.sync(repo_1, sync_conduit, config)
    importerErrata.sync(repo_2, sync_conduit, config)
    repo_group = mock.Mock(spec=RepositoryGroup)
    repo_group.id = "test_group"
    repo_group.repo_ids = [repo_1.id, repo_2.id]
    repo_group.working_dir = self.group_working_dir
    global progress_status
    progress_status = None

    def set_progress(progress):
        global progress_status
        progress_status = progress

    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units,
                                                            pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir,
                                                http_publish_dir=self.http_publish_dir,
                                                generate_metadata=True, http=True, https=False,
                                                iso_prefix="test-isos")
    distributor = GroupISODistributor()

    def cleanup(repo_working_dir):
        # No-op so the working dir survives for the assertions below.
        return

    # BUG FIX: previously this set `iso_util.cleanup_working_dir.cleanup`,
    # patching an attribute ON the real function instead of replacing it;
    # replace the function itself, as the sibling tests do.
    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    publish_conduit.set_progress = mock.Mock()
    publish_conduit.set_progress.side_effect = set_progress
    distributor.publish_group(repo_group, publish_conduit, config)
    self.assertTrue("isos" in progress_status)
    # was dict.has_key(), deprecated; use the `in` operator
    self.assertTrue("state" in progress_status["isos"])
    self.assertEqual(progress_status["isos"]["state"], "FINISHED")
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo_group.id)))
    # https was disabled, so nothing may be published there.
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo_group.id))
    self.assertEqual(len(isos_list), 1)
    # make sure the iso name carries the configured prefix
    self.assertTrue(isos_list[0].startswith("test-isos"))
def test_iso_export_by_date_range(self):
    """Publish a repo to ISO with a start/end date filter and verify only
    the erratum whose 'updated' date falls inside the range is exported.
    """
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    # 'updated' date is inside the publish date range configured below.
    metadata = {'updated' : '2010-03-30 08:07:30'}
    existing_units = []
    # Two RPM units referenced by the errata package lists below.
    unit_key_a = {'id' : '','name' :'patb', 'version' :'0.1', 'release' : '2', 'epoch':'0', 'arch' : 'x86_64', 'checksumtype' : 'sha',
                  'checksum': '017c12050a97cf6095892498750c2a39d2bf535e'}
    rpm_unit_a = Unit(TYPE_ID_RPM, unit_key_a, metadata, '')
    rpm_unit_a.storage_path = "%s/patb/0.1/2/noarch/017c12050a97cf6095892498750c2a39d2bf535e/patb-0.1-2.noarch.rpm" % self.pkg_dir
    existing_units.append(rpm_unit_a)
    unit_key_b = {'id' : '', 'name' :'emoticons', 'version' :'0.1', 'release' :'2', 'epoch':'0','arch' : 'x86_64', 'checksumtype' :'sha',
                  'checksum' : '663c89b0d29bfd5479d8736b716d50eed9495dbb'}
    rpm_unit_b = Unit(TYPE_ID_RPM, unit_key_b, metadata, '')
    rpm_unit_b.storage_path = "%s/emoticons/0.1/2/noarch/663c89b0d29bfd5479d8736b716d50eed9495dbb/emoticons-0.1-2.noarch.rpm" % self.pkg_dir
    existing_units.append(rpm_unit_b)
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    # Erratum issued/updated INSIDE the date range -> must be exported.
    unit_key = dict()
    unit_key['id'] = "RHEA-2010:9999"
    mdata = { 'description' : "test",
              'from_str': '*****@*****.**',
              'issued': '2010-03-30 08:07:30',
              'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
                           'packages': [{'arch': 'x86_64', 'epoch': '0', 'filename': 'patb-0.1-2.x86_64.rpm',
                                         'name': 'patb', 'release': '2', 'src': '',
                                         'sum': ('sha', '017c12050a97cf6095892498750c2a39d2bf535e'),
                                         'version': '0.1'},
                                        {'arch': 'x86_64', 'epoch': '0', 'filename': 'emoticons-0.1-2.x86_64.rpm',
                                         'name': 'emoticons', 'release': '2', 'src': '',
                                         'sum': ('sha', '663c89b0d29bfd5479d8736b716d50eed9495dbb'),
                                         'version': '0.1'}],
                           'short': 'rhel-i386-server-vt-5'}],
              'pushcount': 1, 'reboot_suggested': False, 'references': [], 'release': '', 'rights': '',
              'status': 'final', 'summary': '',
              'title': 'emoticons enhancement fix and enhancement update',
              'updated': '2010-03-30 08:07:30', 'version': '1', 'type' : 'enhancement',
              'severity' : 'Low', 'solution' : ''}
    # Erratum issued/updated OUTSIDE the date range -> must be filtered out.
    unit_key_2 = dict()
    unit_key_2['id'] = "RHEA-2008:9999"
    mdata_2 = { 'description' : "test",
                'from_str': '*****@*****.**',
                'issued': '2008-03-30 00:00:00',
                'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
                             'packages': [{'arch': 'x86_64', 'epoch': '0', 'filename': 'patb-0.1-2.x86_64.rpm',
                                           'name': 'patb', 'release': '2', 'src': '',
                                           'sum': ('sha', '017c12050a97cf6095892498750c2a39d2bf535e'),
                                           'version': '0.1'},
                                          {'arch': 'x86_64', 'epoch': '0', 'filename': 'emoticons-0.1-2.x86_64.rpm',
                                           'name': 'emoticons', 'release': '2', 'src': '',
                                           'sum': ('sha', '663c89b0d29bfd5479d8736b716d50eed9495dbb'),
                                           'version': '0.1'}],
                             'short': 'rhel-i386-server-vt-5'}],
                'pushcount': 1, 'reboot_suggested': False, 'references': [], 'release': '', 'rights': '',
                'status': 'final', 'summary': '',
                'title': 'emoticons enhancement fix and enhancement update',
                'updated': '2008-03-30 00:00:00', 'version': '1', 'type' : 'enhancement',
                'severity' : 'Low', 'solution' : ''}
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, ''),
                   Unit(TYPE_ID_ERRATA, unit_key_2, mdata_2, '')]
    existing_units += errata_unit
    print existing_units
    repo.working_dir = "%s/%s" % (self.repo_working_dir, "export")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
    # test http publish
    config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir,
                                                https_publish_dir=self.https_publish_dir, http=True, https=False,
                                                start_date="2009-03-30 08:07:30", end_date="2012-03-30 08:07:30",
                                                generate_metadata=True)
    def cleanup(rpm_working_dir):
        # No-op so the working dir survives for the assertions below.
        return
    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    ftypes = util.get_repomd_filetypes("%s/%s" % (repo.working_dir, "repodata/repomd.xml"))
    self.assertTrue("updateinfo" in ftypes)
    updateinfo_path = util.get_repomd_filetype_path("%s/%s" % (repo.working_dir, "repodata/repomd.xml"), "updateinfo")
    updateinfo_path = os.path.join(repo.working_dir, updateinfo_path)
    self.assertTrue(os.path.exists(updateinfo_path))
    # Only the in-range 2010 erratum may appear in the exported updateinfo.
    elist = updateinfo.get_errata(updateinfo_path)
    self.assertEquals(len(elist), 1)
    self.assertTrue(unit_key_2['id'] not in elist[0])
    self.assertEquals(elist[0]['id'], unit_key['id'])
    self.assertEquals(elist[0]['issued'], mdata['issued'])
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    # https was disabled, so nothing may be published there.
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)
def test_errata_export(self):
    """Export an erratum (and the RPMs it references) with RepoExporter and
    verify updateinfo.xml is written and registered in repomd.xml.
    """
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = 'sha'
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {'updated' : '2010-03-30 08:07:30'}
    # Two RPM units referenced by the erratum's package list below.
    unit_key_a = {'id' : '','name' :'patb', 'version' :'0.1', 'release' : '2', 'epoch':'0', 'arch' : 'x86_64', 'checksumtype' : 'md5',
                  'checksum': 'f3c197a29d9b66c5b65c5d62b25db5b4'}
    unit_key_b = {'id' : '', 'name' :'emoticons', 'version' :'0.1', 'release' :'2', 'epoch':'0','arch' : 'x86_64', 'checksumtype' :'md5',
                  'checksum' : '366bb5e73a5905eacb82c96e0578f92b'}
    existing_units = []
    for unit in [unit_key_a, unit_key_b]:
        existing_units.append(Unit(TYPE_ID_RPM, unit, metadata, ''))
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir)
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    unit_key = dict()
    unit_key['id'] = "RHEA-2010:9999"
    mdata = { 'description' : "test",
              'from_str': '*****@*****.**',
              'issued': '2010-03-30 08:07:30',
              'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)',
                           'packages': [{'arch': 'x86_64', 'epoch': '0', 'filename': 'patb-0.1-2.x86_64.rpm',
                                         'name': 'patb', 'release': '2', 'src': '',
                                         'sum': ('md5', 'f3c197a29d9b66c5b65c5d62b25db5b4'),
                                         'version': '0.1'},
                                        {'arch': 'x86_64', 'epoch': '0', 'filename': 'emoticons-0.1-2.x86_64.rpm',
                                         'name': 'emoticons', 'release': '2', 'src': '',
                                         'sum': ('md5', '366bb5e73a5905eacb82c96e0578f92b'),
                                         'version': '0.1'}],
                           'short': 'rhel-i386-server-vt-5'}],
              'pushcount': 1, 'reboot_suggested': False, 'references': [], 'release': '', 'rights': '',
              'status': 'final', 'summary': '',
              'title': 'emoticons enhancement fix and enhancement update',
              'updated': '2010-03-30 08:07:30', 'version': '1', 'type' : 'enhancement',
              'severity' : 'Low', 'solution' : ''}
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, '')]
    symlink_dir = "%s/%s" % (self.repo_working_dir, repo.id)
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir, http=False, https=True)
    print symlink_dir
    repo_exporter = RepoExporter(symlink_dir)
    # Resolve the RPM units named by the erratum, then export them plus
    # the erratum itself into symlink_dir.
    # rpm_units = iso_distributor._get_errata_rpms(errata_unit, existing_units)
    rpm_units = repo_exporter.get_errata_rpms(errata_unit, existing_units)
    print "RPMS in ERRATA",rpm_units
    # iso_distributor._export_rpms(rpm_units, self.repo_working_dir)
    repo_exporter.export_rpms(rpm_units)
    status, errors = repo_exporter.export_errata(errata_unit)
    # status, errors = iso_distributor._export_errata(errata_unit, symlink_dir)
    self.assertTrue(os.path.exists("%s/%s" % (symlink_dir, "updateinfo.xml")))
    self.assertTrue(status)
    # updateinfo must also be registered in the generated repomd.xml.
    ftypes = util.get_repomd_filetypes("%s/%s" % (symlink_dir, "repodata/repomd.xml"))
    print ftypes
    self.assertTrue("updateinfo" in ftypes)
def test_errata_export(self):
    """Export an erratum (and the RPMs it references) with RepoExporter and
    verify updateinfo.xml is written and registered in repomd.xml.

    NOTE(review): a same-named test exists earlier in this file; if both
    live in the same class the later definition shadows the earlier one —
    confirm only one copy is intended.
    """
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = "sha"
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {}
    # Two RPM units referenced by the erratum's package list below.
    unit_key_a = {
        "id": "",
        "name": "patb",
        "version": "0.1",
        "release": "2",
        "epoch": "0",
        "arch": "x86_64",
        "checksumtype": "md5",
        "checksum": "f3c197a29d9b66c5b65c5d62b25db5b4",
    }
    unit_key_b = {
        "id": "",
        "name": "emoticons",
        "version": "0.1",
        "release": "2",
        "epoch": "0",
        "arch": "x86_64",
        "checksumtype": "md5",
        "checksum": "366bb5e73a5905eacb82c96e0578f92b",
    }
    existing_units = []
    for unit in [unit_key_a, unit_key_b]:
        existing_units.append(Unit(TYPE_ID_RPM, unit, metadata, ""))
    sync_conduit = importer_mocks.get_sync_conduit(
        type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir
    )
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    unit_key = dict()
    unit_key["id"] = "RHEA-2010:9999"
    mdata = {
        "description": "test",
        "from_str": "*****@*****.**",
        "issued": "2010-03-30 08:07:30",
        "pkglist": [
            {
                "name": "RHEL Virtualization (v. 5 for 32-bit x86)",
                "packages": [
                    {
                        "arch": "x86_64",
                        "epoch": "0",
                        "filename": "patb-0.1-2.x86_64.rpm",
                        "name": "patb",
                        "release": "2",
                        "src": "",
                        "sum": ("md5", "f3c197a29d9b66c5b65c5d62b25db5b4"),
                        "version": "0.1",
                    },
                    {
                        "arch": "x86_64",
                        "epoch": "0",
                        "filename": "emoticons-0.1-2.x86_64.rpm",
                        "name": "emoticons",
                        "release": "2",
                        "src": "",
                        "sum": ("md5", "366bb5e73a5905eacb82c96e0578f92b"),
                        "version": "0.1",
                    },
                ],
                "short": "rhel-i386-server-vt-5",
            }
        ],
        "pushcount": 1,
        "reboot_suggested": False,
        "references": [],
        "release": "",
        "rights": "",
        "status": "final",
        "summary": "",
        "title": "emoticons enhancement fix and enhancement update",
        "updated": "2010-03-30 08:07:30",
        "version": "1",
        "type": "enhancement",
        "severity": "Low",
        "solution": "",
    }
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, "")]
    symlink_dir = "%s/%s" % (self.repo_working_dir, repo.id)
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
    config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir, http=False, https=True)
    print symlink_dir
    repo_exporter = RepoExporter(symlink_dir)
    # Resolve the RPM units named by the erratum, then export them plus
    # the erratum itself into symlink_dir.
    # rpm_units = iso_distributor._get_errata_rpms(errata_unit, existing_units)
    rpm_units = repo_exporter.get_errata_rpms(errata_unit, existing_units)
    print "RPMS in ERRATA", rpm_units
    # iso_distributor._export_rpms(rpm_units, self.repo_working_dir)
    repo_exporter.export_rpms(rpm_units)
    status, errors = repo_exporter.export_errata(errata_unit)
    # status, errors = iso_distributor._export_errata(errata_unit, symlink_dir)
    self.assertTrue(os.path.exists("%s/%s" % (symlink_dir, "updateinfo.xml")))
    self.assertTrue(status)
    # updateinfo must also be registered in the generated repomd.xml.
    ftypes = util.get_repomd_filetypes("%s/%s" % (symlink_dir, "repodata/repomd.xml"))
    print ftypes
    self.assertTrue("updateinfo" in ftypes)
def test_iso_export_by_date_range(self):
    """Publish a repo to ISO with a start/end date filter and verify only
    the erratum whose 'updated' date falls inside the range is exported.

    NOTE(review): a same-named test exists earlier in this file; if both
    live in the same class the later definition shadows the earlier one —
    confirm only one copy is intended.
    """
    feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
    repo = mock.Mock(spec=Repository)
    repo.working_dir = self.repo_working_dir
    repo.id = "test_errata_local_sync"
    repo.checksumtype = "sha"
    sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
    config = importer_mocks.get_basic_config(feed_url=feed_url)
    importerRPM = importer_rpm.ImporterRPM()
    status, summary, details = importerRPM.sync(repo, sync_conduit, config)
    metadata = {}
    existing_units = []
    # Two RPM units referenced by the errata package lists below.
    unit_key_a = {
        "id": "",
        "name": "patb",
        "version": "0.1",
        "release": "2",
        "epoch": "0",
        "arch": "x86_64",
        "checksumtype": "sha",
        "checksum": "017c12050a97cf6095892498750c2a39d2bf535e",
    }
    rpm_unit_a = Unit(TYPE_ID_RPM, unit_key_a, metadata, "")
    rpm_unit_a.storage_path = (
        "%s/patb/0.1/2/noarch/017c12050a97cf6095892498750c2a39d2bf535e/patb-0.1-2.noarch.rpm" % self.pkg_dir
    )
    existing_units.append(rpm_unit_a)
    unit_key_b = {
        "id": "",
        "name": "emoticons",
        "version": "0.1",
        "release": "2",
        "epoch": "0",
        "arch": "x86_64",
        "checksumtype": "sha",
        "checksum": "663c89b0d29bfd5479d8736b716d50eed9495dbb",
    }
    rpm_unit_b = Unit(TYPE_ID_RPM, unit_key_b, metadata, "")
    rpm_unit_b.storage_path = (
        "%s/emoticons/0.1/2/noarch/663c89b0d29bfd5479d8736b716d50eed9495dbb/emoticons-0.1-2.noarch.rpm" % self.pkg_dir
    )
    existing_units.append(rpm_unit_b)
    sync_conduit = importer_mocks.get_sync_conduit(
        type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir
    )
    importerErrata = errata.ImporterErrata()
    status, summary, details = importerErrata.sync(repo, sync_conduit, config)
    # Erratum issued/updated INSIDE the date range -> must be exported.
    unit_key = dict()
    unit_key["id"] = "RHEA-2010:9999"
    mdata = {
        "description": "test",
        "from_str": "*****@*****.**",
        "issued": "2010-03-30 08:07:30",
        "pkglist": [
            {
                "name": "RHEL Virtualization (v. 5 for 32-bit x86)",
                "packages": [
                    {
                        "arch": "x86_64",
                        "epoch": "0",
                        "filename": "patb-0.1-2.x86_64.rpm",
                        "name": "patb",
                        "release": "2",
                        "src": "",
                        "sum": ("sha", "017c12050a97cf6095892498750c2a39d2bf535e"),
                        "version": "0.1",
                    },
                    {
                        "arch": "x86_64",
                        "epoch": "0",
                        "filename": "emoticons-0.1-2.x86_64.rpm",
                        "name": "emoticons",
                        "release": "2",
                        "src": "",
                        "sum": ("sha", "663c89b0d29bfd5479d8736b716d50eed9495dbb"),
                        "version": "0.1",
                    },
                ],
                "short": "rhel-i386-server-vt-5",
            }
        ],
        "pushcount": 1,
        "reboot_suggested": False,
        "references": [],
        "release": "",
        "rights": "",
        "status": "final",
        "summary": "",
        "title": "emoticons enhancement fix and enhancement update",
        "updated": "2010-03-30 08:07:30",
        "version": "1",
        "type": "enhancement",
        "severity": "Low",
        "solution": "",
    }
    # Erratum issued/updated OUTSIDE the date range -> must be filtered out.
    unit_key_2 = dict()
    unit_key_2["id"] = "RHEA-2008:9999"
    mdata_2 = {
        "description": "test",
        "from_str": "*****@*****.**",
        "issued": "2008-03-30 00:00:00",
        "pkglist": [
            {
                "name": "RHEL Virtualization (v. 5 for 32-bit x86)",
                "packages": [
                    {
                        "arch": "x86_64",
                        "epoch": "0",
                        "filename": "patb-0.1-2.x86_64.rpm",
                        "name": "patb",
                        "release": "2",
                        "src": "",
                        "sum": ("sha", "017c12050a97cf6095892498750c2a39d2bf535e"),
                        "version": "0.1",
                    },
                    {
                        "arch": "x86_64",
                        "epoch": "0",
                        "filename": "emoticons-0.1-2.x86_64.rpm",
                        "name": "emoticons",
                        "release": "2",
                        "src": "",
                        "sum": ("sha", "663c89b0d29bfd5479d8736b716d50eed9495dbb"),
                        "version": "0.1",
                    },
                ],
                "short": "rhel-i386-server-vt-5",
            }
        ],
        "pushcount": 1,
        "reboot_suggested": False,
        "references": [],
        "release": "",
        "rights": "",
        "status": "final",
        "summary": "",
        "title": "emoticons enhancement fix and enhancement update",
        "updated": "2008-03-30 00:00:00",
        "version": "1",
        "type": "enhancement",
        "severity": "Low",
        "solution": "",
    }
    errata_unit = [Unit(TYPE_ID_ERRATA, unit_key, mdata, ""), Unit(TYPE_ID_ERRATA, unit_key_2, mdata_2, "")]
    existing_units += errata_unit
    print existing_units
    repo.working_dir = "%s/%s" % (self.repo_working_dir, "export")
    iso_distributor = ISODistributor()
    publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
    # test http publish
    config = distributor_mocks.get_basic_config(
        http_publish_dir=self.http_publish_dir,
        https_publish_dir=self.https_publish_dir,
        http=True,
        https=False,
        start_date="2009-03-30 08:07:30",
        end_date="2012-03-30 08:07:30",
        generate_metadata=True,
    )

    def cleanup(rpm_working_dir):
        # No-op so the working dir survives for the assertions below.
        return

    iso_util.cleanup_working_dir = mock.Mock()
    iso_util.cleanup_working_dir.side_effect = cleanup
    report = iso_distributor.publish_repo(repo, publish_conduit, config)
    ftypes = util.get_repomd_filetypes("%s/%s" % (repo.working_dir, "repodata/repomd.xml"))
    self.assertTrue("updateinfo" in ftypes)
    updateinfo_path = "%s/%s" % (repo.working_dir, "updateinfo.xml")
    self.assertTrue(os.path.exists(updateinfo_path))
    # Only the in-range 2010 erratum may appear in the exported updateinfo.
    elist = updateinfo.get_errata(updateinfo_path)
    self.assertEquals(len(elist), 1)
    self.assertTrue(unit_key_2["id"] not in elist[0])
    self.assertEquals(elist[0]["id"], unit_key["id"])
    self.assertEquals(elist[0]["issued"], mdata["issued"])
    self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
    # https was disabled, so nothing may be published there.
    self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
    isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
    self.assertEqual(len(isos_list), 1)