Example #1
    def test_repo_export_isos(self):
        feed_url = "file://%s/pulp_unittest/" % self.data_dir
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.repo_working_dir
        repo.id = "pulp_unittest"
        repo.checksumtype = 'sha'
        sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        unit_key_a = {'id' : '','name' :'pulp-dot-2.0-test', 'version' :'0.1.2', 'release' : '1.fc11', 'epoch':'0', 'arch' : 'x86_64', 'checksumtype' : 'sha256',
                      'checksum': '435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979', 'type_id' : 'rpm'}
        unit_a = Unit(TYPE_ID_RPM, unit_key_a, {}, '')
        unit_a.storage_path = "%s/pulp-dot-2.0-test/0.1.2/1.fc11/x86_64/435d92e6c09248b501b8d2ae786f92ccfad69fab8b1bc774e2b66ff6c0d83979/pulp-dot-2.0-test-0.1.2-1.fc11.x86_64.rpm" % self.pkg_dir
        unit_key_b = {'id' : '', 'name' :'pulp-test-package', 'version' :'0.2.1', 'release' :'1.fc11', 'epoch':'0','arch' : 'x86_64', 'checksumtype' :'sha256',
                      'checksum': '4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7', 'type_id' : 'rpm', }
        unit_b = Unit(TYPE_ID_RPM, unit_key_b, {}, '')
        unit_b.storage_path = "%s/pulp-test-package/0.2.1/1.fc11/x86_64/4dbde07b4a8eab57e42ed0c9203083f1d61e0b13935d1a569193ed8efc9ecfd7/pulp-test-package-0.2.1-1.fc11.x86_64.rpm" % self.pkg_dir
        unit_key_c = {'id' : '', 'name' :'pulp-test-package', 'version' :'0.3.1', 'release' :'1.fc11', 'epoch':'0','arch' : 'x86_64', 'checksumtype' :'sha256',
                      'checksum': '6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f', 'type_id' : 'rpm', }
        unit_c = Unit(TYPE_ID_RPM, unit_key_c, {}, '')
        unit_c.storage_path =  "%s/pulp-test-package/0.3.1/1.fc11/x86_64/6bce3f26e1fc0fc52ac996f39c0d0e14fc26fb8077081d5b4dbfb6431b08aa9f/pulp-test-package-0.3.1-1.fc11.x86_64.rpm" % self.pkg_dir
        existing_units = []
        for unit in [unit_a, unit_b, unit_c]:
            existing_units.append(unit)
        sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir)
        importerErrata = errata.ImporterErrata()
        importerErrata.sync(repo, sync_conduit, config)
        repo.working_dir = "%s/%s" % (self.repo_working_dir, "export")
        iso_distributor = ISODistributor()
        publish_conduit = distributor_mocks.get_publish_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
        # test https publish
        config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir, https_publish_dir=self.https_publish_dir, http=False, https=True, generate_metadata=True)
        report = iso_distributor.publish_repo(repo, publish_conduit, config)
        print report
        self.assertTrue(os.path.exists("%s/%s" % (self.https_publish_dir, repo.id)))
        self.assertEquals(len(os.listdir(self.http_publish_dir)), 0)
        # test http publish
        config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir, https_publish_dir=self.https_publish_dir, http=True, https=False)
        report = iso_distributor.publish_repo(repo, publish_conduit, config)

        self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
        self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
        isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
        self.assertEqual(len(isos_list), 1)
        # make sure the iso name defaults to repoid
        self.assertTrue(isos_list[-1].startswith(repo.id))
        # test isoprefix:
        iso_prefix = "mock-iso-prefix"
        config = distributor_mocks.get_basic_config(http_publish_dir=self.http_publish_dir, https_publish_dir=self.https_publish_dir, http=True, https=False, iso_prefix=iso_prefix)
        report = iso_distributor.publish_repo(repo, publish_conduit, config)

        self.assertTrue(os.path.exists("%s/%s" % (self.http_publish_dir, repo.id)))
        self.assertEquals(len(os.listdir(self.https_publish_dir)), 0)
        isos_list = os.listdir("%s/%s" % (self.http_publish_dir, repo.id))
        self.assertEqual(len(isos_list), 2)
        print isos_list
        # make sure the iso name uses the prefix
        self.assertTrue(isos_list[-1].startswith(iso_prefix))
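Every example on this page builds its conduit through importer_mocks.get_sync_conduit, whose definition is not shown here. A minimal sketch of what such a helper could look like, assuming Pulp 2's pulp.plugins.model.Unit and a conduit surface of get_units(), init_unit(), and save_unit() (the calls these tests assert against); this is a hypothetical stand-in, not the real importer_mocks module:

    import os
    import mock
    from pulp.plugins.model import Unit

    def get_sync_conduit(type_id=None, existing_units=None, pkg_dir=None):
        # Hypothetical stand-in for importer_mocks.get_sync_conduit.
        def init_unit(type_id, unit_key, unit_metadata, relative_path):
            storage_path = relative_path
            if pkg_dir:
                storage_path = os.path.join(pkg_dir, relative_path)
            return Unit(type_id, unit_key, unit_metadata, storage_path)

        conduit = mock.Mock()
        conduit.init_unit.side_effect = init_unit
        conduit.get_units.return_value = existing_units or []
        return conduit

save_unit is left as a bare Mock so that call counts can be asserted, as the errata examples below do.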
Example #2
    def test_remove_old_packages(self):
        feed_url = "http://jmatthews.fedorapeople.org/repo_multiple_versions/"
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_remove_old_packages"
        sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, pkg_dir=self.pkg_dir)
        ###
        # Test that old packages are not in rpmList and are never intended to be downloaded.
        # Additionally, verify that already existing packages which are NOT orphaned are also
        # removed by the remove_old functionality.
        ###
        config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=False, num_old_packages=0)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertEquals(summary["num_synced_new_rpms"], 12)
        pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
        self.assertEquals(len(pkgs), 12)

        yumRepoGrinder = importer_rpm.get_yumRepoGrinder(repo.id, repo.working_dir, config)
        yumRepoGrinder.setup(basepath=repo.working_dir)
        rpm_items = yumRepoGrinder.getRPMItems()
        yumRepoGrinder.stop()
        del yumRepoGrinder
        self.assertEquals(len(rpm_items), 12)

        existing_units = []
        for rpm in rpm_items:
            u = Unit(
                TYPE_ID_RPM,
                importer_rpm.form_rpm_unit_key(rpm),
                importer_rpm.form_rpm_metadata(rpm),
                os.path.join(self.pkg_dir, rpm["pkgpath"], rpm["filename"]),
            )
            existing_units.append(u)
        config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=True, num_old_packages=6)
        sync_conduit = importer_mocks.get_sync_conduit(
            type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir
        )
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertEquals(summary["num_rpms"], 7)
        self.assertEquals(summary["num_orphaned_rpms"], 5)
        self.assertEquals(summary["num_synced_new_rpms"], 0)
        self.assertEquals(summary["num_not_synced_rpms"], 0)
        pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
        self.assertEquals(len(pkgs), 7)

        config = importer_mocks.get_basic_config(feed_url=feed_url, remove_old=True, num_old_packages=0)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertEquals(summary["num_rpms"], 1)
        self.assertEquals(summary["num_orphaned_rpms"], 11)
        self.assertEquals(summary["num_synced_new_rpms"], 0)
        self.assertEquals(summary["num_not_synced_rpms"], 0)
        pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
        self.assertEquals(len(pkgs), 1)
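The counts asserted above encode a simple retention rule: for each package name, keep the newest RPM plus num_old_packages older versions and orphan the rest. A standalone sketch of that rule (an illustration, not importer_rpm's actual code):

    def select_rpms_to_keep(versions_newest_first, num_old_packages):
        # versions_newest_first: all RPMs sharing one package name, ordered
        # newest first; real code would order by (epoch, version, release),
        # e.g. with rpm.labelCompare.
        return versions_newest_first[:1 + num_old_packages]

The kept/orphaned counts imply all 12 RPMs in this repo are versions of a single package name: num_old_packages=6 keeps 1 + 6 = 7 (orphaning 5), and num_old_packages=0 keeps just 1 (orphaning 11), matching the summary values asserted in the test.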
Example #3
    def test_errata_sync_with_repos_that_share_upstream_url(self):
        # This test is for https://bugzilla.redhat.com/show_bug.cgi?id=870495
        feed_url = "http://example.com/test_repo/"

        # Set up repo_1 and sync it
        importer_1 = YumImporter()
        repo_1 = mock.Mock(spec=Repository)
        repo_1.working_dir = self.working_dir
        repo_1.id = "test_repo_1"
        sync_conduit_1 = importer_mocks.get_sync_conduit()
        config_1 = importer_mocks.get_basic_config(feed_url=feed_url)
        self.simulate_sync(repo_1, self.repo_dir)
        importer_errata_1 = errata.ImporterErrata()
        status_1, summary_1, details_1 = importer_errata_1.sync(repo_1, sync_conduit_1, config_1)
        self.assertTrue(status_1)
        self.assertTrue(summary_1 is not None)
        self.assertTrue(details_1 is not None)
        self.assertEquals(summary_1["num_new_errata"], 52)
        self.assertEquals(summary_1["num_existing_errata"], 0)
        self.assertEquals(summary_1["num_orphaned_errata"], 0)
        self.assertEquals(details_1["num_bugfix_errata"], 36)
        self.assertEquals(details_1["num_security_errata"], 7)
        self.assertEquals(details_1["num_enhancement_errata"], 9)
        # We should have called save_unit() once for each erratum, in sync().
        self.assertEqual(len(sync_conduit_1.save_unit.mock_calls), 52)

        # Now let's set up another repo with the same URL, and then sync. We should get the same
        # errata.
        importer_2 = YumImporter()
        repo_2 = mock.Mock(spec=Repository)
        working_dir_2 = os.path.join(self.temp_dir, "working_2")
        os.makedirs(working_dir_2)
        repo_2.working_dir = working_dir_2
        repo_2.id = "test_repo_2"
        unit_key = {'id': "RHBA-2007:0112"}
        metadata = {'updated' : "2007-03-14 00:00:00",
                    'pkglist': [{'name': 'RHEL Virtualization (v. 5 for 32-bit x86)'}]}
        existing_units = [Unit(TYPE_ID_ERRATA, unit_key, metadata, '')]
        existing_units[0].updated = metadata['updated']
        sync_conduit_2 = importer_mocks.get_sync_conduit(existing_units=existing_units)
        config_2 = importer_mocks.get_basic_config(feed_url=feed_url)
        self.simulate_sync(repo_2, self.repo_dir)
        importer_errata_2 = errata.ImporterErrata()
        status_2, summary_2, details_2 = importer_errata_2.sync(repo_2, sync_conduit_2, config_2)
        self.assertTrue(status_2)
        self.assertTrue(summary_2 is not None)
        self.assertTrue(details_2 is not None)
        self.assertEquals(summary_2["num_new_errata"], 51)
        self.assertEquals(summary_2["num_existing_errata"], 1)
        self.assertEquals(summary_2["num_orphaned_errata"], 0)
        self.assertEquals(details_2["num_bugfix_errata"], 35)
        self.assertEquals(details_2["num_security_errata"], 7)
        self.assertEquals(details_2["num_enhancement_errata"], 9)

        # There should be the same number of calls to save_unit() as there are errata,
        # because sync() calls it once for each of the 51 new errata, and get_new_errata_units()
        # also calls it once for the one erratum that already existed.
        self.assertEqual(len(sync_conduit_2.save_unit.mock_calls), 52)
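The split between num_new_errata and num_existing_errata turns on matching erratum ids against the conduit's existing units and comparing their 'updated' timestamps. A sketch of that decision under those assumptions (not the errata module's actual code):

    import time

    ERRATA_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"  # matches "2007-03-14 00:00:00"

    def is_existing_erratum(upstream_erratum, existing_by_id):
        unit = existing_by_id.get(upstream_erratum["id"])
        if unit is None:
            return False
        # A local copy only counts as existing when it is at least as new as
        # the upstream erratum; an older local copy gets purged and resynced.
        return (time.strptime(unit.updated, ERRATA_DATE_FORMAT) >=
                time.strptime(upstream_erratum["updated"], ERRATA_DATE_FORMAT))

Here RHBA-2007:0112 is pre-seeded with the same 'updated' value the feed advertises, so it lands in num_existing_errata while the other 51 stay new.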
Example #4
    def test_sync_of_orphaned_data(self):
        # Sync repo with some initial data
        # Modify the underlying directory to make it look like source has changed
        # Re-sync
        # Verify orphaned groups/categories were removed
        ic = ImporterComps()
        repo_src_dir = os.path.join(self.data_dir, "test_orphaned_data_initial")
        feed_url = "file://%s" % (repo_src_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        repo = mock.Mock(spec=Repository)
        repo.id = "test_sync_of_orphaned_data"
        repo.working_dir = self.working_dir
        # Simulate a repo sync, copy the source contents to the repo.working_dir
        self.simulate_sync(repo, repo_src_dir)

        sync_conduit = importer_mocks.get_sync_conduit()
        status, summary, details = ic.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertEqual(summary["num_available_groups"], 3)
        self.assertEqual(summary["num_available_categories"], 2)
        self.assertEqual(summary["num_new_groups"], 3)
        self.assertEqual(summary["num_new_categories"], 2)
        self.assertEqual(summary["num_orphaned_groups"], 0)
        self.assertEqual(summary["num_orphaned_categories"], 0)
        self.assertTrue(summary["time_total_sec"] > 0)
        #
        # Simulate the existing_units 
        #
        avail_groups, avail_cats = comps.get_available(repo_src_dir)
        existing_cats, existing_cat_units = comps.get_new_category_units(avail_cats, {}, sync_conduit, repo)
        existing_groups, existing_group_units = comps.get_new_group_units(avail_groups, {}, sync_conduit, repo)
        self.assertEquals(len(existing_cats), 2)
        self.assertEquals(len(existing_groups), 3)

        existing_units = []
        existing_units.extend(existing_group_units.values())
        existing_units.extend(existing_cat_units.values())
        self.assertEquals(len(existing_units), (len(existing_cats) + len(existing_groups)))
        # 
        # Now we will simulate a change to the feed and pass in our existing units
        #
        repo_src_dir = os.path.join(self.data_dir, "test_orphaned_data_final")
        feed_url = "file://%s" % (repo_src_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=existing_units)
        self.simulate_sync(repo, repo_src_dir)
        status, summary, details = ic.sync(repo, sync_conduit, config)

        self.assertTrue(status)
        self.assertEqual(summary["num_available_groups"], 2)
        self.assertEqual(summary["num_available_categories"], 1)
        self.assertEqual(summary["num_new_groups"], 0)
        self.assertEqual(summary["num_new_categories"], 0)
        self.assertEqual(summary["num_orphaned_groups"], 1)
        self.assertEqual(summary["num_orphaned_categories"], 1)
        self.assertTrue(summary["time_total_sec"] > 0)
Example #5
    def test_bandwidth_limit(self):
        # This test assumes an available bandwidth of more than 100 KB/sec across the 2 threads
        feed_url = 'http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/test_bandwidth_repo_smaller/'
        expected_size_bytes = 209888 # combined size of the 2 RPMs in this repo
        expected_num_packages = 2
        num_threads = 2
        max_speed = 25 # KB/sec

        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_bandwidth_limit"
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=num_threads, max_speed=max_speed)

        start = time.time()
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        end = time.time()
        self.assertTrue(status)
        self.assertEquals(summary["num_synced_new_rpms"], expected_num_packages)
        self.assertEquals(summary["num_resynced_rpms"], 0)
        self.assertEquals(summary["num_not_synced_rpms"], 0)
        self.assertEquals(summary["num_orphaned_rpms"], 0)
        self.assertEquals(details["size_total"], expected_size_bytes)

        expected = (float(expected_size_bytes)/(num_threads*max_speed*1000))
        actual_A = end - start
        self.assertTrue(actual_A > expected)
        #
        # Clean up and resync with no bandwidth limit
        # Ensure result is quicker than above
        #
        max_speed = 0
        self.clean()
        self.init()
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_bandwidth_limit"
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=num_threads, max_speed=max_speed)
        start = time.time()
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        end = time.time()
        self.assertTrue(status)
        self.assertEquals(summary["num_synced_new_rpms"], expected_num_packages)
        self.assertEquals(summary["num_resynced_rpms"], 0)
        self.assertEquals(summary["num_not_synced_rpms"], 0)
        self.assertEquals(summary["num_orphaned_rpms"], 0)
        self.assertEquals(details["size_total"], expected_size_bytes)
Example #6
    def test_orphaned_distributions(self):
        feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_repo"
        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        dunit_key = {}
        dunit_key['id'] = "ks-TestFamily-TestVariant-16-x86_64"
        dunit_key['version'] = "16"
        dunit_key['arch'] = "x86_64"
        dunit_key['family'] = "TestFamily"
        dunit_key['variant'] = "TestVariant"
        metadata = {"files": [
            {"checksumtype": "sha256", "relativepath": "images/fileA.txt", "fileName": "fileA.txt",
             "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileA.txt",
             "item_type": "tree_file",
             "savepath": "%s/testr1/images" % self.working_dir,
             "checksum": "22603a94360ee24b7034c74fa13d70dd122aa8c4be2010fc1361e1e6b0b410ab",
             "filename": "fileA.txt",
             "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
             "size": 0},
            {"checksumtype": "sha256", "relativepath": "images/fileB.txt", "fileName": "fileB.txt",
             "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileB.txt",
             "item_type": "tree_file",
             "savepath": "%s/testr1/images" % self.working_dir,
             "checksum": "8dc89e9883c098443f6616e60a8e489254bf239eeade6e4b4943b7c8c0c345a4",
             "filename": "fileB.txt",
             "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
             "size": 0},
            {"checksumtype": "sha256", "relativepath": "images/fileC.iso", "fileName": "fileC.iso",
             "downloadurl": "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileC.iso",
             "item_type": "tree_file",
             "savepath": "%s/testr1/images" % self.working_dir,
             "checksum": "099f2bafd533e97dcfee778bc24138c40f114323785ac1987a0db66e07086f74",
             "filename": "fileC.iso",
             "pkgpath": "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
             "size": 0}]}
        distro_unit = Unit(distribution.TYPE_ID_DISTRO, dunit_key, metadata, '')
        distro_unit.storage_path = "%s/ks-TestFamily-TestVariant-16-x86_64" % self.pkg_dir
        new_feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/zoo/"
        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir, existing_units=[distro_unit])
        config = importer_mocks.get_basic_config(feed_url=new_feed_url)
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        print status, summary, details
        self.assertTrue(status)
        self.assertTrue(summary is not None)
        self.assertTrue(details is not None)
        self.assertEquals(summary["num_orphaned_distributions"], 1)
Example #7
    def test_basic_sync_groups(self):
        global updated_progress
        updated_progress = None

        def set_progress(status_type, progress):
            global updated_progress
            updated_progress = progress

        ic = ImporterComps()
        repo_src_dir = os.path.join(self.data_dir, "pulp_unittest")
        feed_url = "file://%s" % (repo_src_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        repo = mock.Mock(spec=Repository)
        repo.id = "test_basic_sync_groups"
        repo.working_dir = self.working_dir
        # Simulate a repo sync, copy the source contents to the repo.working_dir
        self.simulate_sync(repo, repo_src_dir)

        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
        status, summary, details = ic.sync(repo, sync_conduit, config, set_progress)
        self.assertTrue(status)
        self.assertEqual(summary["num_available_groups"], 3)
        self.assertEqual(summary["num_available_categories"], 2)
        self.assertEqual(summary["num_new_groups"], 3)
        self.assertEqual(summary["num_new_categories"], 2)
        self.assertEqual(summary["num_orphaned_groups"], 0)
        self.assertEqual(summary["num_orphaned_categories"], 0)
        self.assertTrue(summary["time_total_sec"] > 0)
Example #8
    def test_sync_groups_no_metadata_present(self):
        global updated_progress
        updated_progress = None

        def set_progress(status_type, progress):
            global updated_progress
            updated_progress = progress

        ic = ImporterComps()
        feed_url = "file://%s/simple_repo_no_comps" % (self.data_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_sync_groups_no_metadata_present"
        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
        status, summary, details = ic.sync(repo, sync_conduit, config, set_progress)
        self.assertTrue(status)
        self.assertEqual(updated_progress["state"], "FINISHED")
        self.assertEqual(summary["num_available_groups"], 0)
        self.assertEqual(summary["num_available_categories"], 0)
        self.assertEqual(summary["num_orphaned_groups"], 0)
        self.assertEqual(summary["num_orphaned_categories"], 0)
        self.assertEqual(summary["num_new_groups"], 0)
        self.assertEqual(summary["num_new_categories"], 0)
        self.assertTrue(summary["time_total_sec"] > 0)
Example #9
    def test_skip_packagegroups(self):
        global updated_progress
        updated_progress = None

        def set_progress(progress):
            global updated_progress
            updated_progress = progress

        yi = YumImporter()
        skip = ["packagegroup"]
        repo_src_dir = os.path.join(self.data_dir, "pulp_unittest")
        feed_url = "file://%s" % (repo_src_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url, skip_content_types=skip)
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_skip_packagegroup"
        # Simulate a repo sync, copy the source contents to the repo.working_dir
        self.simulate_sync(repo, repo_src_dir)

        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
        sync_conduit.set_progress = mock.Mock()
        sync_conduit.set_progress.side_effect = set_progress
        status, summary, details = yi._sync_repo(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertEqual(updated_progress["comps"]["state"], "SKIPPED")
Example #10
    def test_progress_sync(self):
        global updated_progress
        updated_progress = None

        def set_progress(progress):
            global updated_progress
            updated_progress = progress

        feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
        importer = YumImporter()
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_progress_sync"
        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
        sync_conduit.set_progress = mock.Mock()
        sync_conduit.set_progress.side_effect = set_progress
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        status, summary, details = importer._sync_repo(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertEquals(summary["packages"]["num_synced_new_rpms"], 3)
        self.assertTrue(updated_progress is not None)
        self.assertTrue("metadata" in updated_progress)
        self.assertTrue(updated_progress["metadata"].has_key("state"))
        self.assertTrue("errata" in updated_progress)
        self.assertTrue(updated_progress["errata"].has_key("state"))
        self.assertTrue("content" in updated_progress)
        self.assertTrue(updated_progress["content"].has_key("state"))
        self.assertEquals(updated_progress["content"]["state"], "FINISHED")
        for key in importer_rpm.PROGRESS_REPORT_FIELDS:
            self.assertTrue(key in updated_progress["content"])
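For reference, the final progress report implied by those assertions has roughly this shape; only the "state" keys and the FINISHED value for "content" are what the test actually checks, and the per-field counters under "content" come from importer_rpm.PROGRESS_REPORT_FIELDS, which is not reproduced on this page:

    updated_progress = {
        "metadata": {"state": "FINISHED"},
        "errata": {"state": "FINISHED"},
        "content": {"state": "FINISHED"},  # plus one entry per PROGRESS_REPORT_FIELDS key
    }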
Example #11
    def test_repo_scratchpad_settings(self):
        global repo_scratchpad
        repo_scratchpad = {}

        def set_repo_scratchpad(data):
            global repo_scratchpad
            repo_scratchpad = data

        def get_repo_scratchpad():
            global repo_scratchpad
            return repo_scratchpad

        feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/test_drpm_repo/"
        importer = YumImporter()
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_repo_scratchpad"
        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
        sync_conduit.set_repo_scratchpad = mock.Mock()
        sync_conduit.set_repo_scratchpad.side_effect = set_repo_scratchpad
        sync_conduit.get_repo_scratchpad = mock.Mock()
        sync_conduit.get_repo_scratchpad.side_effect = get_repo_scratchpad
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importer._sync_repo(repo, sync_conduit, config)
        print "SCRATCHPAD %s" %  repo_scratchpad
        self.assertEquals(repo_scratchpad['checksum_type'], 'sha256')
        self.assertTrue(repo_scratchpad.has_key("repodata"))
        self.assertTrue(repo_scratchpad["repodata"].has_key("prestodelta"))
Example #12
    def test_local_sync_with_packages_in_subdir(self):
        feed_url = "file://%s/repo_packages_in_subdirs/" % (self.data_dir)
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_local_sync_with_packages_in_subdir"
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(summary is not None)
        self.assertTrue(details is not None)
        self.assertTrue(status)
        self.assertEquals(summary["num_synced_new_rpms"], 3)
        self.assertEquals(summary["num_resynced_rpms"], 0)
        self.assertEquals(summary["num_not_synced_rpms"], 0)
        self.assertEquals(summary["num_orphaned_rpms"], 0)
        self.assertEquals(details["size_total"], 6868)
        # Confirm regular RPM files exist under self.pkg_dir
        pkgs = self.get_files_in_dir("*.rpm", self.pkg_dir)
        self.assertEquals(len(pkgs), 3)
        for p in pkgs:
            self.assertTrue(os.path.isfile(p))
        # Confirm symlinks to RPMs exist under repo.working_dir
        sym_links = self.get_files_in_dir("*.rpm", repo.working_dir)
        self.assertEquals(len(sym_links), 3)
        for link in sym_links:
            self.assertTrue(os.path.islink(link))
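self.get_files_in_dir is a suite helper that never appears on this page; a plausible implementation (an assumption, not the actual helper) recursively globs a directory:

    import fnmatch
    import os

    def get_files_in_dir(pattern, base_dir):
        matches = []
        for root, dirs, files in os.walk(base_dir):
            matches.extend(os.path.join(root, f) for f in fnmatch.filter(files, pattern))
        return matches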
Example #13
    def test_drpm_sync(self):
        feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/test_drpm_repo/"
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_repo"
        sync_conduit = importer_mocks.get_sync_conduit(pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertTrue(summary is not None)
        self.assertTrue(details is not None)
        self.assertEquals(summary["num_synced_new_drpms"], 18)
        self.assertEquals(summary["num_resynced_drpms"], 0)
        self.assertEquals(summary["num_orphaned_drpms"], 0)
        # validate drpms on filesystem
        def get_drpm_list(dir):
            dpkgs = []
            for root, dirs, files in os.walk(dir):
                for file in files:
                    dpkgs.append("%s/%s" % (root, file))
            return dpkgs
        dpkgs = filter(lambda x: x.endswith(".drpm"), get_drpm_list(self.pkg_dir))
        self.assertEquals(len(dpkgs), 18)
        # Confirm symlinks
        sym_links = filter(lambda x: x.endswith(".drpm"), get_drpm_list(repo.working_dir))
        self.assertEquals(len(sym_links), 18)
        for link in sym_links:
            self.assertTrue(os.path.islink(link))
Example #14
    def setUp(self):
        self.config = importer_mocks.get_basic_config(
            feed_url='http://fake.com/iso_feed/', max_speed=500.0, num_threads=5,
            ssl_client_cert="Trust me, I'm who I say I am.", ssl_client_key="Secret Key",
            ssl_ca_cert="Uh, I guess that's the right server.",
            proxy_url='http://proxy.com', proxy_port=1234, proxy_user="******",
            proxy_password='******')

        self.temp_dir = tempfile.mkdtemp()
        self.pkg_dir = os.path.join(self.temp_dir, 'content')
        os.mkdir(self.pkg_dir)

        # These checksums correspond to the files that our curl mocks will generate. The mocks
        # do not have a test4.iso, so that unit exercises removal of old ISOs during sync.
        self.existing_units = [
            Unit(TYPE_ID_ISO,
                 {'name': 'test.iso', 'size': 16,
                  'checksum': 'f02d5a72cd2d57fa802840a76b44c6c6920a8b8e6b90b20e26c03876275069e0'},
                 {}, '/path/test.iso'),
            Unit(TYPE_ID_ISO,
                 {'name': 'test2.iso', 'size': 22,
                  'checksum': 'c7fbc0e821c0871805a99584c6a384533909f68a6bbe9a2a687d28d9f3b10c16'},
                 {}, '/path/test2.iso'),
            Unit(TYPE_ID_ISO, {'name': 'test4.iso', 'size': 4, 'checksum': 'sum4'},
                 {}, '/path/test4.iso')]
        self.sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_ISO, pkg_dir=self.pkg_dir,
                                                            existing_units=self.existing_units)

        self.iso_sync_run = ISOSyncRun(self.sync_conduit, self.config)
Example #15
    def test_remove_packages(self):
        feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest/"
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_remove_packages"
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertEquals(summary["num_synced_new_rpms"], 3)
        self.assertEquals(len(self.get_files_in_dir("*.rpm", self.pkg_dir)), 3)
        self.assertEquals(len(self.get_files_in_dir("*.rpm", repo.working_dir)), 3)
        expected_rpms = self.get_expected_rpms_from_pulp_unittest(repo.id)
        # Confirm that both the RPM and the symlink for each expected RPM exist,
        # then run remove_unit and confirm that the RPM and the symlink still
        # exist on the filesystem
        for rpm in expected_rpms.values():
            rpm_save_path = os.path.join(rpm["pkgpath"], rpm["filename"])
            self.assertTrue(os.path.exists(rpm_save_path))

            symlink_save_path = os.path.join(rpm["savepath"], rpm["filename"])
            self.assertTrue(os.path.lexists(symlink_save_path))

            unit = Unit(TYPE_ID_RPM, 
                    importer_rpm.form_rpm_unit_key(rpm), 
                    importer_rpm.form_rpm_metadata(rpm),
                    rpm_save_path)
            importer_rpm.remove_unit(sync_conduit, unit)
            self.assertTrue(os.path.exists(rpm_save_path))
            self.assertTrue(os.path.exists(symlink_save_path))
Example #16
    def test_cancel_sync(self):
        global updated_progress
        updated_progress = None

        def set_progress(progress):
            global updated_progress
            updated_progress = progress

        class SyncThread(threading.Thread):
            def __init__(self, importer, repo, sync_conduit, config):
                threading.Thread.__init__(self)
                self.importer = importer
                self.repo = repo
                self.sync_conduit = sync_conduit
                self.config = config
                self.status = None
                self.summary = None
                self.details = None
                self.finished = False

            def run(self):
                status, summary, details = self.importer._sync_repo(self.repo, self.sync_conduit, self.config)
                self.status = status
                self.summary = summary
                self.details = details
                self.finished = True

        feed_url = "http://repos.fedorapeople.org/repos/pulp/pulp/v1/testing/6Server/x86_64/"
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_cancel_sync"
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
        sync_conduit.set_progress = mock.Mock()
        sync_conduit.set_progress.side_effect = set_progress
        config = importer_mocks.get_basic_config(feed_url=feed_url, num_threads=1, max_speed=25)
        importer = YumImporter()
        sync_thread = SyncThread(importer, repo, sync_conduit, config)
        sync_thread.start()
        # Wait to confirm that sync has started and we are downloading packages
        # We are intentionally setting the 'config' to use 1 thread and max_speed to be low so we will
        # have a chance to cancel the sync before it completes
        for i in range(30):
            if updated_progress and "content" in updated_progress and "state" in updated_progress["content"] \
                    and updated_progress["content"]["state"] == "IN_PROGRESS":
                break
            time.sleep(1)
        self.assertEquals(updated_progress["metadata"]["state"], "FINISHED")
        self.assertEquals(updated_progress["content"]["state"], "IN_PROGRESS")
        ###
        ### Issue Cancel
        ###
        importer.cancel_sync_repo(None, None)
        # Wait for cancel of sync
        for i in range(45):
            if sync_thread.finished:
                break
            time.sleep(1)
        self.assertEquals(updated_progress["content"]["state"], "CANCELED")
        self.assertFalse(sync_thread.status)
Example #17
    def test_sync_no_feed(self):
        repo = mock.MagicMock(spec=Repository)
        pkg_dir = os.path.join(self.temp_dir, 'content')
        sync_conduit = importer_mocks.get_sync_conduit(type_id=ids.TYPE_ID_ISO, pkg_dir=pkg_dir)
        config = {importer_constants.KEY_FEED: None}
        config = importer_mocks.get_basic_config(**config)

        # Now run the sync
        self.assertRaises(ValueError, self.iso_importer.sync_repo, repo, sync_conduit, config)
Example #18
    def test_get_existing_errata(self):
        unit_key = dict()
        unit_key['id'] = "RHBA-2007:0112"
        metadata = {'updated' : "2007-03-13 00:00:00"}
        existing_units = [Unit(TYPE_ID_ERRATA, unit_key, metadata, '')]
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=existing_units)
        created_existing_units = errata.get_existing_errata(sync_conduit)
        self.assertEquals(len(created_existing_units), 1)
        self.assertEquals(len(existing_units), len(created_existing_units))
Example #19
    def test_upload_unit_validate_unset(self, remove, validate):
        """
        Assert correct behavior from upload_unit() when the validation setting is not set. This
        should default to validating the upload.
        """
        # Set up the test
        file_data = 'This is a file.\n'
        working_dir = os.path.join(self.temp_dir, "working")
        os.mkdir(working_dir)
        pkg_dir = os.path.join(self.temp_dir, 'content')
        os.mkdir(pkg_dir)
        repo = mock.MagicMock(spec=Repository)
        repo.working_dir = working_dir
        # Set an incorrect checksum. The upload should be unsuccessful, since the default is to
        # validate
        unit_key = {'name': 'test.iso', 'size': 16, 'checksum': 'Wrong'}
        metadata = {}
        temp_file_location = os.path.join(self.temp_dir, 'test.iso')
        with open(temp_file_location, 'w') as temp_file:
            temp_file.write(file_data)
        sync_conduit = importer_mocks.get_sync_conduit(type_id=ids.TYPE_ID_ISO, pkg_dir=pkg_dir)
        # validate isn't set, so default should happen
        config = importer_mocks.get_basic_config()

        # Run the upload. This should report a failure
        report = self.iso_importer.upload_unit(repo, ids.TYPE_ID_ISO, unit_key, metadata,
                                               temp_file_location, sync_conduit, config)

        self.assertEqual(report['success_flag'], False)
        self.assertEqual(
            report['summary'],
            ('Downloading <test.iso> failed checksum validation. The manifest specified the '
             'checksum to be Wrong, but it was '
             'f02d5a72cd2d57fa802840a76b44c6c6920a8b8e6b90b20e26c03876275069e0.'))

        # The conduit's init_unit method should have been called
        expected_rel_path = os.path.join(unit_key['name'], unit_key['checksum'],
                                         str(unit_key['size']), unit_key['name'])
        sync_conduit.init_unit.assert_called_once_with(ids.TYPE_ID_ISO, unit_key, metadata,
                                                       expected_rel_path)

        # The file should have been moved away from its temporary location
        self.assertFalse(os.path.exists(temp_file_location))
        would_be_destination = os.path.join(pkg_dir, expected_rel_path)
        self.assertFalse(os.path.exists(would_be_destination))
        # The file should have been removed from the destination
        remove.assert_called_once_with(would_be_destination)

        # validate() should have been called with the full_validation=True flag
        iso = validate.mock_calls[0][1][0]
        validate.assert_called_once_with(iso, full_validation=True)

        # The conduit's save_unit method should not have been called
        self.assertEqual(sync_conduit.save_unit.call_count, 0)
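The digest quoted in that failure summary is simply the SHA-256 of the uploaded bytes. A standalone way to reproduce it (a sketch, not the importer's actual validation code):

    import hashlib

    def iso_checksum(path, chunk_size=1024 * 1024):
        digest = hashlib.sha256()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), ''):
                digest.update(chunk)
        return digest.hexdigest()

    # For a file containing 'This is a file.\n' this returns
    # f02d5a72cd2d57fa802840a76b44c6c6920a8b8e6b90b20e26c03876275069e0,
    # the value the report compares against the manifest's 'Wrong'.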
Example #20
    def test_sync_calls_sync(self, mock_sync_run):
        repo = Repository('repo1')
        sync_conduit = importer_mocks.get_sync_conduit(type_id=ids.TYPE_ID_ISO, pkg_dir='/a/b/c')
        config = importer_mocks.get_basic_config(**{
                    importer_constants.KEY_FEED: 'http://fake.com/iso_feed/'})

        self.iso_importer.sync_repo(repo, sync_conduit, config)

        # make sure the sync workflow is called with the right stuff
        mock_sync_run.assert_called_once_with(sync_conduit, config)
        mock_sync_run.return_value.perform_sync.assert_called_once_with()
Example #21
    def test_get_existing_units(self):
        unit_key = {}
        for k in UNIT_KEY_RPM:
            unit_key[k] = "test_value"
        existing_units = [Unit(TYPE_ID_RPM, unit_key, "test_metadata", os.path.join(self.pkg_dir, "test_rel_path"))]
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=existing_units, pkg_dir=self.pkg_dir)
        actual_existing_units = importer_rpm.get_existing_units(sync_conduit)
        self.assertEquals(len(actual_existing_units), 1)
        self.assertEquals(len(existing_units), len(actual_existing_units))
        lookup_key = importer_rpm.form_lookup_key(unit_key)
        self.assertEqual(existing_units[0], actual_existing_units[lookup_key])
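importer_rpm.form_lookup_key is not shown on this page; judging from the RPM unit keys used in the other examples, a plausible sketch (an assumption, not the real function) reduces a unit key to a hashable tuple so units can be indexed in a dict:

    def form_lookup_key(unit_key):
        return (unit_key["name"], unit_key["epoch"], unit_key["version"],
                unit_key["release"], unit_key["arch"], unit_key["checksumtype"],
                unit_key["checksum"])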
Example #22
    def test_feedless_repo_sync(self):
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_feedless_repo_sync"
        sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, pkg_dir=self.pkg_dir)
        sync_conduit.set_progress = mock.Mock()
        config = importer_mocks.get_basic_config()
        importer = YumImporter()
        status, summary, details = importer._sync_repo(repo, sync_conduit, config)
        self.assertFalse(status)
        self.assertEquals(summary['error'], "Cannot perform repository sync on a repository with no feed")
Example #23
    def test_upload_unit_validate_false(self, validate):
        """
        Assert correct behavior from upload_unit() when the validation setting is False.
        """
        # Set up the test
        file_data = 'This is a file.\n'
        working_dir = os.path.join(self.temp_dir, "working")
        os.mkdir(working_dir)
        pkg_dir = os.path.join(self.temp_dir, 'content')
        os.mkdir(pkg_dir)
        repo = mock.MagicMock(spec=Repository)
        repo.working_dir = working_dir
        # Set an incorrect checksum. The upload should be successful anyway, since
        # validation will be set to False
        unit_key = {'name': 'test.iso', 'size': 16, 'checksum': 'Wrong'}
        metadata = {}
        temp_file_location = os.path.join(self.temp_dir, 'test.iso')
        with open(temp_file_location, 'w') as temp_file:
            temp_file.write(file_data)
        sync_conduit = importer_mocks.get_sync_conduit(type_id=ids.TYPE_ID_ISO, pkg_dir=pkg_dir)
        config = importer_mocks.get_basic_config(**{importer_constants.KEY_VALIDATE: 'false'})

        # Run the upload. This should be successful, since we have set validation off.
        report = self.iso_importer.upload_unit(repo, ids.TYPE_ID_ISO, unit_key, metadata,
                                               temp_file_location, sync_conduit, config)

        # The import should have been successful
        self.assertEqual(report['success_flag'], True)
        self.assertEqual(report['summary'], None)

        # The conduit's init_unit method should have been called
        expected_rel_path = os.path.join(unit_key['name'], unit_key['checksum'],
                                         str(unit_key['size']), unit_key['name'])
        sync_conduit.init_unit.assert_called_once_with(ids.TYPE_ID_ISO, unit_key, metadata,
                                                       expected_rel_path)

        # The file should have been moved to its final destination
        self.assertFalse(os.path.exists(temp_file_location))
        expected_destination = os.path.join(pkg_dir, expected_rel_path)
        self.assertTrue(os.path.exists(expected_destination))
        with open(expected_destination) as iso_file:
            self.assertEqual(iso_file.read(), file_data)

        # validate() should still have been called, but with the full_validation=False flag
        # We need to get the ISO itself for our assertion, since it is technically the first
        # argument
        iso = validate.mock_calls[0][1][0]
        validate.assert_called_once_with(iso, full_validation=False)

        # The conduit's save_unit method should have been called
        self.assertEqual(sync_conduit.save_unit.call_count, 1)
        saved_unit = sync_conduit.save_unit.mock_calls[0][1][0]
        self.assertEqual(saved_unit.unit_key, unit_key)
Example #24
    def test_distribution_exports(self):
        feed_url = "file://%s/pulp_unittest/" % self.data_dir
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.repo_working_dir
        repo.id = "pulp_unittest"
        repo.checksumtype = 'sha'
        sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        dunit_key = {}
        dunit_key['id'] = "ks-TestFamily-TestVariant-16-x86_64"
        dunit_key['version'] = "16"
        dunit_key['arch'] = "x86_64"
        dunit_key['family'] = "TestFamily"
        dunit_key['variant'] = "TestVariant"
        metadata = { "files" : [{"checksumtype" : "sha256", 	"relativepath" : "images/fileA.txt", 	"fileName" : "fileA.txt",
                    "downloadurl" : "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileA.txt",
                    "item_type" : "tree_file",
                    "savepath" : "%s/testr1/images" % self.repo_working_dir,
                    "checksum" : "22603a94360ee24b7034c74fa13d70dd122aa8c4be2010fc1361e1e6b0b410ab",
                    "filename" : "fileA.txt",
                    "pkgpath" : "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir,
                    "size" : 0 },
                { 	"checksumtype" : "sha256", 	"relativepath" : "images/fileB.txt", 	"fileName" : "fileB.txt",
                    "downloadurl" : "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileB.txt",
                    "item_type" : "tree_file",
                    "savepath" : "%s/testr1/images" % self.repo_working_dir,
                    "checksum" : "8dc89e9883c098443f6616e60a8e489254bf239eeade6e4b4943b7c8c0c345a4",
                    "filename" : "fileB.txt",
                    "pkgpath" : "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir, 	"size" : 0 },
                { 	"checksumtype" : "sha256", 	"relativepath" : "images/fileC.iso", 	"fileName" : "fileC.iso",
                    "downloadurl" : "http://repos.fedorapeople.org/repos/pulp/pulp/demo_repos/pulp_unittest//images/fileC.iso",
                    "item_type" : "tree_file",
                    "savepath" : "%s/testr1/images" % self.repo_working_dir,
                    "checksum" : "099f2bafd533e97dcfee778bc24138c40f114323785ac1987a0db66e07086f74",
                    "filename" : "fileC.iso",
                    "pkgpath" : "%s/ks-TestFamily-TestVariant-16-x86_64/images" % self.pkg_dir, 	"size" : 0 } ],}
        distro_unit = Unit(distribution.TYPE_ID_DISTRO, dunit_key, metadata, '')
        distro_unit.storage_path = "%s/ks-TestFamily-TestVariant-16-x86_64" % self.pkg_dir
        symlink_dir = "%s/%s" % (self.repo_working_dir, "isos")
        iso_distributor = ISODistributor()
        publish_conduit = distributor_mocks.get_publish_conduit(existing_units=[distro_unit], pkg_dir=self.pkg_dir)
        config = distributor_mocks.get_basic_config(https_publish_dir=self.https_publish_dir, http=False, https=True)
        repo_exporter = RepoExporter(symlink_dir)
#        status, errors = iso_distributor._export_distributions([distro_unit], symlink_dir)
        status, errors = repo_exporter.export_distributions([distro_unit])
        print status, errors
        self.assertTrue(status)
        for file in metadata['files']:
            print os.path.isfile("%s/%s" % (symlink_dir, file['relativepath']))
            self.assertTrue(os.path.isfile("%s/%s" % (symlink_dir, file['relativepath'])))
Example #25
    def test_update_errata_units(self):
        # The existing erratum is older than the available copy, so it should be purged and resynced
        available_errata = errata.get_available_errata(self.repo_dir)
        self.assertEqual(52, len(available_errata))
        unit_key = dict()
        unit_key['id'] = "RHBA-2007:0112"
        metadata = {'updated' : "2007-03-13 00:00:00"}
        existing_units = [Unit(TYPE_ID_ERRATA, unit_key, metadata, '')]
        existing_units[0].updated = "2007-03-13 00:00:00"
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=existing_units)
        created_existing_units = errata.get_existing_errata(sync_conduit)
        self.assertEquals(len(created_existing_units), 1)
        new_errata, new_units, sync_conduit = errata.get_new_errata_units(available_errata, sync_conduit)
        self.assertEquals(len(available_errata), len(new_errata))
Example #26
    def get_pkg_group_or_category(self, repo, type_id):
        repo_src_dir = os.path.join(self.data_dir, "test_comps_import_with_dots_in_pkg_names")
        sync_conduit = importer_mocks.get_sync_conduit()
        avail_groups, avail_cats = comps.get_available(repo_src_dir)
        if type_id == TYPE_ID_PKG_GROUP:
            groups, group_units = comps.get_new_group_units(avail_groups, {}, sync_conduit, repo)
            self.assertTrue(len(group_units) > 0)
            return group_units.values()[0]
        elif type_id == TYPE_ID_PKG_CATEGORY:
            cats, cat_units = comps.get_new_category_units(avail_cats, {}, sync_conduit, repo)
            self.assertTrue(len(cat_units) > 0)
            return cat_units.values()[0]
        else:
            return None
Example #27
    def test_link_errata_rpm_units(self):
        feed_url = "file://%s/test_errata_local_sync/" % self.data_dir
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_errata_local_sync"
        repo.checksumtype = 'sha'
        sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        metadata = {'updated' : "2007-03-13 00:00:00"}
        unit_key_a = {'id' : '','name' :'patb', 'version' :'0.1', 'release' : '2', 'epoch':'0', 'arch' : 'noarch', 'checksumtype' : 'sha',
                      'checksum': '017c12050a97cf6095892498750c2a39d2bf535e'}
        unit_key_b = {'id' : '', 'name' :'emoticons', 'version' :'0.1', 'release' :'2', 'epoch':'0','arch' : 'noarch', 'checksumtype' :'sha',
                      'checksum' : '663c89b0d29bfd5479d8736b716d50eed9495dbb'}

        existing_units = []
        for unit in [unit_key_a, unit_key_b]:
            existing_units.append(Unit(TYPE_ID_RPM, unit, metadata, ''))
        sync_conduit = importer_mocks.get_sync_conduit(type_id=TYPE_ID_RPM, existing_units=existing_units, pkg_dir=self.pkg_dir)
        importerErrata = errata.ImporterErrata()
        status, summary, details = importerErrata.sync(repo, sync_conduit, config)
        self.assertEquals(len(details['link_report']['linked_units']), 2)
Example #28
    def test_srpm_sync(self):
        feed_url = "http://pkilambi.fedorapeople.org/test_srpm_repo/"
        repo = mock.Mock(spec=Repository)
        repo.working_dir = self.working_dir
        repo.id = "test_srpm_sync"
        sync_conduit = importer_mocks.get_sync_conduit(existing_units=[], pkg_dir=self.pkg_dir)
        config = importer_mocks.get_basic_config(feed_url=feed_url)
        importerRPM = importer_rpm.ImporterRPM()
        status, summary, details = importerRPM.sync(repo, sync_conduit, config)
        self.assertTrue(status)
        self.assertTrue(summary is not None)
        self.assertTrue(details is not None)
        self.assertEquals(summary["num_rpms"], 3)
        self.assertEquals(summary["num_synced_new_srpms"], 3)
        self.assertEquals(summary["num_synced_new_rpms"], 0)
Example #29
    def test_upload_unit_named_PULP_MANIFEST(self, remove):
        """
        We had a bug[0] due to the ISOImporter allowing units to be uploaded named PULP_MANIFEST.
        This test asserts that that is no longer allowed.

        [0] https://bugzilla.redhat.com/show_bug.cgi?id=973678
        """
        # Set up the test
        file_data = 'This is a PULP_MANIFEST file. The upload should be rejected.\n'
        working_dir = os.path.join(self.temp_dir, "working")
        os.mkdir(working_dir)
        pkg_dir = os.path.join(self.temp_dir, 'content')
        os.mkdir(pkg_dir)
        repo = mock.MagicMock(spec=Repository)
        repo.working_dir = working_dir
        # We'll set validation off so the checksum doesn't matter
        unit_key = {'name': 'PULP_MANIFEST', 'size': len(file_data), 'checksum': "Doesn't matter"}
        metadata = {}
        temp_file_location = os.path.join(self.temp_dir, unit_key['name'])
        with open(temp_file_location, 'w') as temp_file:
            temp_file.write(file_data)
        sync_conduit = importer_mocks.get_sync_conduit(type_id=ids.TYPE_ID_ISO, pkg_dir=pkg_dir)
        # Just so we don't have to care about the checksum
        config = importer_mocks.get_basic_config(**{importer_constants.KEY_VALIDATE: 'false'})

        report = self.iso_importer.upload_unit(repo, ids.TYPE_ID_ISO, unit_key, metadata,
                                               temp_file_location, sync_conduit, config)

        self.assertEqual(report['success_flag'], False)
        self.assertEqual(report['summary'], 'An ISO may not be named PULP_MANIFEST, as it '
                         'conflicts with the name of the manifest during publishing.')

        # init_unit() should have been called
        expected_rel_path = os.path.join(unit_key['name'], unit_key['checksum'],
                                         str(unit_key['size']), unit_key['name'])
        sync_conduit.init_unit.assert_called_once_with(ids.TYPE_ID_ISO, unit_key, metadata,
                                                       expected_rel_path)

        # The file should have been deleted
        self.assertFalse(os.path.exists(temp_file_location))
        would_be_destination = os.path.join(pkg_dir, expected_rel_path)
        self.assertFalse(os.path.exists(would_be_destination))
        # The file should have been removed from there
        remove.assert_called_once_with(would_be_destination)

        # The conduit's save_unit method should not have been called
        self.assertEqual(sync_conduit.save_unit.call_count, 0)
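The guard this test locks down amounts to rejecting the reserved manifest name before any unit is kept. A minimal sketch (an assumption, not the ISOImporter's actual code); the error string mirrors the summary asserted above:

    def check_upload_name(name):
        if name == 'PULP_MANIFEST':
            raise ValueError('An ISO may not be named PULP_MANIFEST, as it '
                             'conflicts with the name of the manifest during '
                             'publishing.')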
Example #30
    def test_upload_unit_validate_true_bad_checksum(self, remove, validate):
        """
        Test behavior with a bad checksum.
        """
        # Set up the test
        file_data = 'This is a file.\n'
        error_message = 'uh oh'
        validate.side_effect = ValueError(error_message)
        working_dir = os.path.join(self.temp_dir, "working")
        os.mkdir(working_dir)
        pkg_dir = os.path.join(self.temp_dir, 'content')
        os.mkdir(pkg_dir)
        repo = mock.MagicMock(spec=Repository)
        repo.working_dir = working_dir
        # Set an incorrect checksum. The upload should fail.
        unit_key = {'name': 'test.iso', 'size': 16, 'checksum': 'Wrong'}
        metadata = {}
        temp_file_location = os.path.join(self.temp_dir, 'test.iso')
        with open(temp_file_location, 'w') as temp_file:
            temp_file.write(file_data)
        sync_conduit = importer_mocks.get_sync_conduit(type_id=ids.TYPE_ID_ISO, pkg_dir=pkg_dir)
        config = importer_mocks.get_basic_config(**{importer_constants.KEY_VALIDATE: 'true'})

        # Run the upload. This should fail due to the bad checksum
        report = self.iso_importer.upload_unit(repo, ids.TYPE_ID_ISO, unit_key, metadata,
                                               temp_file_location, sync_conduit, config)

        self.assertEqual(report['success_flag'], False)
        self.assertEqual(report['summary'], error_message)
        # The conduit's init_unit method should have been called
        expected_rel_path = os.path.join(unit_key['name'], unit_key['checksum'],
                                         str(unit_key['size']), unit_key['name'])
        sync_conduit.init_unit.assert_called_once_with(ids.TYPE_ID_ISO, unit_key, metadata,
                                                       expected_rel_path)

        # The file should have been deleted
        self.assertFalse(os.path.exists(temp_file_location))
        would_be_destination = os.path.join(pkg_dir, expected_rel_path)
        self.assertFalse(os.path.exists(would_be_destination))
        # The file should have been removed from there
        remove.assert_called_once_with(would_be_destination)

        # validate() should have been called with the full_validation=True flag
        validate.assert_called_once_with(full_validation=True)

        # The conduit's save_unit method should not have been called
        self.assertEqual(sync_conduit.save_unit.call_count, 0)