Example No. 1
    def test_no_distribution(self, mock_get_dist):
        parent = Mock()
        tmp_dir = Mock()
        dist = DistSync(parent, '')
        files = dist.process_distribution(tmp_dir)

        self.assertEquals(0, len(files))
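The decorator that supplies mock_get_dist is cropped out of the excerpt above; presumably the test patches DistSync.get_distribution_file with mock.patch so that nothing is downloaded. A runnable toy sketch of that injection pattern (the Downloader class here is invented for illustration, not Pulp's):

import unittest
from unittest import mock  # the original tests use the external `mock` package on Python 2


class Downloader(object):
    """Hypothetical stand-in for the patched collaborator."""
    def fetch(self):
        raise RuntimeError('would hit the network')


class TestPatchInjection(unittest.TestCase):

    # Each stacked @patch/@patch.object decorator injects one extra mock
    # argument, bottom decorator first -- which is how mock_get_dist above
    # is presumably supplied.
    @mock.patch.object(Downloader, 'fetch', return_value=None)
    def test_fetch_is_mocked(self, mock_fetch):
        self.assertIsNone(Downloader().fetch())
        mock_fetch.assert_called_once_with()


if __name__ == '__main__':
    unittest.main()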
Example No. 2
    def test_get_distribution_file_does_not_exists(self, mock_listener, mock_create_downloader):
        mock_listener.return_value.succeeded_reports = []
        tmp_dir = '/tmp/'
        feed = 'http://www.foo.bar/flux/'
        parent = Mock(feed=feed)
        dist = DistSync(parent, feed)
        file_name = dist.get_distribution_file(tmp_dir)
        self.assertEquals(None, file_name)
Example No. 3
    def test_no_distribution(self, mock_get_dist):
        parent = Mock()
        tmp_dir = Mock()
        model = Mock(metadata=dict())
        dist = DistSync(parent, '')
        files = dist.process_distribution(tmp_dir)

        self.assertEquals(0, len(files))
        self.assertEquals(None, model.metadata.get(constants.CONFIG_KEY_DISTRIBUTION_XML_FILE))
Example No. 4
    def test_parse_good_file(self, mock_get_dist):
        tmp_dir = Mock()
        parent = Mock()
        dist = DistSync(parent, '')
        files = dist.process_distribution(tmp_dir)

        self.assertEquals(3, len(files))
        self.assertEquals('foo/bar.txt', files[0]['relativepath'])
        self.assertEquals('baz/qux.txt', files[1]['relativepath'])
        self.assertEquals(constants.DISTRIBUTION_XML, files[2]['relativepath'])
Example No. 5
    def test_get_distribution_file_exists(self, mock_listener, mock_create_downloader):
        mock_listener.return_value.succeeded_reports = ['foo']
        tmp_dir = '/tmp/'
        feed = 'http://www.foo.bar/flux/'
        parent = Mock(feed=feed)
        dist = DistSync(parent, feed)
        file_name = dist.get_distribution_file(tmp_dir)
        request = mock_create_downloader.return_value.method_calls[0][1][0][0]
        self.assertEquals(request.url, os.path.join(feed, constants.DISTRIBUTION_XML))
        self.assertEquals(request.destination,
                          os.path.join(tmp_dir, constants.DISTRIBUTION_XML))
        self.assertEquals(file_name, os.path.join(tmp_dir, constants.DISTRIBUTION_XML))
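The method_calls[0][1][0][0] chain above digs the first download request back out of the mocked downloader: the first recorded method call, its positional arguments, the first argument (a list of requests), and that list's first element. A small self-contained illustration of that Mock bookkeeping:

from unittest import mock  # or `import mock`, as these Python 2 tests do

downloader = mock.Mock()
downloader.download([mock.sentinel.request])   # mimic downloader.download([request])

# Each entry in method_calls is a call object that unpacks as (name, args, kwargs).
name, args, kwargs = downloader.method_calls[0]
assert name == 'download'
assert args[0][0] is mock.sentinel.request     # same digging as method_calls[0][1][0][0]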
Example No. 6
def _fix_treeinfo_files(distribution_dir):
    """
    Find all treeinfo or .treeinfo files in the distribution directory and
    strip any references to repomd.xml in checksum lists.

    Pulp 2.4 does this stripping when saving new treeinfo files, but we need to
    do a one-time pass of existing files during the upgrade process.
    """
    for root, dirs, files in os.walk(distribution_dir):
        for fname in files:
            if fname.startswith('treeinfo') or fname.startswith('.treeinfo'):
                treeinfo_file = os.path.join(root, fname)
                _logger.info("stripping repomd.xml checksum from %s" % treeinfo_file)
                DistSync.strip_treeinfo_repomd(treeinfo_file)
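DistSync.strip_treeinfo_repomd itself is not shown here; going only by the docstring above, it presumably drops repomd.xml entries from the treeinfo checksum section. A rough, hypothetical sketch of that stripping (not the real Pulp implementation):

try:
    import ConfigParser as configparser   # Python 2, like the code above
except ImportError:
    import configparser                   # Python 3


def strip_repomd_checksum(treeinfo_path):
    """Remove any [checksums] entry that refers to repomd.xml (sketch only)."""
    parser = configparser.RawConfigParser()
    parser.read(treeinfo_path)
    if parser.has_section('checksums'):
        for path, _checksum in parser.items('checksums'):
            if 'repomd.xml' in path:
                parser.remove_option('checksums', path)
    with open(treeinfo_path, 'w') as fp:
        parser.write(fp)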
Example No. 7
    def test_remote_is_none(self):
        self.model2.timestamp = None

        ret = DistSync.existing_distribution_is_current(
            self.model1, self.model2)

        self.assertTrue(ret is False)
Example No. 8
    def test_not_current(self):
        self.model1.timestamp = 600.0  # 10 mins after the epoch

        ret = DistSync.existing_distribution_is_current(
            self.model1, self.model2)

        self.assertTrue(ret is False)
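Together with the test_current example further down, these timestamp tests pin the comparison down fairly tightly. A sketch of the rule they imply (not the actual Pulp code): the existing distribution counts as current only when the remote treeinfo carries a timestamp and the existing one is at least as new.

def existing_distribution_is_current(existing, remote):
    # remote.timestamp is None   -> False (test_remote_is_none)
    # existing older than remote -> False (test_not_current)
    # equal timestamps           -> True  (test_current)
    if remote.timestamp is None:
        return False
    return existing.timestamp >= remote.timestamp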
Example No. 9
    def test_sanitizes_checksum_type(self, Distribution, RawConfigParser):
        """
        Ensure the function properly sanitizes checksum types.
        """
        parser = MagicMock()
        parser.has_section.return_value = True
        parser.items.return_value = [['path', 'sha:checksum']]
        RawConfigParser.return_value = parser

        model, files = DistSync.parse_treeinfo_file('/some/path')

        self.assertEqual(files[0]['checksumtype'], 'sha1')
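The parser mock feeds the value 'sha:checksum' through, and the test expects the legacy type name sha to come back as sha1. A minimal sketch of that sanitization step, assuming a plain type:digest split (the real helper lives in Pulp):

CHECKSUM_ALIASES = {'sha': 'sha1'}    # legacy treeinfo name -> canonical name


def split_checksum(value):
    checksum_type, digest = value.split(':', 1)
    return CHECKSUM_ALIASES.get(checksum_type, checksum_type), digest


assert split_checksum('sha:checksum') == ('sha1', 'checksum')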
Example No. 10
    def test_rhel5(self):
        path = os.path.join(DATA_PATH, 'treeinfo-rhel5')

        model, files = DistSync.parse_treeinfo_file(path)

        self.assertTrue(isinstance(model, models.Distribution))
        self.assertEqual(model.distribution_id, 'ks-Red Hat Enterprise Linux Server-foo-5.9-x86_64')

        self.assertEqual(len(files), 19)
        for item in files:
            self.assertTrue(item['relativepath'])
        self.assertEquals('foo', model.variant)
        self.assertEquals('Server', model.packagedir)
        self.assertEquals(1354213090.94, model.timestamp)
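For orientation, a treeinfo file is a small INI document; the fields asserted above live in its [general] section, and the 'ks-...' id reads as a family-variant-version-arch join inferred from the expected value. A minimal, self-contained reconstruction of just those fields (the real fixture and parser live in Pulp):

import configparser   # the Python 2 tests use the ConfigParser module instead

TREEINFO = """\
[general]
family = Red Hat Enterprise Linux Server
variant = foo
version = 5.9
arch = x86_64
timestamp = 1354213090.94
packagedir = Server
"""

parser = configparser.RawConfigParser()
parser.read_string(TREEINFO)
fields = dict(parser.items('general'))

distribution_id = 'ks-{family}-{variant}-{version}-{arch}'.format(**fields)
assert distribution_id == 'ks-Red Hat Enterprise Linux Server-foo-5.9-x86_64'
assert float(fields['timestamp']) == 1354213090.94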
Example No. 11
    def test_rhel5_optional(self):
        path = os.path.join(DATA_PATH, 'treeinfo-rhel5-no-optional-keys')

        model, files = DistSync.parse_treeinfo_file(path)

        self.assertTrue(isinstance(model, models.Distribution))
        self.assertEqual(model.distribution_id, 'ks-Red Hat Enterprise Linux Server--5.9-x86_64')

        self.assertEqual(len(files), 19)
        for item in files:
            self.assertTrue(item['relativepath'])

        self.assertEquals('', model.variant)
        self.assertEquals(None, model.packagedir)
Example No. 12
    def run(self):
        """
        Steps through the entire workflow of a repo sync.

        :return:    A SyncReport detailing how the sync went
        :rtype:     pulp.plugins.model.SyncReport
        """
        # Empty list could be returned in case _parse_as_mirrorlist()
        # was not able to find any valid url
        if not self.sync_feed:
            raise PulpCodedException(error_code=error_codes.RPM1004, reason='Not found')
        url_count = 0
        for url in self.sync_feed:
            # Verify that we have a feed url.
            # if there is no feed url, then we have nothing to sync
            if url is None:
                raise PulpCodedException(error_code=error_codes.RPM1005)
            # using this tmp dir ensures that cleanup leaves nothing behind, since
            # we delete below
            self.tmp_dir = tempfile.mkdtemp(dir=self.working_dir)
            url_count += 1
            try:
                with self.update_state(self.progress_report['metadata']):
                    metadata_files = self.check_metadata(url)
                    metadata_files = self.get_metadata(metadata_files)

                    # Save the default checksum from the metadata
                    self.save_default_metadata_checksum_on_repo(metadata_files)

                with self.update_state(self.content_report) as skip:
                    if not (skip or self.skip_repomd_steps):
                        self.update_content(metadata_files, url)

                _logger.info(_('Downloading additional units.'))

                with self.update_state(self.distribution_report,
                                       models.Distribution._content_type_id) as skip:
                    if not skip:
                        dist_sync = DistSync(self, url)
                        dist_sync.run()

                with self.update_state(self.progress_report['errata'], ids.TYPE_ID_ERRATA) as skip:
                    if not (skip or self.skip_repomd_steps):
                        self.get_errata(metadata_files)

                with self.update_state(self.progress_report['comps']) as skip:
                    if not (skip or self.skip_repomd_steps):
                        self.get_comps_file_units(metadata_files, group.process_group_element,
                                                  group.GROUP_TAG)
                        self.get_comps_file_units(metadata_files, group.process_category_element,
                                                  group.CATEGORY_TAG)
                        self.get_comps_file_units(metadata_files, group.process_environment_element,
                                                  group.ENVIRONMENT_TAG)

            except CancelException:
                report = self.conduit.build_cancel_report(self._progress_summary,
                                                          self.progress_report)
                report.canceled_flag = True
                return report

            except PulpCodedException, e:
                # Check if the caught exception indicates that the mirror is bad.
                # Try next mirror in the list without raising the exception.
                # In case it was the last mirror in the list, raise the exception.
                bad_mirror_exceptions = [error_codes.RPM1004, error_codes.RPM1006]
                if (e.error_code in bad_mirror_exceptions) and \
                        url_count != len(self.sync_feed):
                    continue
                else:
                    self._set_failed_state(e)
                    raise

            except Exception, e:
                # In case other exceptions were caught that are not related to the state of the
                # mirror, raise the exception immediately and do not iterate through the rest
                # of the mirrors.
                _logger.exception(e)
                self._set_failed_state(e)
                report = self.conduit.build_failure_report(self._progress_summary,
                                                           self.progress_report)
                return report
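Stripped of the Pulp plumbing, the exception handling above implements a simple failover rule: a "bad mirror" error moves on to the next URL unless it was the last one. A reduced, hypothetical sketch of that pattern (the real method does considerably more per-mirror bookkeeping and reporting):

RECOVERABLE = (IOError,)   # stand-in for the RPM1004/RPM1006 coded exceptions


def sync_from_mirrors(urls, sync_one):
    # e.g. sync_from_mirrors(['http://a/repo/', 'http://b/repo/'], download_repo)
    for count, url in enumerate(urls, start=1):
        try:
            return sync_one(url)
        except RECOVERABLE:
            if count != len(urls):
                continue   # bad mirror, but more remain: try the next one
            raise          # the last mirror failed too: propagate the error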
Example No. 13
    def test_current(self):
        ret = DistSync.existing_distribution_is_current(self.model1, self.model2)
        self.assertTrue(ret is True)
Example No. 14
    def setUp(self):
        path = os.path.join(DATA_PATH, 'treeinfo-rhel5')

        self.model1, files1 = DistSync.parse_treeinfo_file(path)
        self.model2, files2 = DistSync.parse_treeinfo_file(path)
Example No. 15
    def run(self):
        """
        Steps through the entire workflow of a repo sync.

        :return:    A SyncReport detailing how the sync went
        :rtype:     pulp.plugins.model.SyncReport
        """
        # Empty list could be returned in case _parse_as_mirrorlist()
        # was not able to find any valid url
        if not self.sync_feed:
            raise PulpCodedException(error_code=error_codes.RPM1004,
                                     reason='Not found')
        url_count = 0
        for url in self.sync_feed:
            # Verify that we have a feed url.
            # if there is no feed url, then we have nothing to sync
            if url is None:
                raise PulpCodedException(error_code=error_codes.RPM1005)
            # using this tmp dir ensures that cleanup leaves nothing behind, since
            # we delete below
            self.tmp_dir = tempfile.mkdtemp(dir=self.working_dir)
            url_count += 1
            try:
                with self.update_state(self.progress_report['metadata']):
                    metadata_files = self.check_metadata(url)
                    metadata_files = self.get_metadata(metadata_files)

                    # Save the default checksum from the metadata
                    self.save_default_metadata_checksum_on_repo(metadata_files)

                with self.update_state(self.content_report) as skip:
                    if not (skip or self.skip_repomd_steps):
                        self.update_content(metadata_files, url)

                _logger.info(_('Downloading additional units.'))

                with self.update_state(
                        self.distribution_report,
                        models.Distribution._content_type_id) as skip:
                    if not skip:
                        dist_sync = DistSync(self, url)
                        dist_sync.run()

                with self.update_state(self.progress_report['errata'],
                                       ids.TYPE_ID_ERRATA) as skip:
                    if not (skip or self.skip_repomd_steps):
                        self.get_errata(metadata_files)

                with self.update_state(self.progress_report['comps']) as skip:
                    if not (skip or self.skip_repomd_steps):
                        self.get_comps_file_units(metadata_files,
                                                  group.process_group_element,
                                                  group.GROUP_TAG)
                        self.get_comps_file_units(
                            metadata_files, group.process_category_element,
                            group.CATEGORY_TAG)
                        self.get_comps_file_units(
                            metadata_files, group.process_environment_element,
                            group.ENVIRONMENT_TAG)

                with self.update_state(
                        self.progress_report['purge_duplicates']) as skip:
                    if not (skip or self.skip_repomd_steps):
                        purge.remove_repo_duplicate_nevra(self.conduit.repo_id)

            except PulpCodedException, e:
                # Check if the caught exception indicates that the mirror is bad.
                # Try next mirror in the list without raising the exception.
                # In case it was the last mirror in the list, raise the exception.
                bad_mirror_exceptions = [
                    error_codes.RPM1004, error_codes.RPM1006
                ]
                if (e.error_code in bad_mirror_exceptions) and \
                        url_count != len(self.sync_feed):
                    continue
                else:
                    self._set_failed_state(e)
                    raise

            except Exception, e:
                # In case other exceptions were caught that are not related to the state of the
                # mirror, raise the exception immediately and do not iterate through the rest
                # of the mirrors.
                _logger.exception(e)
                self._set_failed_state(e)
                report = self.conduit.build_failure_report(
                    self._progress_summary, self.progress_report)
                return report
Example No. 16
    def test_bad_distribution_schema(self, mock_get_dist):
        parent = Mock()
        tmp_dir = Mock()
        dist = DistSync(parent, '')
        self.assertRaises(PulpCodedValidationException, dist.process_distribution, tmp_dir)