def test_generate_download_requests(self):
    """
    Ensure correct operation from generate_download_requests().

    Two units with URLs recorded in step.unit_urls are queued for download; the
    generator should lazily yield one request per unit, in order, with the URL from
    unit_urls and a destination built from the working dir plus the URL's basename.
    """
    repo = mock.MagicMock()
    conduit = mock.MagicMock()
    config = mock.MagicMock()
    working_dir = '/some/dir'
    step = sync.SyncStep(repo, conduit, config, working_dir)
    u1 = models.Package(name='foo', version='1.2.0')
    u2 = models.Package(name='foo', version='1.3.0')
    step.get_local_units_step.units_to_download.extend([u1, u2])
    step.unit_urls.update({
        u1: 'http://u1/foo-1.2.0.tar.gz',
        u2: 'http://u2/foo-1.3.0.tar.gz'})

    requests = step.generate_download_requests()

    # assertIsInstance (rather than assertTrue(isinstance(...))) reports the actual
    # type on failure. Checked before consuming so laziness itself is asserted.
    self.assertIsInstance(requests, types.GeneratorType)
    # For the remainder of our tests, it will be more useful if requests is a list
    requests = list(requests)
    self.assertEqual(len(requests), 2)
    request_urls = [r.url for r in requests]
    self.assertEqual(
        request_urls,
        ['http://u1/foo-1.2.0.tar.gz', 'http://u2/foo-1.3.0.tar.gz'])
    # The destinations should both have been paths
    request_destinations = [r.destination for r in requests]
    self.assertEqual(
        request_destinations,
        ['/some/dir/foo-1.2.0.tar.gz', '/some/dir/foo-1.3.0.tar.gz'])
    requests_data = [r.data for r in requests]
    self.assertEqual(requests_data, step.get_local_units_step.units_to_download)
def test_generate_download_requests(self):
    """
    Ensure correct operation from generate_download_requests().

    Here the download URL is derived from the configured feed ('mock/feed'), the
    'packages/' prefix, and each unit's relative path; the destination is built
    from the working dir plus the path's filename.
    """
    repo = mock.MagicMock()
    conduit = mock.MagicMock()
    config = mock.MagicMock()
    working_dir = '/some/dir'
    config.get.return_value = 'mock/feed'
    step = sync.SyncStep(repo, conduit, config, working_dir)
    u1 = models.Package(name='foo', version='1.2.0', path='url/1.2.tar.gz')
    u2 = models.Package(name='foo', version='1.3.0', path='url/1.3.tar.gz')
    u1._feed_url = u2._feed_url = "feed/"
    step.get_local_units_step.units_to_download.extend([u1, u2])

    requests = step.generate_download_requests()

    # assertIsInstance (rather than assertTrue(isinstance(...))) reports the actual
    # type on failure. Checked before consuming so laziness itself is asserted.
    self.assertIsInstance(requests, types.GeneratorType)
    # For the remainder of our tests, it will be more useful if requests is a list
    requests = list(requests)
    self.assertEqual(len(requests), 2)
    request_urls = [r.url for r in requests]
    self.assertEqual(
        request_urls,
        ['mock/feed/packages/url/1.2.tar.gz', 'mock/feed/packages/url/1.3.tar.gz'])
    # The destinations should both have been paths constructed from the filename
    request_destinations = [r.destination for r in requests]
    self.assertEqual(
        request_destinations,
        ['/some/dir/1.2.tar.gz', '/some/dir/1.3.tar.gz'])
    requests_data = [r.data for r in requests]
    self.assertEqual(requests_data, step.get_local_units_step.units_to_download)
def test___init___three_packages(self, generate_download_requests, super___init__,
                                 download_packages___init__):
    """
    Test the __init__() method when the user has specified three packages to sync.
    """
    repo = mock.MagicMock()
    repo.id = 'cool_repo'
    conduit = mock.MagicMock()
    config = mock.MagicMock()
    working_dir = '/some/dir'

    def _config_get(key, default=None):
        # Serve the comma-separated package list for the package-names key;
        # any other key (i.e. the feed) gets the feed URL.
        if key == constants.CONFIG_KEY_PACKAGE_NAMES:
            return 'numpy,scipy,django'
        return 'http://example.com/'

    config.get.side_effect = _config_get

    step = sync.SyncStep(repo, conduit, config, working_dir)

    # The superclass __init__ runs four times: once directly from this __init__,
    # and once for each of the three substeps it builds.
    self.assertEqual(super___init__.call_count, 4)
    # Let's assert that the direct call was cool.
    self.assertEqual(
        super___init__.mock_calls[0],
        mock.call(step, 'sync_step_main', repo, conduit, config, working_dir,
                  constants.IMPORTER_TYPE_ID))
    self.assertEqual(step.description, _('Synchronizing cool_repo repository.'))
    # Assert that the feed url and packages names are correct
    self.assertEqual(step._feed_url, 'http://example.com/')
    self.assertEqual(step._package_names, ['numpy', 'scipy', 'django'])
    self.assertEqual(step.available_units, [])
    self.assertEqual(step.unit_urls, {})
    # Exactly three child steps, in this order.
    self.assertEqual(len(step.children), 3)
    self.assertEqual(
        [type(child) for child in step.children],
        [sync.DownloadMetadataStep, GetLocalUnitsStep, sync.DownloadPackagesStep])
    # Make sure the steps were initialized properly
    downloads = generate_download_requests.return_value
    download_packages___init__.assert_called_once_with(
        step.children[2], 'sync_step_download_packages', downloads=downloads,
        repo=repo, config=config, conduit=conduit, working_dir=working_dir,
        description=_('Downloading and processing Python packages.'))
def test_sync(self, process_lifecycle, _build_final_report, mock_rebuild):
    """
    Ensure that sync() makes the correct calls.
    """
    # Only the repo mock is named — the rebuild assertion needs its repo_obj.
    repo = mock.MagicMock()
    step = sync.SyncStep(repo, mock.MagicMock(), mock.MagicMock(), '/some/dir')

    step.sync()

    # sync() should drive the step lifecycle, build the final report, and
    # trigger a content rebuild for the repo's database object.
    process_lifecycle.assert_called_once_with(step)
    _build_final_report.assert_called_once_with(step)
    mock_rebuild.assert_called_once_with(repo.repo_obj)
def sync_repo(self, repo, sync_conduit, config):
    """
    Synchronize content into the given repository.

    This call adds new content units to Pulp and associates them with the given
    repository. It may be implemented with multiple threads, but from the Pulp
    server's standpoint it is synchronous: it does not return until the sync is
    complete. It is not atomic — on error the importer is not responsible for
    rolling back unit additions or associations already made.

    The returned report communicates the results of the sync back to the user;
    its free-text "log" attribute should be i18n'd where applicable.

    :param repo:         metadata describing the repository
    :type  repo:         pulp.plugins.model.Repository
    :param sync_conduit: provides access to relevant Pulp functionality
    :type  sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config:       plugin configuration
    :type  config:       pulp.plugins.config.PluginCallConfiguration
    :return:             report of the details of the sync
    :rtype:              pulp.plugins.model.SyncReport
    """
    # Run the sync inside a scratch directory under the repo's working dir;
    # the finally clause guarantees cleanup even if the sync raises.
    scratch = tempfile.mkdtemp(dir=repo.working_dir)
    try:
        return sync.SyncStep(
            repo=repo, conduit=sync_conduit, config=config,
            working_dir=scratch).sync()
    finally:
        shutil.rmtree(scratch, ignore_errors=True)