def _update_units(self, request, unit_inventory):
    """
    Update units that have been updated on the parent since
    added or last updated in the child inventory.

    :param request: A synchronization request.
    :type request: SyncRequest
    :param unit_inventory: The inventory of both parent and child content units.
    :type unit_inventory: UnitInventory
    """
    download_list = []
    units = unit_inventory.updated_units()
    listener = ContentDownloadListener(self, request)
    for unit, unit_ref in units:
        storage_path = unit[constants.STORAGE_PATH]
        if storage_path:
            # Unit has an associated file: queue a download request; the
            # listener handles adding the unit once the file arrives.
            self._reset_storage_path(unit)
            unit_url, destination = self._url_and_destination(unit_inventory.base_URL, unit)
            _request = listener.create_request(unit_url, destination, unit, unit_ref)
            download_list.append(_request)
        else:
            # No file associated with the unit: add it directly.
            unit = unit_ref.fetch()
            self.add_unit(request, unit)
    if not download_list:
        # Nothing to download; all updated units were file-less.
        return
    container = ContentContainer()
    request.summary.sources = container.download(
        request.downloader, download_list, listener)
    request.summary.errors.extend(listener.error_list)
def get(self, request, source_id):
    """
    Get a single content source by its ID.

    :param request: WSGI request object, body contains bits to upload
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param source_id: A content source ID.
    :type source_id: str
    :raises: MissingResource if source id does not exist
    :return: requested content source object.
    :rtype: django.http.HttpResponse
    """
    source = ContentContainer().sources.get(source_id)
    if not source:
        raise MissingResource(source_id=source_id)
    serialized = source.dict()
    serialized['_href'] = reverse(
        'content_sources_resource', kwargs={'source_id': source.id})
    return generate_json_response_with_pulp_encoder(serialized)
def _download(self, request, entry, responder):
    """
    Download the file described by the catalog entry.

    :param request: The original twisted client HTTP request being handled
        by the streamer.
    :type request: twisted.web.server.Request
    :param entry: The catalog entry to download.
    :type entry: pulp.server.db.model.LazyCatalogEntry
    :param responder: The file-like object that nectar should write to.
    :type responder: Responder
    :return: The download report.
    :rtype: nectar.report.DownloadReport
    :raises DownloadFailed: when no succeeded report was produced.
    """
    downloader = None
    try:
        unit = self._get_unit(entry)
        downloader = self._get_downloader(request, entry)
        alt_request = ContainerRequest(entry.unit_type_id, unit.unit_key, entry.url, responder)
        listener = downloader.event_listener
        # threaded=False: download synchronously in the caller's thread.
        container = ContentContainer(threaded=False)
        container.download(downloader, [alt_request], listener)
        if listener.succeeded_reports:
            return listener.succeeded_reports[0]
        else:
            raise DownloadFailed()
    finally:
        # Best-effort cleanup.  If an exception fired before the downloader
        # was created, downloader is None and the AttributeError raised here
        # is deliberately swallowed (logged only at DEBUG level).
        try:
            downloader.config.finalize()
        except Exception:
            if logger.isEnabledFor(logging.DEBUG):
                logger.exception(_('finalize() failed.'))
def __init__(self, base_url, nectar_conf, units, dst_dir, listener, url_modify=None):
    """
    Initialize the download object.

    :param base_url: The repository base url.
    :type base_url: str
    :param nectar_conf: Configuration used to build the primary downloader.
    :param units: An iterable of units to download.
    :type units: iterable
    :param dst_dir: The absolute path to where the packages are to be downloaded.
    :type dst_dir: str
    :param listener: A nectar listener.
    :type listener: nectar.listener.DownloadListener
    :param url_modify: Optional URL modifier; a default RepoURLModifier is
        created when not supplied.
    :type url_modify: pulp_rpm.plugins.importers.yum.utils.RepoURLModifier
    """
    if url_modify is None:
        url_modify = RepoURLModifier()
    self.base_url = base_url
    self.units = units
    self.dst_dir = dst_dir
    self.url_modify = url_modify
    self.listener = ContainerListener(listener)
    self.primary = create_downloader(base_url, nectar_conf)
    self.container = ContentContainer()
def test_download_to_stream(self):
    """
    Download cataloged (unit-world) and uncataloged (primary) units
    directly into in-memory StringIO destinations, single-threaded.
    """
    request_list = []
    _dir, cataloged = self.populate_catalog(ORPHANED, 0, 10)
    _dir, cataloged = self.populate_catalog(UNIT_WORLD, 0, 10)
    _dir = self.populate_content(PRIMARY, 0, 20)
    # unit-world: requests resolved via catalog entries
    for n in range(0, 10):
        request = Request(
            cataloged[n].type_id,
            cataloged[n].unit_key,
            'file://%s/unit_%d' % (_dir, n),
            StringIO())
        request_list.append(request)
    # primary: units not in the catalog fall back to the primary source.
    # NOTE(review): unit_10 is skipped (range starts at 11), giving the
    # primary source 9 requests — presumably intentional so the succeeded
    # counts differ per source; confirm.
    for n in range(11, 20):
        unit_key = {
            'name': 'unit_%d' % n,
            'version': '1.0.%d' % n,
            'release': '1',
            'checksum': str(uuid4())
        }
        request = Request(
            TYPE_ID,
            unit_key,
            'file://%s/unit_%d' % (_dir, n),
            StringIO())
        request_list.append(request)
    downloader = LocalFileDownloader(DownloaderConfig())
    listener = Mock()
    container = ContentContainer(path=self.tmp_dir)
    container.threaded = False
    container.refresh = Mock()
    # test
    report = container.download(downloader, request_list, listener)
    # validation
    # unit-world: each destination stream holds unit-world content
    for i in range(0, 10):
        request = request_list[i]
        self.assertTrue(request.downloaded)
        self.assertEqual(len(request.errors), 0)
        fp = request.destination
        s = fp.getvalue()
        self.assertTrue(UNIT_WORLD in s)
    # primary
    # NOTE(review): starts at 11, so request_list[10] (unit_11) is never
    # individually validated; the aggregate counts below still cover it.
    for i in range(11, len(request_list)):
        request = request_list[i]
        self.assertTrue(request.downloaded)
        self.assertEqual(len(request.errors), 0)
        fp = request.destination
        s = fp.getvalue()
        self.assertTrue(PRIMARY in s)
    self.assertEqual(report.total_sources, 2)
    self.assertEqual(len(report.downloads), 2)
    self.assertEqual(report.downloads[PRIMARY_ID].total_succeeded, 9)
    self.assertEqual(report.downloads[PRIMARY_ID].total_failed, 0)
    self.assertEqual(report.downloads[UNIT_WORLD].total_succeeded, 10)
    self.assertEqual(report.downloads[UNIT_WORLD].total_failed, 0)
def test_construction(self, fake_load):
    """Constructing a container loads sources from the given path."""
    conf_path = 'path-1'
    # test
    ContentContainer(conf_path)
    # validation
    fake_load.assert_called_with(conf_path)
def test_init(self, fake_load):
    """__init__ loads sources and defaults threaded to True."""
    conf_path = 'path-1'
    # test
    container = ContentContainer(conf_path)
    # validation
    fake_load.assert_called_with(conf_path)
    self.assertEqual(container.sources, fake_load.return_value)
    self.assertEqual(container.threaded, True)
def test_purge_orphans(self, fake_manager, fake_load):
    """purge_orphans delegates to the manager with the loaded source IDs."""
    loaded = {'A': 1, 'B': 2, 'C': 3}
    fake_load.return_value = loaded
    container = ContentContainer('')
    # test
    container.purge_orphans()
    # validation
    fake_manager().purge_orphans.assert_called_with(loaded.keys())
def test_download(self, fake_load):
    """
    Download with three sources and six requests; verify collation,
    per-source downloader dispatch, and the aggregated report.
    """
    sources = []
    for n in range(3):
        s = ContentSource('s-%d' % n, {})
        s.get_downloader = Mock()
        sources.append(s)
    fake_load.return_value = sources
    request_list = []
    for n in range(6):
        r = Request('T', {}, 'url-%d' % n, 'path-%d' % n)
        # each request resolves to a different tail of the source list
        r.find_sources = Mock(return_value=sources[n % 3:])
        request_list.append(r)
    # First collation pass yields per-source nectar request lists;
    # the second (empty) dict terminates the download loop after one pass.
    collated = [{
        sources[0]: ['nectar-1'],
        sources[1]: ['nectar-2', 'nectar-3', 'nectar-4'],
        sources[2]: ['nectar-5', 'nectar-6']
    }, {}]
    fake_collated = Mock(side_effect=collated)
    fake_listener = Mock()
    canceled = FakeEvent()
    fake_primary = PrimarySource(Mock())
    # test
    container = ContentContainer('')
    container.refresh = Mock()
    container.collated = fake_collated
    report = container.download(canceled, fake_primary, request_list, fake_listener)
    # validation
    container.refresh.assert_called_with(canceled)
    for r in request_list:
        r.find_sources.assert_called_with(fake_primary, container.sources)
    self.assertEqual(report.total_passes, 1)
    self.assertEqual(report.total_sources, len(sources))
    self.assertEqual(len(report.downloads), 3)
    # Mocked downloaders report nothing succeeded or failed.
    for source in sources:
        self.assertEqual(report.downloads[source.id].total_succeeded, 0)
        self.assertEqual(report.downloads[source.id].total_failed, 0)
    # Each source's downloader was wired to the cancel event and listener
    # and invoked with exactly its collated nectar requests.
    for source in sources:
        source.get_downloader.assert_called_with()
        downloader = source.get_downloader()
        listener = downloader.event_listener
        self.assertEqual(listener.cancel_event, canceled)
        self.assertEqual(listener.downloader, downloader)
        self.assertEqual(listener.listener, fake_listener)
        downloader.download.assert_called_with(collated[0][source])
def test_download_canceled_after_collated(self, fake_load):
    """
    Cancellation raised between collation and dispatch: only one source
    obtains a downloader before the loop observes the cancel event.
    """
    sources = []
    for n in range(3):
        s = ContentSource('s-%d' % n, {})
        s.get_downloader = Mock()
        sources.append(s)
    fake_load.return_value = sources
    request_list = []
    for n in range(6):
        r = Request('T', {}, 'url-%d' % n, 'path-%d' % n)
        # each request resolves to a different tail of the source list
        r.find_sources = Mock(return_value=sources[n % 3:])
        request_list.append(r)
    collated = [{
        sources[0]: ['nectar-1'],
        sources[1]: ['nectar-2', 'nectar-3', 'nectar-4'],
        sources[2]: ['nectar-5', 'nectar-6']
    }, {}]
    fake_collated = Mock(side_effect=collated)
    fake_listener = Mock()
    canceled = Mock()
    # First isSet() poll False (work proceeds), subsequent polls True.
    canceled.isSet.side_effect = [False, True, True]
    fake_primary = PrimarySource(Mock())
    # test
    container = ContentContainer('')
    container.refresh = Mock()
    container.collated = fake_collated
    report = container.download(canceled, fake_primary, request_list, fake_listener)
    # validation
    container.refresh.assert_called_with(canceled)
    for r in request_list:
        r.find_sources.assert_called_with(fake_primary, container.sources)
    # Exactly one source got as far as creating a downloader.
    called = 0
    for s in sources:
        if s.get_downloader.called:
            called += 1
    self.assertEqual(called, 1)
    self.assertEqual(report.total_passes, 1)
    self.assertEqual(report.total_sources, len(sources))
    self.assertEqual(len(report.downloads), 1)
    self.assertEqual(report.downloads[sources[2].id].total_succeeded, 0)
    self.assertEqual(report.downloads[sources[2].id].total_failed, 0)
def refresh(self):
    """
    Spawn an asynchronous task refreshing all content sources.

    :raises OperationPostponed: always, to report the spawned task.
    """
    container = ContentContainer()
    # One action tag for the refresh-all, plus a resource tag per source.
    task_tags = [action_tag(ACTION_REFRESH_ALL_CONTENT_SOURCES)]
    for source_id in container.sources.keys():
        task_tags.append(resource_tag(RESOURCE_CONTENT_SOURCE, source_id))
    task_result = content.refresh_content_sources.apply_async(tags=task_tags)
    raise OperationPostponed(task_result)
def POST(self, source_id, action):
    """
    Dispatch a content source action by name.

    :param source_id: A content source ID.
    :type source_id: str
    :param action: Name of the action method to invoke on self.
    :type action: str
    :raises MissingResource: when the source id is unknown.
    :raises BadRequest: when the action name is unknown.
    """
    source = ContentContainer().sources.get(source_id)
    if not source:
        raise MissingResource(source_id=source_id)
    method = getattr(self, action, None)
    if not method:
        raise BadRequest()
    return method(source_id)
def GET(self):
    """
    Get all content sources.

    :return: List of serialized sources.
    :rtype: list
    """
    serialized = []
    for source in ContentContainer().sources.values():
        entry = source.dict()
        entry.update(serialization.link.child_link_obj(source.id))
        serialized.append(entry)
    return self.ok(serialized)
def GET(self, source_id):
    """
    Get a content source by ID.

    :param source_id: A content source ID.
    :type source_id: str
    :return: A content source object.
    :rtype: dict
    :raises MissingResource: when the source id is unknown.
    """
    source = ContentContainer().sources.get(source_id)
    if not source:
        raise MissingResource(source_id=source_id)
    return self.ok(source.dict())
def test_refresh_exception(self, mock_refresh):
    """A raised refresh yields failed reports and an empty catalog."""
    container = ContentContainer(path=self.tmp_dir)
    # test
    report = container.refresh(force=True)
    # validation
    self.assertEqual(len(report), 2)
    for entry in report:
        self.assertFalse(entry.succeeded)
        self.assertEqual(entry.added_count, 0)
        self.assertEqual(entry.deleted_count, 0)
        self.assertEqual(len(entry.errors), 1)
    self.assertEqual(mock_refresh.call_count, 2)
    self.assertEqual(ContentCatalog.get_collection().find().count(), 0)
def test_purge_orphans(self):
    """Only entries belonging to orphaned sources are purged."""
    for source_id in (ORPHANED, UNDERGROUND, UNIT_WORLD):
        self.populate_catalog(source_id, 0, 10)
    collection = ContentCatalog.get_collection()
    self.assertEqual(collection.find().count(), 30)
    container = ContentContainer(path=self.tmp_dir)
    # test
    container.purge_orphans()
    # validation: ORPHANED entries removed, the other two sources intact
    self.assertEqual(collection.find().count(), 20)
    self.assertEqual(collection.find({'source_id': ORPHANED}).count(), 0)
    self.assertEqual(collection.find({'source_id': UNDERGROUND}).count(), 10)
    self.assertEqual(collection.find({'source_id': UNIT_WORLD}).count(), 10)
def test_forced_refresh(self, fake_manager, fake_load):
    """force=True refreshes every source even when catalog entries exist."""
    sources = {}
    for index in range(3):
        source = ContentSource('s-%d' % index, {})
        source.refresh = Mock()
        sources[source.id] = source
    fake_manager().has_entries.return_value = True
    fake_load.return_value = sources
    # test
    container = ContentContainer('')
    container.refresh(force=True)
    # validation
    for source in sources.values():
        source.refresh.assert_called_with()
def test_download_canceled_before_collated(self, fake_load):
    """An already-set cancel event stops download before collation."""
    fake_load.return_value = []
    canceled = FakeEvent()
    canceled.set()
    container = ContentContainer('')
    container.refresh = Mock()
    container.collated = Mock()
    # test
    report = container.download(canceled, None, [], None)
    # validation: refresh happened but collation never ran
    container.refresh.assert_called_with(canceled)
    self.assertFalse(container.collated.called)
    self.assertEqual(report.total_passes, 0)
    self.assertEqual(report.total_sources, 0)
    self.assertEqual(len(report.downloads), 0)
def test_refresh_failure(self, mock_plugin):
    """Plugin refresh failures produce failed reports, nothing cataloged."""
    container = ContentContainer(path=self.tmp_dir)
    # test
    report = container.refresh(force=True)
    # validation
    self.assertEqual(len(report), 5)
    for entry in report:
        self.assertFalse(entry.succeeded)
        self.assertEqual(entry.added_count, 0)
        self.assertEqual(entry.deleted_count, 0)
        self.assertEqual(len(entry.errors), 1)
    plugin = mock_plugin.return_value[0]
    self.assertEqual(plugin.refresh.call_count, 5)
    self.assertEqual(ContentCatalog.get_collection().find().count(), 0)
def refresh(request):
    """
    Spawn an asynchronous task refreshing all content sources.

    :param request: WSGI request object, body contains bits to upload
    :type request: django.core.handlers.wsgi.WSGIRequest
    :raises: OperationPostponed when an async operation is performed
    """
    container = ContentContainer()
    # One action tag for the refresh-all, plus a resource tag per source.
    task_tags = [tags.action_tag(ACTION_REFRESH_ALL_CONTENT_SOURCES)]
    for source_id in container.sources.keys():
        task_tags.append(tags.resource_tag(RESOURCE_CONTENT_SOURCE, source_id))
    task_result = content.refresh_content_sources.apply_async(tags=task_tags)
    raise OperationPostponed(task_result)
def __init__(self, refresh_conduit, content_source_id=None):
    """
    :param refresh_conduit: Conduit providing access to relative Pulp functionality
    :type refresh_conduit: pulp.server.content.sources.steps.ContentSourceConduit
    :param content_source_id: Id of content source to refresh; when omitted,
        all loaded sources are refreshed.
    :type content_source_id: str
    """
    super(ContentSourcesRefreshStep, self).__init__(
        step_type=reporting_constants.REFRESH_STEP_CONTENT_SOURCE,
        status_conduit=refresh_conduit,
        non_halting_exceptions=[PulpCodedTaskException])
    self.container = ContentContainer()
    if content_source_id:
        self.sources = [self.container.sources[content_source_id]]
    else:
        # Only the source objects are needed, not their keys.
        self.sources = list(self.container.sources.values())
    self.description = _("Refreshing content sources")
def test_refresh_canceled(self, fake_load):
    """An already-set cancel event prevents any source from refreshing."""
    sources = {}
    for index in range(3):
        source = ContentSource('s-%d' % index, {})
        source.refresh = Mock()
        sources[source.id] = source
    fake_load.return_value = sources
    # test
    canceled = FakeEvent()
    canceled.set()
    container = ContentContainer('')
    container.refresh(canceled, force=True)
    # validation
    for source in sources.values():
        self.assertFalse(source.refresh.called)
def __init__(self, base_url, nectar_conf, units, dst_dir, listener):
    """
    Initialize the download object.

    :param base_url: The repository base url.
    :type base_url: str
    :param nectar_conf: Configuration used to build the primary downloader.
    :param units: An iterable of units to download.
    :type units: iterable
    :param dst_dir: The absolute path to where the packages are to be downloaded.
    :type dst_dir: str
    :param listener: A nectar listener.
    :type listener: nectar.listener.DownloadListener
    """
    self.base_url = base_url
    self.units = units
    self.dst_dir = dst_dir
    # Fresh (unset) event; callers may set it to cancel the download.
    self.canceled = Event()
    self.listener = ContainerListener(listener)
    self.primary = create_downloader(base_url, nectar_conf)
    self.container = ContentContainer()
def test_refresh(self, fake_manager, fake_load):
    """Each source refreshes and the per-source results are aggregated."""
    sources = {}
    for index in range(3):
        source = ContentSource('s-%d' % index, {})
        source.refresh = Mock(return_value=[index])
        source.get_downloader = Mock()
        sources[source.id] = source
    fake_manager().has_entries.return_value = False
    fake_load.return_value = sources
    container = ContentContainer('')
    # test
    report = container.refresh()
    # validation
    for source in sources.values():
        source.refresh.assert_called_with()
    self.assertEqual(sorted(report), [0, 1, 2])
def _add_units(self, request, unit_inventory):
    """
    Determine the list of units contained in the parent inventory
    but are not contained in the child inventory and add them.
    For each unit, this is performed in the following steps:
      1. Download the file (if defined) associated with the unit.
      2. Add the unit to the child inventory.
      3. Associate the unit to the repository.
    The unit is added only:
      1. If no file is associated with unit.
      2. The file associated with the unit is successfully downloaded.
    For units with files, the unit is added to the inventory as part of the
    unit download manager callback.

    :param request: A synchronization request.
    :type request: SyncRequest
    :param unit_inventory: The inventory of both parent and child content units.
    :type unit_inventory: UnitInventory
    """
    download_list = []
    units = unit_inventory.units_on_parent_only()
    request.progress.begin_adding_units(len(units))
    listener = ContentDownloadListener(self, request)
    for unit, unit_ref in units:
        # Bail out promptly when the sync has been cancelled.
        if request.cancelled():
            return
        self._reset_storage_path(unit)
        if not self._needs_download(unit):
            # unit has no file associated; add it directly
            self.add_unit(request, unit_ref.fetch())
            continue
        # Queue the file download; the listener adds the unit on success.
        unit_path, destination = self._path_and_destination(unit)
        unit_URL = pathlib.url_join(unit_inventory.base_URL, unit_path)
        _request = listener.create_request(unit_URL, destination, unit, unit_ref)
        download_list.append(_request)
    # Re-check cancellation before starting the (potentially long) download.
    if request.cancelled():
        return
    container = ContentContainer()
    request.summary.sources = \
        container.download(request.cancel_event, request.downloader, download_list, listener)
    request.summary.errors.extend(listener.error_list)
def test_refresh(self, mock_plugin): container = ContentContainer(path=self.tmp_dir) # test report = container.refresh(force=True) # validation plugin = mock_plugin.return_value[0] self.assertEqual(plugin.refresh.call_count, 5) self.assertEqual(len(report), 5) for r in report: self.assertTrue(r.succeeded) self.assertEqual(r.added_count, 100) self.assertEqual(r.deleted_count, 0) calls = iter(plugin.refresh.call_args_list) for source in ContentSource.load_all(self.tmp_dir).values(): for url in source.urls: args = calls.next()[0] self.assertTrue(isinstance(args[0], CatalogerConduit)) self.assertEqual(args[1], source.descriptor) self.assertEqual(args[2], url)
def test_threaded_download(self, fake_load, fake_refresh, fake_primary, fake_batch):
    """
    download() loads sources, refreshes, wraps the downloader in a primary
    source, builds a batch, runs it, and returns the batch's result.
    """
    path = Mock()
    downloader = Mock()
    requests = Mock()
    listener = Mock()
    _batch = Mock()
    # FIX: removed dead `_batch.download.return_value = 123` — the report
    # is asserted against `_batch.return_value` (batch is *called*, its
    # `download` attribute never is), so that setup was misleading noise.
    fake_batch.return_value = _batch
    # test
    container = ContentContainer(path)
    report = container.download(downloader, requests, listener)
    # validation
    fake_load.assert_called_with(path)
    fake_refresh.assert_called_with()
    fake_primary.assert_called_with(downloader)
    fake_batch.assert_called_with(fake_primary(), container, requests, listener)
    _batch.assert_called_with()
    self.assertEqual(report, _batch.return_value)
def test_refresh_raised(self, fake_manager, fake_load):
    """
    When every source's refresh raises, each report entry carries the
    raised error message.
    """
    sources = {}
    for n in range(3):
        s = ContentSource('s-%d' % n, {})
        s.refresh = Mock(side_effect=ValueError('must be int'))
        s.get_downloader = Mock()
        sources[s.id] = s
    fake_manager().has_entries.return_value = False
    fake_load.return_value = sources
    # test
    container = ContentContainer('')
    report = container.refresh()
    # validation
    for s in sources.values():
        s.refresh.assert_called_with()
    for r in report:
        # FIX: the original did `r.errors = ['must be int']`, which ASSIGNS
        # to the report instead of asserting — the loop verified nothing.
        self.assertEqual(r.errors, ['must be int'])
def post(self, request, source_id, action):
    """
    Dispatch a single content source action by name.

    :param request: WSGI request object, body contains bits to upload
    :type request: django.core.handlers.wsgi.WSGIRequest
    :param source_id: A content source ID.
    :type source_id: str
    :param action: Name of action to perform
    :type action: str
    :raises: MissingResource if source id does not exist
    """
    source = ContentContainer().sources.get(source_id)
    if not source:
        raise MissingResource(source_id=source_id)
    method = getattr(self, action, None)
    if not method:
        return HttpResponseBadRequest('bad request')
    return method(request, source_id)
def get(self, request):
    """
    Get all content sources.

    :param request: WSGI request object, body contains bits to upload
    :type request: django.core.handlers.wsgi.WSGIRequest
    :return: list of sources
    :rtype: django.http.HttpResponse
    """
    serialized = []
    for source in ContentContainer().sources.values():
        entry = source.dict()
        entry['_href'] = reverse(
            'content_sources_resource', kwargs={'source_id': source.id})
        serialized.append(entry)
    return generate_json_response_with_pulp_encoder(serialized)