def __init__(self, step_type, step_description, lazy_status_conduit, download_requests):
    """
    Initialize a Step that downloads every download request provided.

    :param lazy_status_conduit: Conduit used to update the task status.
    :type lazy_status_conduit: LazyStatusConduit
    :param download_requests: List of download requests to process.
    :type download_requests: list of nectar.request.DownloadRequest
    """
    super(LazyUnitDownloadStep, self).__init__(
        step_type=step_type,
        status_conduit=lazy_status_conduit,
    )
    self.description = step_description
    self.download_requests = download_requests
    # Concurrency is taken from the server's lazy config section; the Pulp
    # stream request header marks these downloads as originating from Pulp.
    config = {
        MAX_CONCURRENT: int(pulp_conf.get('lazy', 'download_concurrency')),
        HEADERS: {PULP_STREAM_REQUEST_HEADER: 'true'},
        SSL_VALIDATION: True,
    }
    self.download_config = config
    self.downloader = HTTPThreadedDownloader(DownloaderConfig(**config), self)
def test_download_cancelled_in_failed(self, mock_started, mock_cancel):
    """Cancelling mid-run after failures halts the container download."""
    # Build five requests destined for a host that will fail to resolve.
    request_list = [
        Request(
            TYPE_ID,
            {
                'name': 'unit_%d' % n,
                'version': '1.0.%d' % n,
                'release': '1',
                'checksum': str(uuid4()),
            },
            'http://unit-city/unit_%d' % n,
            os.path.join(self.downloaded, 'unit_%d' % n))
        for n in range(0, 5)
    ]
    downloader = HTTPThreadedDownloader(DownloaderConfig())
    container = ContentContainer(path=self.tmp_dir)
    container.refresh = Mock()
    # Event trips after two status reports, simulating a user cancel.
    event = CancelEvent(2)
    report = container.download(event, downloader, request_list)
    self.assertTrue(mock_started.called)
    self.assertTrue(mock_cancel.called)
    self.assertEqual(report.total_passes, 1)
    self.assertEqual(report.total_sources, 2)
    self.assertEqual(len(report.downloads), 1)
    self.assertEqual(report.downloads[PRIMARY_ID].total_succeeded, 0)
    self.assertEqual(report.downloads[PRIMARY_ID].total_failed, 5)
def __init__(self, name, download_config, registry_url, working_dir):
    """
    Initialize the V2Repository.

    :param name: name of a docker repository
    :type name: basestring
    :param download_config: download configuration object
    :type download_config: nectar.config.DownloaderConfig
    :param registry_url: URL for the docker registry
    :type registry_url: basestring
    :param working_dir: full path to the directory where files should be saved
    :type working_dir: basestring
    """
    # Docker's registry aligns non-namespaced images to the library namespace.
    # If we have a docker registry image and no namespace, add the library
    # namespace to the image name.  The dots in the hostname are escaped:
    # the previous pattern used bare '.' which matched any character (e.g.
    # it would also match "registryXdockerYio").
    if '/' not in name and re.search(r'registry[-,\w]*\.docker\.io', registry_url, re.IGNORECASE):
        self.name = "library/" + name
    else:
        self.name = name
    self.download_config = download_config
    self.registry_url = registry_url
    self.downloader = HTTPThreadedDownloader(self.download_config, AggregatingEventListener())
    self.working_dir = working_dir
    # Auth token; populated later during registry authentication.
    self.token = None
def __init__(self, sync_conduit, config):
    """
    Initialize an ISOSyncRun.

    :param sync_conduit: the sync conduit to use for this sync run.
    :type sync_conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginCallConfiguration
    """
    self.sync_conduit = sync_conduit
    self._remove_missing_units = config.get(
        importer_constants.KEY_UNITS_REMOVE_MISSING,
        default=constants.CONFIG_UNITS_REMOVE_MISSING_DEFAULT)
    self._validate_downloads = config.get(
        importer_constants.KEY_VALIDATE,
        default=constants.CONFIG_VALIDATE_DEFAULT)
    self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
    # The _repo_url must end in a trailing slash, because we will use urljoin
    # to determine the path to PULP_MANIFEST later.  endswith() is used here
    # instead of indexing [-1], which raised IndexError on an empty feed.
    if not self._repo_url.endswith('/'):
        self._repo_url += '/'

    # Cast our config parameters to the correct types and use them to build
    # a Downloader.
    max_speed = config.get(importer_constants.KEY_MAX_SPEED)
    if max_speed is not None:
        max_speed = float(max_speed)
    max_downloads = config.get(importer_constants.KEY_MAX_DOWNLOADS)
    if max_downloads is not None:
        max_downloads = int(max_downloads)
    else:
        max_downloads = constants.CONFIG_MAX_DOWNLOADS_DEFAULT
    ssl_validation = config.get_boolean(importer_constants.KEY_SSL_VALIDATION)
    if ssl_validation is None:
        ssl_validation = constants.CONFIG_VALIDATE_DEFAULT
    downloader_config = {
        'max_speed': max_speed,
        'max_concurrent': max_downloads,
        'ssl_client_cert': config.get(importer_constants.KEY_SSL_CLIENT_CERT),
        'ssl_client_key': config.get(importer_constants.KEY_SSL_CLIENT_KEY),
        'ssl_ca_cert': config.get(importer_constants.KEY_SSL_CA_CERT),
        'ssl_validation': ssl_validation,
        'proxy_url': config.get(importer_constants.KEY_PROXY_HOST),
        'proxy_port': config.get(importer_constants.KEY_PROXY_PORT),
        'proxy_username': config.get(importer_constants.KEY_PROXY_USER),
        'proxy_password': config.get(importer_constants.KEY_PROXY_PASS),
    }
    downloader_config = DownloaderConfig(**downloader_config)
    # We pass self as the event_listener, so that we can receive the
    # callbacks in this class.
    if self._repo_url.lower().startswith('file'):
        self.downloader = LocalFileDownloader(downloader_config, self)
    else:
        self.downloader = HTTPThreadedDownloader(downloader_config, self)
    self.progress_report = SyncProgressReport(sync_conduit)
def build_downloader(url, nectar_config):
    """
    Return a Nectar downloader for a URL with the given nectar config.

    :param url: The URL is used to determine the scheme so the correct
                type of downloader can be created.
    :type url: basestring
    :param nectar_config: The configuration that should be used with the downloader
    :type nectar_config: nectar.config.DownloaderConfig
    :return: A configured downloader.
    :rtype: nectar.downloaders.base.Downloader
    :raise ValueError: When the URL scheme is not supported.
    """
    parsed = urlparse(url)
    scheme = parsed.scheme.lower()
    # Only local-file and http(s) feeds are supported here.
    if scheme in ('http', 'https'):
        return HTTPThreadedDownloader(nectar_config)
    if scheme == 'file':
        return LocalFileDownloader(nectar_config)
    raise ValueError(_('Scheme "{s}" not supported').format(s=parsed.scheme))
def __init__(self, name, download_config, registry_url, working_dir):
    """
    :param name: name of a docker repository
    :type name: basestring
    :param download_config: download configuration object
    :type download_config: nectar.config.DownloaderConfig
    :param registry_url: URL for the docker registry
    :type registry_url: basestring
    :param working_dir: full path to the directory where files should be saved
    :type working_dir: basestring
    """
    self.name = name
    self.download_config = download_config
    self.registry_url = registry_url
    # A single aggregating listener is shared with the threaded downloader
    # so results can be collected after each batch of requests.
    listener = AggregatingEventListener()
    self.listener = listener
    self.downloader = HTTPThreadedDownloader(download_config, listener)
    self.working_dir = working_dir
    # Filled in later (auth token / discovered endpoint).
    self.token = None
    self.endpoint = None
def get_downloader(config, url, **options):
    """
    Get a configured downloader.

    :param config: A plugin configuration.
    :type config: pulp.plugins.config.PluginCallConfiguration
    :param url: A URL.
    :type url: str
    :param options: Extended configuration.
    :type options: dict
    :return: A configured downloader.
    :rtype: nectar.downloaders.base.Downloader
    :raise ValueError: when the URL scheme is not supported.
    """
    parsed = urlparse(url)
    nectar_config = importer_config_to_nectar_config(config.flatten())
    scheme = parsed.scheme.lower()
    # Dispatch on scheme; anything outside this table is unsupported.
    factories = {
        'file': LocalFileDownloader,
        'http': HTTPThreadedDownloader,
        'https': HTTPThreadedDownloader,
    }
    if scheme not in factories:
        raise ValueError(_('Scheme "{s}" not supported').format(s=parsed.scheme))
    return factories[scheme](nectar_config)
def initialize(self):
    """
    Set up the nectar downloader.

    Originally based on the ISO sync setup.
    """
    config = self.get_config()
    self._validate_downloads = config.get(importer_constants.KEY_VALIDATE, default=True)
    self._repo_url = encode_unicode(config.get(importer_constants.KEY_FEED))
    # The _repo_url must end in a trailing slash, because we will use urljoin
    # to determine the path later.  endswith() is used instead of indexing
    # [-1], which raised IndexError when the configured feed was empty.
    if not self._repo_url.endswith('/'):
        self._repo_url += '/'

    downloader_config = importer_config_to_nectar_config(config.flatten())

    # We pass self as the event_listener, so that we can receive the
    # callbacks in this class.
    if self._repo_url.lower().startswith('file'):
        self.downloader = LocalFileDownloader(downloader_config, self)
    else:
        self.downloader = HTTPThreadedDownloader(downloader_config, self)
def test_download_fail_completely(self):
    """Every request fails when the catalog source is gone and primary 404s."""
    # Catalog ten units, then remove their backing directory so every
    # alternate-source attempt must fail; primary content also misses.
    _dir, cataloged = self.populate_catalog(UNIT_WORLD, 0, 10)
    shutil.rmtree(_dir)
    _dir = self.populate_content(PRIMARY, 0, 20)
    # primary
    request_list = [
        Request(
            TYPE_ID,
            {
                'name': 'unit_%d' % n,
                'version': '1.0.%d' % n,
                'release': '1',
                'checksum': str(uuid4()),
            },
            'http://redhat.com/%s/unit_%d' % (_dir, n),
            os.path.join(self.downloaded, 'unit_%d' % n))
        for n in range(0, 10)
    ]
    downloader = HTTPThreadedDownloader(DownloaderConfig())
    listener = MockListener()
    container = ContentContainer(path=self.tmp_dir)
    container.refresh = Mock()
    event = Event()
    report = container.download(event, downloader, request_list, listener)
    # primary
    for request in request_list:
        self.assertFalse(request.downloaded)
        self.assertEqual(len(request.errors), 1)
    self.assertEqual(listener.download_started.call_count, len(request_list))
    self.assertEqual(listener.download_succeeded.call_count, 0)
    self.assertEqual(listener.download_failed.call_count, len(request_list))
    self.assertEqual(report.total_passes, 1)
    self.assertEqual(report.total_sources, 2)
    self.assertEqual(len(report.downloads), 1)
    self.assertEqual(report.downloads[PRIMARY_ID].total_succeeded, 0)
    self.assertEqual(report.downloads[PRIMARY_ID].total_failed, 10)
def _create_and_configure_downloader(self, listener):
    """Build an HTTP threaded downloader from the importer's flattened config."""
    nectar_config = importer_config_to_nectar_config(self.config.flatten())
    return HTTPThreadedDownloader(nectar_config, listener)
def get_downloader(self, conduit, config, url): if url.startswith('http'): return HTTPThreadedDownloader(nectar_config(config)) if url.startswith('file'): return LocalFileDownloader(nectar_config(config)) raise ValueError('unsupported url')