def test_multiple_downloads(self):
    """Download a batch of real files mixed with bogus names in one call.

    Verifies that the aggregating listener tallies successes and failures
    independently, and that only the real files land on disk with the
    expected sizes.
    """
    cfg = config.DownloaderConfig()
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    bogus_file_names = ['notme', 'notmeeither']
    all_file_names = self.data_file_names + bogus_file_names
    num_good = len(self.data_file_names)

    url_list = []
    dest_list = []
    for name in all_file_names:
        url_list.append('http://localhost:%d/%s%s' % (self.server_port, self.data_directory, name))
        dest_list.append(os.path.join(self.download_dir, name))
    request_list = [request.DownloadRequest(u, d) for u, d in zip(url_list, dest_list)]

    downloader.download(request_list)

    self.assertEqual(len(lst.succeeded_reports), num_good)
    self.assertEqual(len(lst.failed_reports), len(bogus_file_names))

    # The real files must exist with their known sizes; the bogus ones must not.
    for index, destination in enumerate(dest_list[:num_good]):
        self.assertTrue(os.path.exists(destination))
        self.assertEqual(os.path.getsize(destination), self.data_file_sizes[index])
    for destination in dest_list[num_good:]:
        self.assertFalse(os.path.exists(destination))
def _download_isos(self, manifest):
    """
    Makes the calls to retrieve the ISOs from the manifest, storing them on disk and
    recording them in the Pulp database.

    :param manifest: The manifest containing a list of ISOs we want to download.
    :type  manifest: pulp_rpm.plugins.db.models.ISOManifest
    """
    self.progress_report.total_bytes = 0
    self.progress_report.num_isos = len(manifest)

    # Zero each ISO's download counter and accumulate the expected payload
    # size onto the report before any transfer starts.
    for iso in manifest:
        iso.bytes_downloaded = 0
        self.progress_report.total_bytes += iso.size
    self.progress_report.update_progress()

    # Build one DownloadRequest per ISO. Each ISO gets its own scratch
    # directory under the working directory so file names cannot collide.
    download_directory = common_utils.get_working_directory()
    download_requests = []
    for iso in manifest:
        scratch_dir = tempfile.mkdtemp(dir=download_directory)
        target_path = os.path.join(scratch_dir, os.path.basename(iso.url))
        download_requests.append(request.DownloadRequest(iso.url, target_path, iso))
    self.downloader.download(download_requests)
def _download_manifest(self):
    """
    Download the manifest file, and process it to return an ISOManifest.

    :return: manifest of available ISOs
    :rtype:  pulp_rpm.plugins.db.models.ISOManifest
    :raises IOError:    if the manifest could not be retrieved
    :raises ValueError: if the manifest was retrieved but could not be parsed
    """
    manifest_url = urljoin(self._repo_url, models.ISOManifest.FILENAME)
    # The manifest is small, so it is downloaded into an in-memory buffer
    # instead of being written to disk.
    manifest_buffer = StringIO()
    self.downloader.download([request.DownloadRequest(manifest_url, manifest_buffer)])

    # The progress report state tells us whether retrieval failed.
    if self.progress_report.state == self.progress_report.STATE_MANIFEST_FAILED:
        raise IOError(_("Could not retrieve %(url)s") % {'url': manifest_url})

    manifest_buffer.seek(0)
    try:
        return models.ISOManifest(manifest_buffer, self._repo_url)
    except ValueError:
        # Record the parse failure on the report, then re-raise with the
        # same user-facing message.
        self.progress_report.error_message = _('The PULP_MANIFEST file was not in the '
                                               'expected format.')
        self.progress_report.state = self.progress_report.STATE_MANIFEST_FAILED
        raise ValueError(self.progress_report.error_message)
def generate_download_requests(self):
    """
    Yield a Nectar DownloadRequest for each package that wasn't available locally.

    :return: A generator that yields DownloadRequests for the Package files.
    :rtype:  generator
    """
    for package in self.get_local_units_step.units_to_download:
        package_url = package.package_url(self._feed_url)
        file_name = os.path.basename(package_url)
        yield request.DownloadRequest(
            package_url, os.path.join(self.get_working_dir(), file_name), package)
def generate_download_requests(self):
    """
    For each package that is listed in self._packages_to_download, yield a Nectar
    DownloadRequest for its url attribute.

    :return: A generator that yields DownloadRequests for the Package files.
    :rtype:  generator
    """
    for package in self._packages_to_download:
        package_url = package['url']
        destination = os.path.join(self.working_dir, os.path.basename(package_url))
        yield request.DownloadRequest(package_url, destination, package)
def generate_download_requests(self):
    """
    For each package name that the user has requested, yield a Nectar
    DownloadRequest for its metadata file in JSON format.

    :return: A generator that yields DownloadRequests for the metadata files.
    :rtype:  generator
    """
    # Each package's manifest lives at pypi/<name>/json/ beneath the feed
    # URL; the response is collected in memory rather than on disk.
    for package_name in self.parent._package_names:
        manifest_url = urljoin(self.parent._feed_url, 'pypi/%s/json/' % package_name)
        yield request.DownloadRequest(manifest_url, StringIO(), {})
def generate_download_requests(self):
    """
    For each package that wasn't available locally, yield a Nectar
    DownloadRequest for its url attribute.

    :return: A generator that yields DownloadRequests for the Package files.
    :rtype:  generator
    """
    for package in self.get_local_units_step.units_to_download:
        # Consume the cached URL for this unit; each unit is downloaded once.
        package_url = self.unit_urls.pop(package)
        yield request.DownloadRequest(
            package_url,
            os.path.join(self.get_working_dir(), os.path.basename(package_url)),
            package)
def generate_download_requests(self):
    """
    For each project name in the parent step's list of projects, yield a Nectar
    DownloadRequest for the project's json metadata.

    :return: A generator that yields DownloadRequests for a project's json metadata.
    :rtype:  generator
    """
    for project_name in self.parent._project_names:
        # Stop producing requests as soon as the step has been canceled.
        if self.canceled:
            return
        metadata_url = urljoin(self.parent._feed_url, 'pypi/%s/json/' % project_name)
        yield request.DownloadRequest(metadata_url, StringIO(), {})
def test_single_download_success(self):
    """A single existing file downloads intact and is reported as a success."""
    cfg = config.DownloaderConfig()
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    file_name = self.data_file_names[0]
    file_path = os.path.join(self.data_directory, file_name)
    dest_path = os.path.join(self.download_dir, file_name)
    url = 'http://localhost:%d/%s' % (self.server_port, file_path)

    downloader.download([request.DownloadRequest(url, dest_path)])

    # The file must exist with its full size, and exactly one success report.
    self.assertTrue(os.path.exists(dest_path))
    self.assertEqual(os.path.getsize(dest_path), self.data_file_sizes[0])
    self.assertEqual(len(lst.succeeded_reports), 1)
    self.assertEqual(len(lst.failed_reports), 0)
def test_single_download_failure(self):
    """Requesting a nonexistent file yields one failure report with an error message."""
    cfg = config.DownloaderConfig()
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    file_name = 'idontexistanddontcreateme'
    file_path = os.path.join(self.data_directory, file_name)
    dest_path = os.path.join(self.download_dir, file_name)
    url = 'http://localhost:%d/%s' % (self.server_port, file_path)

    downloader.download([request.DownloadRequest(url, dest_path)])

    # Nothing lands on disk; the failure is surfaced through the listener.
    self.assertFalse(os.path.exists(dest_path))
    self.assertEqual(len(lst.succeeded_reports), 0)
    self.assertEqual(len(lst.failed_reports), 1)
    self.assertTrue(lst.failed_reports[0].error_msg is not None)
def test_throttling(self):
    """
    Downloading the ~500k fixture at max_speed=256000 bytes/sec (half the
    file size) should take at least two seconds but under four.
    """
    two_seconds = datetime.timedelta(seconds=2)
    # Fixed: this bound is 4 seconds but was previously misnamed
    # 'three_seconds', contradicting both its value and the comment below.
    four_seconds = datetime.timedelta(seconds=4)

    cfg = config.DownloaderConfig(max_speed=256000)  # 1/2 size of file
    lst = listener.AggregatingEventListener()
    downloader = threaded.HTTPThreadedDownloader(cfg, lst)

    # use the 500k file, should take >= 2 seconds to download, but < 4
    file_path = os.path.join(self.data_directory, self.data_file_names[1])
    dest_path = os.path.join(self.download_dir, self.data_file_names[1])
    url = 'http://localhost:%d/%s' % (self.server_port, file_path)
    req = request.DownloadRequest(url, dest_path)

    start = datetime.datetime.now()
    downloader.download([req])
    finish = datetime.datetime.now()

    elapsed = finish - start
    self.assertTrue(elapsed >= two_seconds)
    self.assertTrue(elapsed < four_seconds)
def _download(self, catalog_entry, request, responder):
    """
    Build a nectar downloader and download the content from the catalog entry.

    The download is performed by the alternate content container, so it is possible
    to use the streamer in conjunction with alternate content sources.

    :param catalog_entry: The catalog entry to download.
    :type  catalog_entry: pulp.server.db.model.LazyCatalogEntry
    :param request: The client content request.
    :type  request: twisted.web.server.Request
    :param responder: The file-like object that nectar should write to.
    :type  responder: Responder
    """
    importer, importer_config = repo_controller.get_importer_by_id(catalog_entry.importer_id)
    # Attach the catalog entry and the client's request so downstream
    # listeners can correlate the download with its originator.
    download_request = nectar_request.DownloadRequest(
        catalog_entry.url, responder,
        data={'catalog_entry': catalog_entry, 'client_request': request})
    downloader = importer.get_downloader(importer_config, catalog_entry.url,
                                         **catalog_entry.data)
    downloader.event_listener = StreamerListener(request, self.config)
    downloader.download_one(download_request, events=True)
    downloader.config.finalize()
def _download_isos(self, manifest):
    """
    Makes the calls to retrieve the ISOs from the manifest, storing them on disk and
    recording them in the Pulp database.

    :param manifest: The manifest containing a list of ISOs we want to download.
    :type  manifest: pulp_rpm.plugins.db.models.ISOManifest
    """
    self.progress_report.total_bytes = 0
    self.progress_report.num_isos = len(manifest)

    # Initialize each ISO's unit, zero its download counter, and sum the
    # expected payload size onto the report before any transfer begins.
    for iso in manifest:
        iso.init_unit(self.sync_conduit)
        iso.bytes_downloaded = 0
        self.progress_report.total_bytes += iso.size
    self.progress_report.update_progress()

    # One DownloadRequest per ISO, targeted at its unit storage path.
    requests_to_download = []
    for iso in manifest:
        requests_to_download.append(request.DownloadRequest(iso.url, iso.storage_path, iso))
    self.downloader.download(requests_to_download)