def test_basic_auth(self):
    """An HTTP pool built with basic-auth headers authenticates against httpbin."""
    auth_headers = {"basic_auth": "user:pass"}
    downloader = FileDownloader("test", ["test"], urllb3_headers=auth_headers)
    pool = downloader._get_http_pool(secure=True)
    response = pool.request("GET", "https://httpbin.org/basic-auth/user/pass")
    assert response.status == 200
def test_basic_auth(self):
    """An HTTP pool built with basic-auth headers authenticates against httpbin."""
    auth_headers = {'basic_auth': 'user:pass'}
    downloader = FileDownloader('test', ['test'], urllb3_headers=auth_headers)
    pool = downloader._get_http_pool(secure=True)
    response = pool.request('GET', 'https://httpbin.org/basic-auth/user/pass')
    assert response.status == 200
def test_cb(self):
    """Attaching a progress hook must not break a verified download."""
    def on_progress(status):
        pass

    downloader = FileDownloader(FILENAME, URLS, hexdigest=FILE_HASH,
                                progress_hooks=[on_progress], verify=True)
    data = downloader.download_verify_return()
    assert data is not None
def test_auth_header(self):
    """A pre-encoded Authorization header authenticates against httpbin."""
    auth_headers = {"Authorization": "Basic dXNlcjpwYXNz"}
    downloader = FileDownloader("test", ["test"], headers=auth_headers)
    pool = downloader._get_http_pool(secure=True)
    response = pool.request("GET", "https://httpbin.org/basic-auth/user/pass")
    assert response.status == 200
def _full_update(self):
    """Download the full update archive, verify it, and write it to disk.

    Uses the injected ``self.downloader`` factory when one is configured,
    otherwise falls back to ``FileDownloader``. Returns True on success.
    """
    log.debug("Starting full update")
    file_hash = self._get_file_hash_from_manifest()
    with ChDir(self.update_folder):
        log.debug("Downloading update...")
        if self.downloader:
            downloader = self.downloader(self.filename, self.update_urls,
                                         hexdigest=file_hash)
        else:
            downloader = FileDownloader(
                self.filename,
                self.update_urls,
                hexdigest=file_hash,
                verify=self.verify,
                progress_hooks=self.progress_hooks,
                max_download_retries=self.max_download_retries,
                urllb3_headers=self.urllib3_headers,
            )
        if downloader.download_verify_write():
            log.debug("Download Complete")
            return True
        # pragma: no cover
        log.debug("Failed To Download Latest Version")
        return False
def _get_manifest_from_http(self):
    """Fetch the version manifest over HTTP, trying each candidate file name.

    Returns the decompressed manifest bytes, or None when every
    candidate fails to download or decompress.
    """
    log.debug("Downloading online version file")
    for vf in (self.version_file, self.version_file_compat):
        try:
            if self.downloader:
                fd = self.downloader(vf, self.update_urls)
            else:
                fd = FileDownloader(
                    vf,
                    self.update_urls,
                    verify=self.verify,
                    urllb3_headers=self.urllib3_headers,
                )
            data = fd.download_verify_return()
            try:
                decompressed_data = _gzip_decompress(data)
            except IOError:
                # Re-raised so the outer handler moves on to the next
                # candidate; only logged here.
                log.debug("Failed to decompress gzip file")
                raise
            log.debug("Version file download successful")
            # Writing version file to application data directory
            self._write_manifest_to_filesystem(decompressed_data, vf)
            return decompressed_data
        except Exception as err:
            log.debug(err, exc_info=True)
            continue
    log.debug("Version file download failed")
    return None
def _download_verify_patches(self):
    """Download and verify every patch listed in ``self.patch_data``.

    Patches are applied sequentially, so the first failed download aborts
    the run and returns False. Progress hooks receive a status dict with
    keys ``total``, ``downloaded`` and ``status``. Returns True when all
    patches were downloaded and verified.
    """
    log.debug('Downloading patches')
    downloaded = 0
    total = len(self.patch_data)
    for p in self.patch_data:
        # Initialize downloader
        fd = FileDownloader(p['patch_name'], p['patch_urls'],
                            p['patch_hash'], self.verify)
        # Attempt to download resource
        data = fd.download_verify_return()
        if data is None:
            # Since patches are applied sequentially
            # we cannot continue successfully
            status = {'total': total,
                      'downloaded': downloaded,
                      'status': 'failed to download all patches'}
            self._call_progress_hooks(status)
            return False
        self.patch_binary_data.append(data)
        downloaded += 1
        # BUG FIX: key was misspelled 'downloaed' here and in the final
        # status below, while the failure branch used 'downloaded' —
        # progress hooks now always see a consistent key.
        status = {'total': total,
                  'downloaded': downloaded,
                  'status': 'downloading'}
        self._call_progress_hooks(status)
    status = {'total': total,
              'downloaded': downloaded,
              'status': 'finished'}
    self._call_progress_hooks(status)
    return True
def _get_key_data(self):
    """Download, decompress, and cache the key file.

    Returns the decompressed key file bytes, or None on any failure
    (download error or gzip decompression error).
    """
    log.debug("Downloading key file")
    try:
        if self.downloader:
            fd = self.downloader(self.key_file, self.update_urls)
        else:
            fd = FileDownloader(
                self.key_file,
                self.update_urls,
                verify=self.verify,
                urllb3_headers=self.urllib3_headers,
            )
        data = fd.download_verify_return()
        try:
            decompressed_data = _gzip_decompress(data)
        except IOError:
            # Re-raised so the outer handler returns None; only logged here
            log.debug("Failed to decompress gzip file")
            raise
        log.debug("Key file download successful")
        # Writing key file to application data directory
        self._write_manifest_to_filesystem(decompressed_data, self.key_file)
        return decompressed_data
    except Exception as err:
        # BUG FIX: message previously said "Version file download failed",
        # which was copied from the manifest downloader.
        log.debug("Key file download failed")
        log.debug(err, exc_info=True)
        return None
def test_cb(self):
    """Attaching a progress hook must not break the download."""
    def on_progress(status):
        pass

    downloader = FileDownloader(FILENAME, URL, FILE_HASH,
                                progress_hooks=[on_progress])
    data = downloader.download_verify_return()
    assert data is not None
def test_bad_content_length(self):
    """A response with no content-length header yields None."""
    class FakeHeaders(object):
        headers = {}

    downloader = FileDownloader(FILENAME, URLS, hexdigest=FILE_HASH,
                                verify=True)
    assert downloader._get_content_length(FakeHeaders()) is None
def test_return_fail(self):
    """A bogus hexdigest must make verification fail and return None."""
    downloader = FileDownloader(FILENAME, URLS,
                                'JKFEIFJILEFJ983NKFNKL', verify=True)
    data = downloader.download_verify_return()
    assert data is None
def test_bad_content_length(self):
    """A response with no content-length header yields the default 100001."""
    class FakeHeaders(object):
        headers = {}

    downloader = FileDownloader(FILENAME, URL, FILE_HASH)
    assert downloader._get_content_length(FakeHeaders()) == 100001
def test_return_fail(self, download_max_size):
    """A bogus hexdigest must make verification fail regardless of max size."""
    downloader = FileDownloader(FILENAME, URLS,
                                "JKFEIFJILEFJ983NKFNKL", verify=True)
    downloader.download_max_size = download_max_size
    data = downloader.download_verify_return()
    assert data is None
def _download_verify_patches(self):
    """Download & verify all patches, reporting progress via hooks.

    Returns False as soon as one patch fails (patches apply
    sequentially), True when every patch was fetched and verified.
    """
    log.debug('Downloading patches')
    downloaded = 0
    percent = 0
    total = len(self.patch_data)
    temp_dir = tempfile.gettempdir()
    for p in self.patch_data:
        # Don't write temp files to cwd
        with ChDir(temp_dir):
            fd = FileDownloader(
                p['patch_name'],
                p['patch_urls'],
                hexdigest=p['patch_hash'],
                verify=self.verify,
                max_download_retries=self.max_download_retries,
                urllb3_headers=self.urllib3_headers)
            # Attempt to download resource
            data = fd.download_verify_return()
        percent = int((float(downloaded + 1) / float(total)) * 100)
        percent = '{0:.1f}'.format(percent)
        if data is None:
            # Since patches are applied sequentially
            # we cannot continue successfully
            self._call_progress_hooks({
                'total': total,
                'downloaded': downloaded,
                'percent_complete': percent,
                'status': 'failed to download all patches',
            })
            return False
        self.patch_binary_data.append(data)
        downloaded += 1
        self._call_progress_hooks({
            'total': total,
            'downloaded': downloaded,
            'percent_complete': percent,
            'status': 'downloading',
        })
    self._call_progress_hooks({
        'total': total,
        'downloaded': downloaded,
        'percent_complete': percent,
        'status': 'finished',
    })
    return True
def test_cb(self, download_max_size):
    """A progress hook plus a custom max download size must not break downloads."""
    def on_progress(status):
        pass

    downloader = FileDownloader(FILENAME, URLS, hexdigest=FILE_HASH,
                                progress_hooks=[on_progress], verify=True)
    downloader.download_max_size = download_max_size
    data = downloader.download_verify_return()
    assert data is not None
def _full_update(self):
    """Download the full update archive, verify it, and write it to disk.

    Returns True on success, False otherwise.
    """
    log.debug('Starting full update')
    file_hash = self._get_file_hash_from_manifest()
    with dsdev_utils.paths.ChDir(self.update_folder):
        log.debug('Downloading update...')
        downloader = FileDownloader(self.filename,
                                    self.update_urls,
                                    hexdigest=file_hash,
                                    verify=self.verify,
                                    progress_hooks=self.progress_hooks)
        if downloader.download_verify_write():
            log.debug('Download Complete')
            return True
        # pragma: no cover
        log.debug('Failed To Download Latest Version')
        return False
def _download_verify_patches(self):
    """Download & verify all patches, reporting progress via hooks.

    Aborts and returns False on the first failed patch download, since
    patches must be applied in order; otherwise returns True.
    """
    log.debug('Downloading patches')
    downloaded = 0
    percent = 0
    total = len(self.patch_data)
    temp_dir = tempfile.gettempdir()
    for patch in self.patch_data:
        # Don't write temp files to cwd
        with ChDir(temp_dir):
            fd = FileDownloader(patch['patch_name'],
                                patch['patch_urls'],
                                hexdigest=patch['patch_hash'],
                                verify=self.verify,
                                max_download_retries=self.max_download_retries,
                                urllb3_headers=self.urllib3_headers)
            # Attempt to download resource
            data = fd.download_verify_return()
        percent = int((float(downloaded + 1) / float(total)) * 100)
        percent = '{0:.1f}'.format(percent)
        if data is not None:
            self.patch_binary_data.append(data)
            downloaded += 1
            status = {'total': total,
                      'downloaded': downloaded,
                      'percent_complete': percent,
                      'status': 'downloading'}
            self._call_progress_hooks(status)
        else:
            # Since patches are applied sequentially
            # we cannot continue successfully
            status = {'total': total,
                      'downloaded': downloaded,
                      'percent_complete': percent,
                      'status': 'failed to download all patches'}
            self._call_progress_hooks(status)
            return False
    status = {'total': total,
              'downloaded': downloaded,
              'percent_complete': percent,
              'status': 'finished'}
    self._call_progress_hooks(status)
    return True
def _download_manifest(self):
    """Download, decompress, and persist the online version manifest.

    Returns the decompressed manifest bytes, or None on any failure.
    """
    log.info('Downloading online version file')
    try:
        downloader = FileDownloader(self.version_file, self.update_urls,
                                    verify=self.verify)
        data = downloader.download_verify_return()
        try:
            decompressed_data = gzip_decompress(data)
        except IOError:
            # Re-raised so the outer handler returns None; only logged here
            log.error('Failed to decompress gzip file')
            raise
        log.info('Version file download successful')
        # Writing version file to application data directory
        self._write_manifest_2_filesystem(decompressed_data)
        return decompressed_data
    except Exception as err:
        log.error('Version file download failed')
        log.debug(str(err), exc_info=True)
        return None
def _download_key(self):
    """Download, decompress, and persist the key file.

    Returns the decompressed key file bytes, or None on any failure
    (download error or gzip decompression error).
    """
    log.info('Downloading key file')
    try:
        fd = FileDownloader(self.key_file, self.update_urls,
                            verify=self.verify)
        data = fd.download_verify_return()
        try:
            decompressed_data = gzip_decompress(data)
        except IOError:
            # Will be caught down below. Just logging the error
            log.error('Failed to decompress gzip file')
            raise
        log.info('Key file download successful')
        # Writing key file to application data directory
        self._write_manifest_2_filesystem(decompressed_data)
        return decompressed_data
    except Exception as err:
        # BUG FIX: message previously said 'Version file download failed',
        # copied from the manifest downloader.
        log.error('Key file download failed')
        log.debug(err, exc_info=True)
        return None
def _full_update(self, name):
    """Download and write the latest full release of *name*.

    Resolves the newest version and its file hash from the manifest
    data, then downloads into the update folder. Returns True on
    success, False otherwise.
    """
    log.info('Starting full update')
    latest = get_highest_version(name, self.platform, self.easy_data)
    filename = get_filename(name, latest, self.platform, self.easy_data)
    # Manifest entries are keyed "<updates_key>*<name>*<version>*<platform>*file_hash"
    hash_key = '{}*{}*{}*{}*{}'.format(self.updates_key, name, latest,
                                       self.platform, 'file_hash')
    file_hash = self.easy_data.get(hash_key)
    with jms_utils.paths.ChDir(self.update_folder):
        log.info('Downloading update...')
        downloader = FileDownloader(filename, self.update_urls,
                                    file_hash, self.verify,
                                    self.progress_hooks)
        if downloader.download_verify_write():
            log.info('Download Complete')
            return True
        # pragma: no cover
        log.error('Failed To Download Latest Version')
        return False
def _download_verify_patches(self):
    """Download and verify every patch listed in ``self.patch_data``.

    Patches are applied sequentially, so the first failed download aborts
    the run and returns False. Progress hooks receive a status dict with
    keys ``total``, ``downloaded`` and ``status``. Returns True when all
    patches were downloaded and verified.
    """
    log.debug('Downloading patches')
    downloaded = 0
    total = len(self.patch_data)
    for p in self.patch_data:
        # Initialize downloader
        fd = FileDownloader(p['patch_name'], p['patch_urls'],
                            p['patch_hash'], self.verify)
        # Attempt to download resource
        data = fd.download_verify_return()
        if data is None:
            # Since patches are applied sequentially
            # we cannot continue successfully
            status = {
                'total': total,
                'downloaded': downloaded,
                'status': 'failed to download all patches'
            }
            self._call_progress_hooks(status)
            return False
        self.patch_binary_data.append(data)
        downloaded += 1
        # BUG FIX: key was misspelled 'downloaed' here and in the final
        # status below, while the failure branch used 'downloaded' —
        # progress hooks now always see a consistent key.
        status = {
            'total': total,
            'downloaded': downloaded,
            'status': 'downloading'
        }
        self._call_progress_hooks(status)
    status = {
        'total': total,
        'downloaded': downloaded,
        'status': 'finished'
    }
    self._call_progress_hooks(status)
    return True
def test_bad_url(self):
    """An unreachable URL must fail cleanly and return None."""
    downloader = FileDownloader(FILENAME, ['bad url'],
                                hexdigest='bad hash', verify=True)
    data = downloader.download_verify_return()
    assert data is None
def test_url_as_string(self):
    """Passing a single URL string instead of a list must raise."""
    with pytest.raises(FileDownloaderError):
        FileDownloader(FILENAME, URLS[0])
def test_good_conent_length(self):
    """After a download, content_length reflects the fixture's 60000 bytes."""
    downloader = FileDownloader(FILENAME, URL, FILE_HASH, verify=False)
    downloader.download_verify_return()
    assert downloader.content_length == 60000
def test_bad_content_length(self):
    """A response with no content-length header yields the default 100001."""
    class FakeHeaders(object):
        headers = {}

    downloader = FileDownloader(FILENAME, URL, FILE_HASH, verify=False)
    assert downloader._get_content_length(FakeHeaders()) == 100001
def test_good_content_length(self):
    """After a verified download, content_length reflects the fixture's 2387 bytes."""
    downloader = FileDownloader(FILENAME, URLS, hexdigest=FILE_HASH,
                                verify=True)
    downloader.download_verify_return()
    assert downloader.content_length == 2387
def test_return(self, download_max_size):
    """A valid download succeeds under the given max download size."""
    downloader = FileDownloader(FILENAME, URLS, FILE_HASH, verify=True)
    downloader.download_max_size = download_max_size
    data = downloader.download_verify_return()
    assert data is not None
def test_url_with_spaces(self):
    """A filename containing spaces downloads and verifies successfully."""
    downloader = FileDownloader(FILENAME_WITH_SPACES, URL, FILE_HASH)
    data = downloader.download_verify_return()
    assert data is not None
def test_url_with_spaces(self):
    """A filename containing spaces downloads and verifies successfully."""
    downloader = FileDownloader(FILENAME_WITH_SPACES, URLS,
                                hexdigest=FILE_HASH, verify=True)
    data = downloader.download_verify_return()
    assert data is not None
def test_good_conent_length(self):
    """After a download, content_length reflects the fixture's 60000 bytes."""
    downloader = FileDownloader(FILENAME, URL, FILE_HASH)
    downloader.download_verify_return()
    assert downloader.content_length == 60000
def _download_verify_patches(self):
    """Download & verify all patches, reporting progress via hooks.

    Uses the injected ``self.downloader`` factory when configured,
    otherwise ``FileDownloader``. Aborts and returns False on the first
    failed patch (patches apply sequentially); otherwise returns True.
    """
    log.debug("Downloading patches")
    downloaded = 0
    percent = 0
    total = len(self.patch_data)
    temp_dir = tempfile.gettempdir()
    for p in self.patch_data:
        # Don't write temp files to cwd
        with ChDir(temp_dir):
            if self.downloader:
                fd = self.downloader(p["patch_name"], p["patch_urls"],
                                     hexdigest=p["patch_hash"])
            else:
                fd = FileDownloader(
                    p["patch_name"],
                    p["patch_urls"],
                    hexdigest=p["patch_hash"],
                    verify=self.verify,
                    max_download_retries=self.max_download_retries,
                    headers=self.headers,
                    http_timeout=self.http_timeout)
            # Attempt to download resource
            data = fd.download_verify_return()
        percent = int((float(downloaded + 1) / float(total)) * 100)
        percent = "{0:.1f}".format(percent)
        if data is None:
            # Since patches are applied sequentially
            # we cannot continue successfully
            self._call_progress_hooks({
                "total": total,
                "downloaded": downloaded,
                "percent_complete": percent,
                "status": "failed to download all patches",
            })
            return False
        self.patch_binary_data.append(data)
        downloaded += 1
        self._call_progress_hooks({
            "total": total,
            "downloaded": downloaded,
            "percent_complete": percent,
            "status": "downloading",
        })
    self._call_progress_hooks({
        "total": total,
        "downloaded": downloaded,
        "percent_complete": percent,
        "status": "finished",
    })
    return True
def test_return(self):
    """A valid, verified download returns the file's binary data."""
    downloader = FileDownloader(FILENAME, URLS, FILE_HASH, verify=True)
    data = downloader.download_verify_return()
    assert data is not None
def test_good_conent_length(self):
    """After a verified download, content_length reflects the fixture's 2387 bytes."""
    downloader = FileDownloader(FILENAME, URLS, hexdigest=FILE_HASH,
                                verify=True)
    downloader.download_verify_return()
    assert downloader.content_length == 2387
def test_bad_url(self):
    """An unreachable URL must fail cleanly and return None."""
    downloader = FileDownloader(FILENAME, 'bad url', 'bad hash',
                                verify=False)
    data = downloader.download_verify_return()
    assert data is None
def test_return(self):
    """A valid download returns the file's binary data."""
    downloader = FileDownloader(FILENAME, URL, FILE_HASH)
    data = downloader.download_verify_return()
    assert data is not None
def test_bad_url(self):
    """An unreachable URL must fail cleanly and return None."""
    downloader = FileDownloader(FILENAME, 'bad url', 'bad hash')
    data = downloader.download_verify_return()
    assert data is None