def test_wronghash(httpd, partial_download):
    port = httpd.server_port

    with pytest.raises(DownloadError):
        urlretrieve('http://localhost:%s/test/trust.pdf' % port,
                    partial_download.name, sha256sum='')
def update_cache(self):
    self._available = {}

    for remote in self._remotes.values():
        # TODO: Get resumable.urlretrieve to accept a file-like object?
        with tempfile.NamedTemporaryFile() as fd:
            tmppath = fd.name

            def _progress(*args):
                self._progress(' {}'.format(remote.name), *args)

            # TODO: Verify the download with sha256sum? Crypto signature?
            try:
                urlretrieve(remote.url, tmppath, reporthook=_progress)
            except ConnectionError:
                print("Warning: Could not connect to the remote"
                      " {remote.name} ({remote.url}).\n"
                      "Continuing without this remote.".format(remote=remote))
                continue

            catalog = load_from_file(tmppath)

            # TODO: Handle content which was removed from the remote source
            self._available.update(catalog['all'])

    self._persist_catalog()
def test_wrongsize(httpd, partial_download, testfile_stats):
    port = httpd.server_port

    with pytest.raises(DownloadError):
        urlretrieve('http://localhost:%s/test/trust.pdf' % port,
                    partial_download.name,
                    filesize=testfile_stats.size - 1)
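These tests also rely on httpd, partial_download and testfile_stats fixtures. A rough sketch of what testfile_stats could look like, assuming it merely exposes the size and sha256 hex digest of the served test file; the FileStats name and the file path are illustrative only, not the project's actual fixture:

import hashlib
import os
from collections import namedtuple

import pytest

FileStats = namedtuple('FileStats', ['size', 'sha256sum'])


@pytest.fixture
def testfile_stats():
    # Assumed location of the file served by the test HTTP server
    path = 'test/trust.pdf'
    digest = hashlib.sha256()

    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            digest.update(chunk)

    return FileStats(size=os.path.getsize(path),
                     sha256sum=digest.hexdigest())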
def _fetch_package(self, package):
    def _progress(*args):
        self._progress(' {}'.format(package.id), *args)

    filename = '{0.id}-{0.version}'.format(package)
    urlparsed = urlparse(package.url)

    for cache in self._package_caches:
        path = os.path.join(cache, filename)

        if os.path.isfile(path):
            if self._verify_sha256(path, package.sha256sum):
                return path

            if urlparsed.scheme in ['file', '']:
                try:
                    shutil.copyfile(urlparsed.path, path)
                except Exception as error:
                    print("Warning: Could not fetch the package file"
                          " {package.title} ({package.url}).\n{error}\n"
                          "Ignoring this package.".format(package=package,
                                                          error=error))
                    os.unlink(path)
            else:
                try:
                    # This might be an incomplete download, try finishing it
                    urlretrieve(package.url, path,
                                sha256sum=package.sha256sum,
                                reporthook=_progress)
                    return path
                except DownloadError as e:
                    # File was too busted, could not finish the download
                    if e.args[0] is DownloadCheck.checksum_mismatch:
                        msg = 'Downloaded file has invalid checksum'
                    else:
                        msg = 'Error downloading the file: {}'.format(e)

                    printerr(msg)
                    os.unlink(path)

    path = os.path.join(self._local_package_cache, filename)

    if urlparsed.scheme in ['file', '']:
        try:
            shutil.copyfile(urlparsed.path, path)
        except Exception as error:
            print("Warning: Could not fetch the package file"
                  " {package.title} ({package.url}).\n{error}\n"
                  "Ignoring this package.".format(package=package,
                                                  error=error))
    else:
        urlretrieve(package.url, path, sha256sum=package.sha256sum,
                    reporthook=_progress)

    return path
def retrieve(file_dict):
    success = False

    while not success:
        try:
            urlretrieve(file_dict['download_url'],
                        os.path.join(dir0, file_dict['name']))
            success = True
        except Exception as e:
            # Keep retrying until the download eventually succeeds
            print(e)
def fetch(cls, url, filename, expected_sha256=None):
    b = ProgressBar()

    try:
        urlretrieve(url, filename, b.reporthook, sha256sum=expected_sha256)
        sys.stdout.write('\n')
    except DownloadError:
        if os.path.exists(filename):
            os.unlink(filename)
        raise
    except IOError:
        sys.stdout.write('\n')
        raise DownloadError('Failed to fetch %s from %s' % (filename, url))
def fetch(cls, url, filename, expected_sha256):
    b = ProgressBar()

    try:
        urlretrieve(url, filename, b.reporthook, sha256sum=expected_sha256)
        sys.stdout.write('\n')
    except DownloadError:
        if os.path.exists(filename):
            os.unlink(filename)
        raise
    except IOError:
        sys.stdout.write('\n')
        raise DownloadError('Failed to fetch %s from %s' % (filename, url))
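Both fetch() variants pass ProgressBar().reporthook as the reporthook argument of urlretrieve. A minimal sketch of such a class, assuming resumable.urlretrieve invokes the hook with the usual urllib-style (count, block_size, total_size) arguments; the class name matches the snippets above, but this implementation is illustrative only:

import sys


class ProgressBar:
    def reporthook(self, count, block_size, total_size):
        if total_size <= 0:
            # Total size unknown, nothing sensible to display
            return

        percent = min(100, count * block_size * 100 // total_size)
        sys.stdout.write('\r%3d%%' % percent)
        sys.stdout.flush()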
def download_archive(self):
    # The previous download loop based on urllib.request.urlretrieve was
    # replaced by resumable.urlretrieve.
    from resumable import urlretrieve, DownloadError
    import requests

    for archive in self.http_atoms:
        print(ACTION_INFO, "Downloading", archive.get_name())
        self.time = time.time()
        file_location = (home + ".modularitea/download/"
                         + archive.get_url(ARCH).split('/')[-1])
        print(" ", archive.get_url(ARCH))

        try:
            urlretrieve(
                archive.get_url(ARCH),
                file_location,
                self._report_hook
            )
        except DownloadError:
            print(ACTION_ERROR, "download failed with a DownloadError")
            # Check whether the file is nevertheless complete before giving up
            from urllib import request
            size = int(request.urlopen(
                archive.get_url(ARCH)).info()['Content-Length'])
            size_downloaded = os.path.getsize(file_location)
            if size_downloaded != size:
                raise DownloadError
        except requests.exceptions.ConnectionError:
            # TODO: remove debug
            # print("failed with ConnectionError")
            print(ACTION_ERROR,
                  'Error while downloading files. Check your internet connection')
            exit(1)

    print(ACTION_INFO, 'download done')
    return 0
def _fetch_package(self, package):
    def _progress(*args):
        self._progress(' {}'.format(package.id), *args)

    filename = '{0.id}-{0.version}'.format(package)

    for cache in self._package_caches:
        path = os.path.join(cache, filename)

        if os.path.isfile(path):
            if self._verify_sha256(path, package.sha256sum):
                return path

            try:
                # This might be an incomplete download, try finishing it
                urlretrieve(package.url, path, sha256sum=package.sha256sum,
                            reporthook=_progress)
                return path
            except DownloadError as e:
                # File was too busted, could not finish the download
                if e.args[0] is DownloadCheck.checksum_mismatch:
                    msg = 'Downloaded file has invalid checksum'
                else:
                    msg = 'Error downloading the file: {}'.format(e)

                printerr(msg)
                os.unlink(path)

    path = os.path.join(self._local_package_cache, filename)
    urlretrieve(package.url, path, sha256sum=package.sha256sum,
                reporthook=_progress)

    return path
def update_cache(self):
    self._available_value = {}

    for remote in self._remotes.values():
        # TODO: Get resumable.urlretrieve to accept a file-like object?
        with tempfile.NamedTemporaryFile() as fd:
            tmppath = fd.name

            def _progress(*args):
                self._progress(' {}'.format(remote.name), *args)

            # TODO: Verify the download with sha256sum? Crypto signature?
            urlparsed = urlparse(remote.url)

            if urlparsed.scheme in ['file', '']:
                try:
                    shutil.copyfile(urlparsed.path, tmppath)
                except Exception as error:
                    print("Warning: Could not fetch the catalog file"
                          " {remote.name} ({remote.url}).\n{error}\n"
                          "Continuing without this remote.".format(
                              remote=remote, error=error))
                    continue
            else:
                try:
                    urlretrieve(remote.url, tmppath, reporthook=_progress)
                except ConnectionError:
                    print("Warning: Could not connect to the remote"
                          " {remote.name} ({remote.url}).\n"
                          "Continuing without this remote.".format(
                              remote=remote))
                    continue

            catalog = load_from_file(tmppath)

            # TODO: Handle content which was removed from the remote source
            self._available.update(catalog['all'])

    self._update_installed_metadata()
    self._persist_catalog()
def update_cache(self):
    self._catalog['available'] = {}

    for remote in self._remotes.values():
        # TODO: Get resumable.urlretrieve to accept a file-like object?
        fd, tmppath = tempfile.mkstemp()
        os.close(fd)

        def _progress(*args):
            self._progress(' {}'.format(remote.name), *args)

        # TODO: Verify the download with sha256sum? Crypto signature?
        urlretrieve(remote.url, tmppath, reporthook=_progress)

        with open(tmppath, 'r') as f:
            catalog = yaml.safe_load(f.read())

        # TODO: Handle content which was removed from the remote source
        self._catalog['available'].update(catalog['all'])

        os.unlink(tmppath)

    self._persist_cache()
def _fetch_package(self, package):
    def _progress(*args):
        self._progress(' {}'.format(package.id), *args)

    filename = '{0.id}-{0.version}'.format(package)

    for cache in self._package_caches:
        path = os.path.join(cache, filename)

        if os.path.isfile(path):
            if self._verify_sha256(path, package.sha256sum):
                return path

            try:
                # This might be an incomplete download, try finishing it
                urlretrieve(
                    package.url, path, sha256sum=package.sha256sum,
                    reporthook=_progress)
                return path
            except DownloadError as e:
                # File was too busted, could not finish the download
                if e.args[0] is DownloadCheck.checksum_mismatch:
                    msg = 'Downloaded file has invalid checksum\n'
                else:
                    msg = 'Error downloading the file: {}\n'.format(e)

                sys.stderr.write(msg)
                sys.stderr.flush()
                os.unlink(path)

    path = os.path.join(self._local_package_cache, filename)
    urlretrieve(
        package.url, path, sha256sum=package.sha256sum,
        reporthook=_progress)

    return path
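The _fetch_package variants above all call a self._verify_sha256 helper that is not shown. A plausible sketch, assuming it simply hashes the cached file in chunks and compares hex digests; the enclosing class name here is hypothetical, not the project's actual class:

import hashlib


class PackageCacheMixin:
    def _verify_sha256(self, path, sha256sum):
        digest = hashlib.sha256()

        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b''):
                digest.update(chunk)

        return digest.hexdigest() == sha256sum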
def download_episode(url, title, folder, quality):
    trial = 1
    title = re.sub(r"[^a-zA-Z0-9\-\.\(\)\' ]", '_', title)
    filename = title + '.mp4'
    filename = '{}/{}'.format(folder, filename)
    err = None

    while True:
        if os.path.exists(filename):
            return

        file = filename
        filename += '.part'
        print('Downloading {}'.format(title))
        surl = stream_url(url, title, quality)

        while surl is not None:
            try:
                URLopener.version = ('Mozilla/5.0 (Windows NT 6.1) '
                                     'AppleWebKit/537.36 (KHTML, like Gecko) '
                                     'Chrome/41.0.2228.0 Safari/537.36')
                headers = urlretrieve(surl, filename=filename,
                                      reporthook=dlProgress)
                print("\ndone\n")
                os.rename(filename, file)
                return
            except Exception as e:
                err = e
                print("\nUnable to download..\n" + str(err))
                if 'Forbidden' in str(e):
                    trial += 1
                    if trial > 10:
                        return
                    print("Trying again..\nTry: " + str(trial))
                    continue
                else:
                    break

        trial += 1
        if trial > 10:
            return
        print("\nUnable to download..\n" + str(err))
        print("Trying again..\nTry: " + str(trial))
        filename = file
def test_norange(simple_httpd, partial_download, testfile_stats):
    urlretrieve('http://localhost:%s/test/trust.pdf'
                % simple_httpd.server_port,
                partial_download.name)

    assert testfile_stats.sha256sum == sha256(partial_download.name)
def partial_download(httpd):
    port = httpd.server_port

    with NamedTemporaryFile() as tempfile:
        urlretrieve('http://localhost:%s/test/trust.pdf.partial' % port,
                    tempfile.name)
        yield tempfile