def _download(self, url, folder='', filename='', retried=0):
    """Download one image; returns (status, url): 1 = saved or already present, 0 = failure, -1 = image missing on the server."""
    logger.info('Starting to download {0} ...'.format(url))
    filename = filename if filename else os.path.basename(urlparse(url).path)
    base_filename, extension = os.path.splitext(filename)
    save_path = os.path.join(folder, base_filename.zfill(3) + extension)
    try:
        # Skip files that have already been downloaded.
        if os.path.exists(save_path):
            logger.warning('File: {0} exists, ignoring'.format(save_path))
            return 1, url

        with open(save_path, "wb") as f:
            # Retry the HTTP request up to 10 times before giving up.
            i = 0
            while i < 10:
                try:
                    response = request('get', url, stream=True, timeout=self.timeout)
                except Exception as e:
                    i += 1
                    if not i < 10:
                        logger.critical(str(e))
                        return 0, None
                    continue
                break

            if response.status_code != 200:
                raise NhentaiImageNotExistException

            # Stream the body to disk in chunks when a content-length is advertised.
            length = response.headers.get('content-length')
            if length is None:
                f.write(response.content)
            else:
                for chunk in response.iter_content(2048):
                    f.write(chunk)
    except (requests.HTTPError, requests.Timeout) as e:
        # Retry the whole download up to 3 times on transient HTTP errors or timeouts.
        if retried < 3:
            logger.warning('Warning: {0}, retrying({1}) ...'.format(str(e), retried))
            return 0, self._download(url=url, folder=folder, filename=filename, retried=retried + 1)
        else:
            return 0, None
    except NhentaiImageNotExistException:
        # The image does not exist on the server; remove the partial/empty file.
        os.remove(save_path)
        return -1, url
    except Exception as e:
        logger.critical(str(e))
        return 0, None

    return 1, url
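
# The method above relies on names that are not defined in this excerpt:
# `logger`, `request`, `NhentaiImageNotExistException`, and `self.timeout`.
# A minimal sketch of that surrounding context, assuming `request` is a thin
# wrapper around a shared requests.Session -- the real project may define
# these helpers differently.
import os
import logging
import requests
from urllib.parse import urlparse  # on Python 2: from urlparse import urlparse

logger = logging.getLogger(__name__)


class NhentaiImageNotExistException(Exception):
    """Raised when the server answers with a non-200 status for an image URL."""
    pass


def request(method, url, **kwargs):
    # Hypothetical helper: one session with a browser-like User-Agent.
    session = requests.Session()
    session.headers.update({'User-Agent': 'Mozilla/5.0'})
    return getattr(session, method)(url, **kwargs)
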

# Variant without the existing-file check or the per-request retry loop;
# same (status, url) return convention as the version above.
def _download(self, url, folder='', filename='', retried=0):
    logger.info('Start downloading: {0} ...'.format(url))
    filename = filename if filename else os.path.basename(urlparse(url).path)
    base_filename, extension = os.path.splitext(filename)
    save_path = os.path.join(folder, base_filename.zfill(3) + extension)
    try:
        with open(save_path, "wb") as f:
            response = request('get', url, stream=True, timeout=self.timeout)
            if response.status_code != 200:
                raise NhentaiImageNotExistException

            length = response.headers.get('content-length')
            if length is None:
                f.write(response.content)
            else:
                for chunk in response.iter_content(2048):
                    f.write(chunk)
    except (requests.HTTPError, requests.Timeout) as e:
        # Retry the whole download up to 3 times on transient HTTP errors or timeouts.
        if retried < 3:
            logger.warning('Warning: {0}, retrying({1}) ...'.format(str(e), retried))
            return 0, self._download(url=url, folder=folder, filename=filename, retried=retried + 1)
        else:
            return 0, None
    except NhentaiImageNotExistException:
        os.remove(save_path)
        return -1, url
    except Exception as e:
        logger.critical(str(e))
        return 0, None

    return 1, url
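
# A hypothetical caller, to illustrate how the (status, url) tuple returned by the
# two variants above could be consumed; the function name and loop below are
# assumptions for illustration, not part of the original project.
def download_pages(downloader, urls, folder):
    failed = []
    for url in urls:
        status, result = downloader._download(url, folder=folder)
        if status == 1:
            logger.info('Finished: {0}'.format(result))
        elif status == -1:
            logger.warning('Image does not exist, skipped: {0}'.format(result))
        else:  # status == 0: transient or unexpected failure
            failed.append(url)
    return failed
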

# Variant with a single boolean retry flag and no status-code check;
# returns the url on success or None on failure.
def _download(self, url, folder='', filename='', retried=False):
    logger.info('Start downloading: {0} ...'.format(url))
    filename = filename if filename else os.path.basename(urlparse(url).path)
    base_filename, extension = os.path.splitext(filename)
    try:
        with open(os.path.join(folder, base_filename.zfill(3) + extension), "wb") as f:
            response = request('get', url, stream=True, timeout=self.timeout)
            length = response.headers.get('content-length')
            if length is None:
                f.write(response.content)
            else:
                for chunk in response.iter_content(2048):
                    f.write(chunk)
    except requests.HTTPError as e:
        # Retry once on an HTTP error, then give up.
        if not retried:
            logger.error('Error: {0}, retrying'.format(str(e)))
            return self._download(url=url, folder=folder, filename=filename, retried=True)
        else:
            return None
    except Exception as e:
        logger.critical(str(e))
        return None

    return url
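
# Note: requests raises HTTPError for 4xx/5xx responses only if raise_for_status()
# is called (either inside the `request` helper or by the caller); otherwise this
# variant would write the server's error page to disk. A minimal sketch of how a
# caller could force that exception -- the function name and timeout default below
# are assumptions for illustration:
def fetch_image(url, timeout=30):
    response = request('get', url, stream=True, timeout=timeout)
    response.raise_for_status()  # turn 4xx/5xx into requests.HTTPError
    return response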