import logging
import urllib2

import update

# Module-level progress table shared with callers:
# url -> {"status", "size", "downloaded"}
download_progress = {}


def download_file(url, file):
    # Refuse to start a second download of a URL that is already in progress.
    if url not in download_progress:
        download_progress[url] = {}
        download_progress[url]["status"] = "downloading"
    else:
        if download_progress[url]["status"] == "downloading":
            logging.error("url in downloading, %s", url)
            return False

    try:
        logging.info("download %s to %s", url, file)
        opener = get_opener()
        req = opener.open(url)
        download_progress[url]["size"] = int(
            req.headers.get('content-length') or 0)

        # Stream the response in 16 KB chunks, updating progress as we go.
        CHUNK = 16 * 1024
        downloaded = 0
        with open(file, 'wb') as fp:
            while True:
                chunk = req.read(CHUNK)
                if not chunk:
                    break
                fp.write(chunk)
                downloaded += len(chunk)
                download_progress[url]["downloaded"] = downloaded

        download_progress[url]["status"] = "finished"
        return True
    except Exception as e:
        download_progress[url]["status"] = "fail"
        logging.exception("download %s to %s fail:%r", url, file, e)
        return False
def get_opener(retry=0):
    # First attempt uses a plain opener; on retry, fall back to the opener
    # provided by the update module.
    if retry == 0:
        opener = urllib2.build_opener()
        return opener
    else:
        return update.get_opener()
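

# A minimal usage sketch, assuming download_file is run in a background thread
# while the caller polls the shared download_progress table for status. The
# example_download helper, its polling interval, and the URL/path arguments
# are illustrative assumptions, not part of the module above.
import threading
import time


def example_download(url, path):
    worker = threading.Thread(target=download_file, args=(url, path))
    worker.start()
    while worker.is_alive():
        info = download_progress.get(url, {})
        logging.info("progress: %s/%s bytes, status=%s",
                     info.get("downloaded", 0),
                     info.get("size", 0),
                     info.get("status"))
        time.sleep(1)
    worker.join()
    return download_progress.get(url, {}).get("status") == "finished"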