def download_file(url, filename):
    """Download *url* to *filename* on disk.

    Shows a tqdm progress bar when the server reports a Content-Length
    header, otherwise falls back to a simple spinner printed to stdout.

    Args:
        url: HTTP(S) URL to fetch.
        filename: Destination path for the downloaded bytes.
    """
    # exist_ok avoids the race between a separate isdir() check and makedirs().
    os.makedirs(DATASETS_DIR, exist_ok=True)
    block_sz = 8192
    # Context managers close the HTTP response and the file deterministically
    # (the original leaked the urlopen response object).
    with urllib.request.urlopen(url) as u, open(filename, 'wb') as f:
        meta = u.info()
        content_length = meta.get_all("Content-Length")
        if content_length:
            file_size = int(content_length[0])
            pbar = tqdm(total=file_size, desc=basename(normpath(filename)),
                        unit='B', unit_scale=True)
            try:
                while True:
                    buff = u.read(block_sz)
                    if not buff:
                        break
                    pbar.update(len(buff))
                    f.write(buff)
            finally:
                # Close the bar even if the read/write loop raises.
                pbar.close()
        else:
            LOG.warning("No content length information")
            # No total available: print a rotating spinner per chunk instead.
            for cyc in itertools.cycle('/–\\|'):
                buff = u.read(block_sz)
                if not buff:
                    break
                print(cyc, end='\r')
                f.write(buff)
def save_response_content(response, filename):
    """Stream the body of a requests-style *response* to *filename*.

    Shows a tqdm progress bar when the response carries a Content-Length
    header, otherwise falls back to a simple spinner printed to stdout.

    Args:
        response: A streaming HTTP response exposing ``headers`` and
            ``iter_content`` (e.g. ``requests.Response``).
        filename: Destination path for the downloaded bytes.
    """
    CHUNK_SIZE = 8192
    if 'Content-Length' in response.headers:
        # BUG FIX: header values are strings; tqdm requires a numeric total,
        # so convert explicitly (the original passed the raw string).
        filesize = int(response.headers['Content-Length'])
        pbar = tqdm(total=filesize, desc=basename(normpath(filename)),
                    unit='B', unit_scale=True)
        try:
            with open(filename, "wb") as f:
                for chunk in response.iter_content(CHUNK_SIZE):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
                        pbar.update(len(chunk))
        finally:
            # Close the bar even if the write loop raises.
            pbar.close()
    else:
        LOG.warning("No content length information")
        # No total available: print a rotating spinner per chunk instead.
        with open(filename, "wb") as f:
            for chunk, cyc in zip(response.iter_content(CHUNK_SIZE),
                                  itertools.cycle('/–\\|')):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    print(cyc, end='\r')