def download(media, session, directory, username):
    """Download one media item, retrying up to 10 more times on connection resets.

    Args:
        media: dict with at least "link", "postedAt" ("%d-%m-%Y %H:%M:%S"),
            "filename" and "directory" keys. Mutated in place: "ext" is set
            to the filename's extension without the leading dot.
        session: HTTP session forwarded to ``json_request``.
        directory: unused here — kept for interface compatibility.
        username: unused here — kept for interface compatibility.

    Returns:
        True on a completed download, False when a request fails or all
        retries are exhausted, None when a duplicate file already exists
        and ``overwrite_files`` is off.
    """
    count = 0
    while count < 11:
        link = media["link"]
        # HEAD first: learn the remote size without pulling the body.
        r = json_request(session, link, "HEAD", True, False)
        if not r:
            return False
        header = r.headers
        content_length = int(header["content-length"])
        date_object = datetime.strptime(media["postedAt"], "%d-%m-%Y %H:%M:%S")
        og_filename = media["filename"]
        media["ext"] = os.path.splitext(og_filename)[1]
        media["ext"] = media["ext"].replace(".", "")
        # NOTE(review): plain concatenation — assumes media["directory"]
        # already ends with a path separator; confirm against caller.
        download_path = media["directory"] + media["filename"]
        timestamp = date_object.timestamp()
        if not overwrite_files:
            # Skip files we already hold with a matching size.
            if check_for_dupe_file(download_path, content_length):
                return
        r = json_request(session, link, "GET", True, False)
        if not r:
            return False
        try:
            with open(download_path, 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:  # filter out keep-alive new chunks
                        f.write(chunk)
        except ConnectionResetError:
            # Connection dropped mid-stream; count the attempt and retry
            # from the HEAD request.
            count += 1
            continue
        format_image(download_path, timestamp)
        logger.info("Link: {}".format(link))
        logger.info("Path: {}".format(download_path))
        return True
    # All retries exhausted without a complete download (was an implicit
    # None fall-through, inconsistent with the explicit failure returns).
    return False
def download(media, session, directory, username):
    """Download one media item (single attempt, no retry logic).

    Args:
        media: dict with at least "link", "postedAt" ("%d-%m-%Y %H:%M:%S"),
            "filename" and "directory" keys. Mutated in place: "ext" is set
            to the filename's extension without the leading dot.
        session: HTTP session forwarded to ``json_request``.
        directory: unused here — kept for interface compatibility.
        username: unused here — kept for interface compatibility.

    Returns:
        True on a completed download, False when a request fails, None when
        a same-sized file already exists and ``overwrite_files`` is off.
    """
    # The original wrapped this body in a dead ``while True`` whose every
    # path exited on the first pass; the ``break`` paths fell off the end
    # and implicitly returned None. Flattened to explicit returns.
    link = media["link"]
    # HEAD first: learn the remote size without pulling the body.
    r = json_request(session, link, "HEAD", True, False)
    if not r:
        return False
    header = r.headers
    content_length = int(header["content-length"])
    date_object = datetime.strptime(media["postedAt"], "%d-%m-%Y %H:%M:%S")
    og_filename = media["filename"]
    media["ext"] = os.path.splitext(og_filename)[1]
    media["ext"] = media["ext"].replace(".", "")
    # NOTE(review): plain concatenation — assumes media["directory"]
    # already ends with a path separator; confirm against caller.
    download_path = media["directory"] + media["filename"]
    timestamp = date_object.timestamp()
    if not overwrite_files:
        # Skip files we already hold with a matching size.
        if os.path.isfile(download_path):
            local_size = os.path.getsize(download_path)
            if local_size == content_length:
                return
    r = json_request(session, link, "GET", True, False)
    if not r:
        return False
    with open(download_path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:  # filter out keep-alive new chunks
                f.write(chunk)
    format_image(download_path, timestamp)
    logger.info("Link: {}".format(link))
    logger.info("Path: {}".format(download_path))
    return True