Code Example #1
    def download(self, media, api):
        return_bool = True
        if not overwrite_files and media.downloaded:
            return
        count = 0
        sessions = [x for x in api.sessions if media.link in x.links]
        if not sessions:
            return
        session = sessions[0]
        while count < 11:
            links = [media.link]

            def choose_link(session, links):
                for link in links:
                    r = api.json_request(link, session, "HEAD",
                                         stream=True, json_format=False)
                    if not isinstance(r, requests.Response):
                        continue

                    header = r.headers
                    content_length = header.get('content-length')
                    if not content_length:
                        continue
                    content_length = int(content_length)
                    return [link, content_length]
            result = choose_link(session, links)
            if not result:
                count += 1
                continue
            link = result[0]
            content_length = result[1]
            media.size = content_length
            date_object = media.created_at
            download_path = os.path.join(
                media.directory, media.filename)
            timestamp = date_object.timestamp()
            if not overwrite_files:
                if main_helper.check_for_dupe_file(download_path, content_length):
                    main_helper.format_image(download_path, timestamp)
                    return_bool = False
                    media.downloaded = True
                    break
            r = api.json_request(
                link, session, stream=True, json_format=False)
            if not isinstance(r, requests.Response):
                return_bool = False
                count += 1
                continue
            downloader = main_helper.downloader(r, download_path, count)
            if not downloader:
                count += 1
                continue
            main_helper.format_image(download_path, timestamp)
            link_string = f"Link: {link}"
            path_string = f"Path: {download_path}"
            print(link_string)
            print(path_string)
            media.downloaded = True
            break
        return return_bool
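
All of the examples on this page follow roughly the same pattern: probe each candidate link with a HEAD request to read content-length, skip the download if a duplicate already exists, then stream the chosen link to disk in chunks, retrying a limited number of times on failure. The sketch below is illustrative only; it uses plain requests instead of the project-specific helpers (api.json_request, main_helper.downloader, main_helper.check_for_dupe_file), and the function name download_file is hypothetical.

    # Minimal sketch, not taken from any of the projects on this page.
    # Only the requests library is assumed; all names here are illustrative.
    import os

    import requests


    def download_file(links, download_path, max_tries=11, chunk_size=1024):
        session = requests.Session()
        for _ in range(max_tries):
            # Probe each candidate link with HEAD to learn the expected size.
            chosen = None
            for link in links:
                try:
                    head = session.head(link, allow_redirects=True, timeout=30)
                    size = int(head.headers.get("content-length", 0))
                except (requests.RequestException, ValueError):
                    continue
                if size:
                    chosen = (link, size)
                    break
            if not chosen:
                continue
            link, size = chosen
            # Skip if an identical-sized copy is already on disk (dupe check).
            if os.path.isfile(download_path) and os.path.getsize(download_path) == size:
                return False
            try:
                with session.get(link, stream=True, timeout=30) as r:
                    r.raise_for_status()
                    with open(download_path, "wb") as f:
                        for chunk in r.iter_content(chunk_size=chunk_size):
                            if chunk:  # filter out keep-alive new chunks
                                f.write(chunk)
            except requests.RequestException:
                # Remove the partial file and retry on the next attempt.
                if os.path.isfile(download_path):
                    os.unlink(download_path)
                continue
            return True
        return False
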
Code Example #2
File: starsavn.py Project: jaiirus/jaiirus2
    def download(media, session, directory, username):
        count = 0
        while count < 11:
            links = media["links"]

            def choose_link(session, links):
                for link in links:
                    r = json_request(session, link, "HEAD", True, False)
                    if not r:
                        continue

                    header = r.headers
                    content_length = header.get("content-length")
                    if not content_length:
                        continue
                    content_length = int(content_length)
                    return [link, content_length]

            result = choose_link(session, links)
            if not result:
                count += 1
                continue

            link = result[0]
            content_length = result[1]
            date_object = datetime.strptime(media["postedAt"],
                                            "%d-%m-%Y %H:%M:%S")
            og_filename = media["filename"]
            media["ext"] = os.path.splitext(og_filename)[1]
            media["ext"] = media["ext"].replace(".", "")
            download_path = media["directory"] + media["filename"]
            timestamp = date_object.timestamp()
            if not overwrite_files:
                if check_for_dupe_file(download_path, content_length):
                    return
            r = json_request(session, link, "GET", True, False)
            if not r:
                count += 1
                continue
            delete = False
            try:
                with open(download_path, 'wb') as f:
                    delete = True
                    for chunk in r.iter_content(chunk_size=1024):
                        if chunk:  # filter out keep-alive new chunks
                            f.write(chunk)
            except (ConnectionResetError) as e:
                if delete:
                    os.unlink(download_path)
                log_error.exception(e)
                count += 1
                continue
            except Exception as e:
                if delete:
                    os.unlink(download_path)
                log_error.exception(str(e) + "\n Tries: " + str(count))
                count += 1
                continue
            format_image(download_path, timestamp)
            log_download.info("Link: {}".format(link))
            log_download.info("Path: {}".format(download_path))
            return True
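
Several of these examples parse the post date with datetime.strptime(media["postedAt"], "%d-%m-%Y %H:%M:%S") and pass the resulting Unix timestamp to a project helper named format_image, whose source is not shown on this page. One plausible use of that timestamp is to stamp the downloaded file's modification time; a minimal sketch under that assumption looks like this:

    # Illustrative sketch only; format_image itself is not reproduced here,
    # and posted_at and "downloaded_file.jpg" are example values.
    import os
    from datetime import datetime

    posted_at = "25-12-2020 13:45:00"
    timestamp = datetime.strptime(posted_at, "%d-%m-%Y %H:%M:%S").timestamp()
    # Set both access and modification time of an existing file to the post date.
    os.utime("downloaded_file.jpg", (timestamp, timestamp))
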
Code Example #3
File: starsavn.py Project: throwaway-of/OnlyFans
    def download(medias):
        return_bool = True
        for media in medias:
            count = 0
            session = media["session"]
            while count < 11:
                links = media["links"]

                def choose_link(session, links):
                    for link in links:
                        r = main_helper.json_request(session,
                                                     link,
                                                     "HEAD",
                                                     stream=True,
                                                     json_format=False)
                        if not isinstance(r, requests.Response):
                            continue

                        header = r.headers
                        content_length = header.get('content-length')
                        if not content_length:
                            continue
                        content_length = int(content_length)
                        return [link, content_length]

                result = choose_link(session, links)
                if not result:
                    count += 1
                    continue
                link = result[0]
                content_length = result[1]
                date_object = datetime.strptime(media["postedAt"],
                                                "%d-%m-%Y %H:%M:%S")
                download_path = os.path.join(media["directory"],
                                             media["filename"])
                timestamp = date_object.timestamp()
                if not overwrite_files:
                    if main_helper.check_for_dupe_file(download_path,
                                                       content_length):
                        main_helper.format_image(download_path, timestamp)
                        return_bool = False
                        break
                r = main_helper.json_request(session,
                                             link,
                                             stream=True,
                                             json_format=False)
                if not isinstance(r, requests.Response):
                    return_bool = False
                    count += 1
                    continue
                downloader = main_helper.downloader(r, download_path, count)
                if not downloader:
                    count += 1
                    continue
                main_helper.format_image(download_path, timestamp)
                log_download.info("Link: {}".format(link))
                log_download.info("Path: {}".format(download_path))
                break
        return return_bool
Code Example #4
File: starsavn.py Project: mikpim01/OnlyFans-1
    def download(self, post: format_content.post_item, api):
        return_bool = True
        for media in post.medias:
            if not overwrite_files and media.downloaded:
                continue
            count = 0
            session = media.session
            if not session:
                continue
            while count < 11:
                links = media.links

                def choose_link(session, links):
                    for link in links:
                        r = api.json_request(link, session, "HEAD",
                                             stream=True, json_format=False)
                        if not isinstance(r, requests.Response):
                            continue

                        header = r.headers
                        content_length = header.get('content-length')
                        if not content_length:
                            continue
                        content_length = int(content_length)
                        return [link, content_length]
                result = choose_link(session, links)
                if not result:
                    count += 1
                    continue
                link = result[0]
                content_length = result[1]
                media.size = content_length
                date_object = datetime.strptime(
                    post.postedAt, "%d-%m-%Y %H:%M:%S")
                download_path = os.path.join(
                    media.directory, media.filename)
                timestamp = date_object.timestamp()
                if not overwrite_files:
                    if main_helper.check_for_dupe_file(download_path, content_length):
                        main_helper.format_image(download_path, timestamp)
                        return_bool = False
                        media.downloaded = True
                        break
                r = api.json_request(
                    link, session, stream=True, json_format=False)
                if not isinstance(r, requests.Response):
                    return_bool = False
                    count += 1
                    continue
                downloader = main_helper.downloader(r, download_path, count)
                if not downloader:
                    count += 1
                    continue
                main_helper.format_image(download_path, timestamp)
                log_download.info("Link: {}".format(link))
                log_download.info("Path: {}".format(download_path))
                media.downloaded = True
                break
        return return_bool
Code Example #5
 def download(thread, session, directory):
     thread_directory = thread["directory"]
     metadata_directory = os.path.join(
         thread_directory, "Metadata")
     os.makedirs(metadata_directory, exist_ok=True)
     metadata_filepath = os.path.join(metadata_directory, "Posts.json")
     with open(os.path.join(metadata_filepath), 'w') as outfile:
         json.dump(thread, outfile)
     return_bool = True
     medias = thread["posts"]
     for media in medias:
         count = 0
         while count < 11:
             if "download_path" not in media:
                 count += 1
                 continue
             ext = media["ext"].replace(".", "")
             filename = str(media["tim"])+"."+ext
             link = "http://i.4cdn.org/" + board_name + "/" + filename
             r = main_helper.json_request(
                 session, link, "HEAD", True, False)
             if not isinstance(r, requests.Response):
                 return_bool = False
                 count += 1
                 continue
             header = r.headers
             content_length = header.get('content-length')
             if not content_length:
                 count += 1
                 continue
             content_length = int(content_length)
             download_path = media["download_path"]
             timestamp = media["time"]
             if not overwrite_files:
                 if main_helper.check_for_dupe_file(download_path, content_length):
                     return_bool = False
                     break
             r = main_helper.json_request(session, link, "GET", True, False)
             if not isinstance(r, requests.Response):
                 return_bool = False
                 count += 1
                 continue
             delete = False
             try:
                 with open(download_path, 'wb') as f:
                     delete = True
                     for chunk in r.iter_content(chunk_size=1024):
                         if chunk:  # filter out keep-alive new chunks
                             f.write(chunk)
             except (ConnectionResetError) as e:
                 if delete:
                     os.unlink(download_path)
                 main_helper.log_error.exception(e)
                 count += 1
                 continue
             except (requests.exceptions.ConnectionError, requests.exceptions.ChunkedEncodingError) as e:
                 count += 1
                 continue
             except Exception as e:
                 if delete:
                     os.unlink(download_path)
                 main_helper.log_error.exception(
                     str(e) + "\n Tries: "+str(count))
                 count += 1
                 continue
             main_helper.format_image(download_path, timestamp)
             log_download.info("Link: {}".format(link))
             log_download.info("Path: {}".format(download_path))
             break
     return return_bool
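
Before downloading, every example calls a check_for_dupe_file(download_path, content_length) helper to decide whether the file is already present; its implementation is not shown on this page. Comparing the size of an existing file against the server-reported content-length is one plausible way such a check could work. In the sketch below only the name and arguments come from the examples; the body is an assumption:

    import os


    def check_for_dupe_file(download_path, content_length):
        # True if a file of exactly the expected size already exists on disk.
        return (
            os.path.isfile(download_path)
            and os.path.getsize(download_path) == int(content_length)
        )
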
Code Example #6
    def download(medias, session, directory, username):
        return_bool = True
        for media in medias:
            count = 0
            session = media["session"]
            while count < 11:
                links = media["links"]

                def choose_link(session, links):
                    for link in links:
                        r = json_request(session,
                                         link,
                                         "HEAD",
                                         stream=True,
                                         json_format=False)
                        if not r:
                            continue

                        header = r.headers
                        content_length = header.get("content-length")
                        if not content_length:
                            continue
                        content_length = int(content_length)
                        return [link, content_length]

                result = choose_link(session, links)
                if not result:
                    count += 1
                    continue
                link = result[0]
                content_length = result[1]
                date_object = datetime.strptime(media["postedAt"],
                                                "%d-%m-%Y %H:%M:%S")
                download_path = os.path.join(media["directory"],
                                             media["filename"])
                timestamp = date_object.timestamp()
                if not overwrite_files:
                    if check_for_dupe_file(download_path, content_length):
                        format_image(download_path, timestamp)
                        return_bool = False
                        break
                r = json_request(session, link, stream=True, json_format=False)
                if not r:
                    return_bool = False
                    count += 1
                    continue
                delete = False
                try:
                    with open(download_path, 'wb') as f:
                        delete = True
                        for chunk in r.iter_content(chunk_size=1024):
                            if chunk:  # filter out keep-alive new chunks
                                f.write(chunk)
                except (ConnectionResetError) as e:
                    if delete:
                        os.unlink(download_path)
                    count += 1
                    continue
                except (requests.exceptions.ConnectionError,
                        requests.exceptions.ChunkedEncodingError) as e:
                    count += 1
                    continue
                except Exception as e:
                    if delete:
                        os.unlink(download_path)
                    log_error.exception(str(e) + "\n Tries: " + str(count))
                    count += 1
                    continue
                format_image(download_path, timestamp)
                log_download.info("Link: {}".format(link))
                log_download.info("Path: {}".format(download_path))
                break
        return return_bool
Code Example #7
    def download(thread, session, directory):
        os.makedirs(directory, exist_ok=True)
        return_bool = True
        posts = thread["posts"]
        directory = thread["directory"]
        metadata_directory = os.path.join(
            directory, "Metadata")
        os.makedirs(metadata_directory, exist_ok=True)
        metadata_filepath = os.path.join(metadata_directory, "Posts.json")
        with open(os.path.join(metadata_filepath), 'w') as outfile:
            json.dump(thread, outfile)
        for post in posts:
            for media in post["files"]:
                count = 0
                while count < 11:
                    if "download_path" not in media:
                        continue
                    link = "https://bbw-chan.nl" + media["path"]
                    r = main_helper.json_request(
                        session, link, "HEAD", True, False)
                    if not isinstance(r, requests.Response):
                        return_bool = False
                        count += 1
                        continue

                    header = r.headers
                    content_length = header.get('content-length')
                    if not content_length:
                        count += 1
                        continue
                    content_length = int(content_length)
                    download_path = media["download_path"]
                    timestamp = post["creation"]
                    if not overwrite_files:
                        if main_helper.check_for_dupe_file(download_path, content_length):
                            return_bool = False
                            break
                    r = main_helper.json_request(
                        session, link, "GET", True, False)
                    if not isinstance(r, requests.Response):
                        return_bool = False
                        count += 1
                        continue
                    os.makedirs(directory, exist_ok=True)
                    delete = False
                    try:
                        with open(download_path, 'wb') as f:
                            delete = True
                            for chunk in r.iter_content(chunk_size=1024):
                                if chunk:  # filter out keep-alive new chunks
                                    f.write(chunk)
                    except (ConnectionResetError) as e:
                        if delete:
                            os.unlink(download_path)
                        count += 1
                        continue
                    except (requests.exceptions.ConnectionError, requests.exceptions.ChunkedEncodingError) as e:
                        count += 1
                        continue
                    except Exception as e:
                        if delete:
                            os.unlink(download_path)
                        count += 1
                        continue
                    main_helper.format_image(download_path, timestamp)
                    log_download.info("Link: {}".format(link))
                    log_download.info("Path: {}".format(download_path))
                    break
        return return_bool
Code Example #8
    def download(media, session):
        return_bool = True
        count = 0
        while count < 11:
            links = media["links"]

            def choose_link(session, links):
                for link in links:
                    r = main_helper.json_request(session, link, "HEAD", True,
                                                 False)
                    if not isinstance(r, requests.Response):
                        continue

                    header = r.headers
                    content_length = header.get("content-length")
                    if not content_length:
                        continue
                    content_length = int(content_length)
                    return [link, content_length]

            result = choose_link(session, links)
            if not result:
                count += 1
                continue
            link = result[0]
            content_length = result[1]
            date_object = datetime.strptime(media["postedAt"],
                                            "%d-%m-%Y %H:%M:%S")
            download_path = media["download_path"]
            timestamp = date_object.timestamp()
            if not overwrite_files:
                if main_helper.check_for_dupe_file(download_path,
                                                   content_length):
                    main_helper.format_image(download_path, timestamp)
                    return_bool = False
                    count += 1
                    break
            r = main_helper.json_request(session, link, "GET", True, False)
            if not isinstance(r, requests.Response):
                return_bool = False
                count += 1
                continue
            delete = False
            try:
                with open(download_path, 'wb') as f:
                    delete = True
                    for chunk in r.iter_content(chunk_size=1024):
                        if chunk:  # filter out keep-alive new chunks
                            f.write(chunk)
            except (ConnectionResetError) as e:
                if delete:
                    os.unlink(download_path)
                count += 1
                continue
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.ChunkedEncodingError) as e:
                count += 1
                continue
            except Exception as e:
                if delete:
                    os.unlink(download_path)
                count += 1
                continue
            main_helper.format_image(download_path, timestamp)
            log_download.info("Link: {}".format(link))
            log_download.info("Path: {}".format(download_path))
            break
        return return_bool