Code example #1
def get_replay_comments(comments_json_file):
    try:
        comments_downloader = CommentsDownloader(destination_file=comments_json_file)
        comments_downloader.get_replay()
        try:
            if comments_downloader.comments:
                comments_log_file = comments_json_file.replace('.json', '.log')
                comment_errors, total_comments = CommentsDownloader.generate_log(
                    comments_downloader.comments, pil.livestream_obj.get('published_time'), comments_log_file,
                    comments_delay=0)
                if total_comments == 1:
                    logger.info("Successfully saved 1 comment to logfile.")
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
                else:
                    if comment_errors:
                        logger.warn(
                            "Successfully saved {:s} comments but {:s} comments are (partially) missing.".format(
                                str(total_comments), str(comment_errors)))
                    else:
                        logger.info("Successfully saved {:s} comments.".format(str(total_comments)))
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
            else:
                logger.info("There are no available comments to save.")
                return False
        except Exception as e:
            logger.error('Could not save comments to logfile: {:s}'.format(str(e)))
            return False
    except KeyboardInterrupt as e:
        logger.binfo("Downloading replay comments has been aborted.")
        return False
Code example #2
File: auth.py Project: zmike808/PyInstaLive
def onlogin_callback(api, cookie_file):
    cache_settings = api.settings
    with open(cookie_file, 'w') as outfile:
        json.dump(cache_settings, outfile, default=to_json)
        logger.info('New cookie file was made: {0!s}'.format(
            os.path.basename(cookie_file)))
        logger.separator()
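
The to_json function passed as default= above (and the from_json hook used by authenticate in code example #18) is not part of this excerpt. The instagram_private_api examples handle the client settings' byte values with a small pair of converters along the following lines; the bodies below are a sketch based on that pattern, not code taken from this project.

import codecs

def to_json(python_object):
    # Encode bytes values (e.g. device keys) as base64 so json.dump can store them.
    if isinstance(python_object, bytes):
        return {'__class__': 'bytes',
                '__value__': codecs.encode(python_object, 'base64').decode()}
    raise TypeError(repr(python_object) + ' is not JSON serializable')

def from_json(json_object):
    # Reverse of to_json: turn the base64 marker dict back into bytes.
    if json_object.get('__class__') == 'bytes':
        return codecs.decode(json_object['__value__'].encode(), 'base64')
    return json_object
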
Code example #3
File: dlfuncs.py Project: tinymoss/PyInstaLive
def merge_segments():
    try:
        if pil.run_at_finish:
            try:
                thread = threading.Thread(target=helpers.run_command,
                                          args=(pil.run_at_finish, ))
                thread.daemon = True
                thread.start()
                logger.binfo("Launched finish command: {:s}".format(
                    pil.run_at_finish))
            except Exception as e:
                logger.warn('Could not execute command: {:s}'.format(str(e)))

        live_mp4_file = '{}{}_{}_{}_{}_live.mp4'.format(
            pil.dl_path, pil.datetime_compat, pil.dl_user,
            pil.livestream_obj.get('id'), pil.epochtime)

        live_segments_path = os.path.normpath(
            pil.broadcast_downloader.output_dir)

        if pil.segments_json_thread_worker and pil.segments_json_thread_worker.is_alive(
        ):
            pil.segments_json_thread_worker.join()

        if pil.comment_thread_worker and pil.comment_thread_worker.is_alive():
            logger.info("Waiting for comment downloader to finish.")
            pil.comment_thread_worker.join()
        logger.info('Merging downloaded files into video.')
        try:
            pil.broadcast_downloader.stitch(
                live_mp4_file, cleartempfiles=pil.clear_temp_files)
            logger.info('Successfully merged downloaded files into video.')
            if pil.clear_temp_files:
                helpers.remove_temp_folder()
            helpers.remove_lock()
        except ValueError as e:
            logger.separator()
            logger.error('Could not merge downloaded files: {:s}'.format(
                str(e)))
            if os.listdir(live_segments_path):
                logger.separator()
                logger.binfo(
                    "Segment directory is not empty. Trying to merge again.")
                logger.separator()
                pil.assemble_arg = live_mp4_file.replace(
                    ".mp4", "_downloads.json")
                assembler.assemble(user_called=False)
            else:
                logger.separator()
                logger.error(
                    "Segment directory is empty. There is nothing to merge.")
                logger.separator()
            helpers.remove_lock()
        except Exception as e:
            logger.error('Could not merge downloaded files: {:s}'.format(
                str(e)))
            helpers.remove_lock()
    except KeyboardInterrupt:
        logger.binfo('Aborted merging process, no video was created.')
        helpers.remove_lock()
Code example #4
File: dlfuncs.py Project: tinymoss/PyInstaLive
def iterate_users(user_list):
    for user in user_list:
        try:
            if os.path.isfile(os.path.join(pil.dl_path, user + '.lock')):
                logger.warn(
                    "Lock file is already present for '{:s}', there is probably another download "
                    "ongoing!".format(user))
                logger.warn(
                    "If this is not the case, manually delete the file '{:s}' and try again."
                    .format(user + '.lock'))
            else:
                logger.info(
                    "Launching daemon process for '{:s}'.".format(user))
                start_result = helpers.run_command(
                    "pyinstalive -d {:s} -cp '{:s}' -dp '{:s}' {:s} {:s} {:s}".
                    format(user, pil.config_path, pil.dl_path,
                           '--no-lives' if not pil.dl_lives else '',
                           '--no-replays' if not pil.dl_replays else '',
                           '--no-heartbeat' if not pil.do_heartbeat else ''))
                if start_result:
                    logger.warn("Could not start process: {:s}".format(
                        str(start_result)))
                else:
                    logger.info("Process started successfully.")
            logger.separator()
            time.sleep(2)
        except Exception as e:
            logger.warn("Could not start process: {:s}".format(str(e)))
        except KeyboardInterrupt:
            logger.binfo('The process launching has been aborted by the user.')
            logger.separator()
            break
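
helpers.run_command is not shown in these excerpts, but the call sites imply its contract: it returns a falsy value when the child process was launched and an error string otherwise. A minimal sketch under that assumption (illustrative, not the project's actual helper):

import os
import shlex
import subprocess

def run_command(command):
    # Launch the command in the background; the caller only cares whether
    # starting it succeeded, not about its output.
    try:
        with open(os.devnull, 'w') as devnull:
            subprocess.Popen(shlex.split(command), stdout=devnull, stderr=subprocess.STDOUT)
        return None  # falsy -> "Process started successfully."
    except Exception as e:
        return str(e)  # truthy -> logged as "Could not start process: ..."
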
Code example #5
def show_info():
    cookie_files = []
    cookie_from_config = ''
    try:
        for file in os.listdir(os.getcwd()):
            if file.endswith(".json"):
                with open(file) as data_file:
                    try:
                        json_data = json.load(data_file)
                        if json_data.get('created_ts'):
                            cookie_files.append(file)
                    except Exception as e:
                        pass
            if pil.ig_user == file.replace(".json", ''):
                cookie_from_config = file
    except Exception as e:
        logger.warn("Could not check for cookie files: {:s}".format(str(e)))
        logger.whiteline()
    logger.info("To see all the available arguments, use the -h argument.")
    logger.whiteline()
    logger.info("PyInstaLive version:        {:s}".format(
        Constants.SCRIPT_VER))
    logger.info("Python version:             {:s}".format(
        Constants.PYTHON_VER))
    if not command_exists("ffmpeg"):
        logger.error("FFmpeg framework:           Not found")
    else:
        logger.info("FFmpeg framework:           Available")

    if len(cookie_from_config) > 0:
        logger.info(
            "Cookie files:               {:s} ({:s} matches config user)".
            format(str(len(cookie_files)), cookie_from_config))
    elif len(cookie_files) > 0:
        logger.info("Cookie files:               {:s}".format(
            str(len(cookie_files))))
    else:
        logger.warn("Cookie files:               None found")

    logger.info("CLI supports color:         {:s}".format(
        "No" if not logger.supports_color() else "Yes"))
    logger.info("Command to run at start:    {:s}".format(
        "None" if not pil.run_at_start else pil.run_at_start))
    logger.info("Command to run at finish:   {:s}".format(
        "None" if not pil.run_at_finish else pil.run_at_finish))

    if os.path.exists(pil.config_path):
        logger.info("Config file contents:")
        logger.whiteline()
        with open(pil.config_path) as f:
            for line in f:
                logger.plain("    {:s}".format(line.rstrip()))
    else:
        logger.error("Config file:         Not found")
    logger.whiteline()
    logger.info("End of PyInstaLive information screen.")
    logger.separator()
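
command_exists, used above to detect FFmpeg (and elsewhere to check for the pyinstalive command), is also not included in the excerpt. A stand-in built on shutil.which would behave the same way for these checks; treat it as a sketch rather than the project's implementation:

import shutil

def command_exists(command):
    # True if the executable can be resolved on the current PATH.
    return shutil.which(command) is not None
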
Code example #6
def get_user_id():
    is_user_id = False
    user_id = None
    try:
        user_id = int(pil.dl_user)
        is_user_id = True
    except ValueError:
        try:
            user_res = pil.ig_api.username_info(pil.dl_user)
            if pil.verbose:
                logger.plain(json.dumps(user_res))
            user_id = user_res.get('user', {}).get('pk')
        except ClientConnectionError as cce:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(
                    pil.dl_user, cce.code, str(cce)))
            if "getaddrinfo failed" in str(cce):
                logger.error(
                    'Could not resolve host, check your internet connection.')
            if "timed out" in str(cce):
                logger.error(
                    'The connection timed out, check your internet connection.'
                )
        except ClientThrottledError as cte:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(
                    pil.dl_user, cte.code, str(cte)))
        except ClientError as ce:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(
                    pil.dl_user, ce.code, str(ce)))
            if "Not Found" in str(ce):
                logger.error('The specified user does not exist.')
        except Exception as e:
            logger.error("Could not get user info for '{:s}': {:s}".format(
                pil.dl_user, str(e)))
        except KeyboardInterrupt:
            logger.binfo(
                "Aborted getting user info for '{:s}', exiting.".format(
                    pil.dl_user))
    if user_id and is_user_id:
        logger.info(
            "Getting info for '{:s}' successful. Assuming input is an user Id."
            .format(pil.dl_user))
        logger.separator()
        return user_id
    elif user_id:
        logger.info("Getting info for '{:s}' successful.".format(pil.dl_user))
        logger.separator()
        return user_id
    else:
        return None
Code example #7
def download_following():
    try:
        is_checking = ''
        if pil.dl_lives and pil.dl_replays:
            is_checking = 'livestreams or replays'
        elif pil.dl_lives and not pil.dl_replays:
            is_checking = 'livestreams'
        elif not pil.dl_lives and pil.dl_replays:
            is_checking = 'replays'
        logger.info(
            "Checking following users for any {:s}.".format(is_checking))
        broadcast_f_list = pil.ig_api.reels_tray()
        if pil.verbose:
            logger.plain(json.dumps(broadcast_f_list))
        usernames_available_livestreams = []
        usernames_available_replays = []
        if broadcast_f_list['broadcasts'] and pil.dl_lives:
            for broadcast_f in broadcast_f_list['broadcasts']:
                username = broadcast_f['broadcast_owner']['username']
                if username not in usernames_available_livestreams:
                    usernames_available_livestreams.append(username)

        if broadcast_f_list.get('post_live', {}).get('post_live_items',
                                                     []) and pil.dl_replays:
            for broadcast_r in broadcast_f_list.get('post_live', {}).get(
                    'post_live_items', []):
                for broadcast_f in broadcast_r.get("broadcasts", []):
                    username = broadcast_f['broadcast_owner']['username']
                    if username not in usernames_available_replays:
                        usernames_available_replays.append(username)
        logger.separator()
        available_total = list(usernames_available_livestreams)
        available_total.extend(x for x in usernames_available_replays
                               if x not in available_total)
        if available_total:
            logger.info(
                "The following users have available {:s}.".format(is_checking))
            logger.info(', '.join(available_total))
            logger.separator()
            iterate_users(available_total)
        else:
            logger.info(
                "There are currently no available {:s}.".format(is_checking))
            logger.separator()
    except Exception as e:
        logger.error("Could not finish checking following users: {:s}".format(
            str(e)))
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The checking process has been aborted by the user.')
        logger.separator()
Code example #8
def download_live(target_room_id):
    if not os.path.exists(
            os.path.join(ptts.dl_path, ptts.tt_target_user, 'broadcasts')):
        os.makedirs(
            os.path.join(ptts.dl_path, ptts.tt_target_user, 'broadcasts'))

    download_path = os.path.join(ptts.dl_path, ptts.tt_target_user,
                                 'broadcasts')
    logger.separator()
    logger.info("Checking for ongoing livestreams.")
    logger.separator()
    s = requests.Session()
    s.headers.update({
        'User-Agent':
        'Mozilla/5.0 (X11; Linux i686; rv:60.0) Gecko/20100101 Firefox/60.0',
    })
    r = s.get(Constants.LIVE_WEB_URL.format(target_room_id))
    r.raise_for_status()
    live = r.text
    live_room_id = re.search(r'(stream-)(.*)(?=\/playlist)', live)
    if live_room_id:
        live_hls_url = Constants.LIVE_HLS_ENDP.format(live_room_id[2])
        logger.info("HLS url: {:s}".format(live_hls_url))
        logger.separator()
        logger.info("HLS url retrieved. Calling youtube-dl.")
        helpers.call_ytdl(
            live_hls_url,
            os.path.join(download_path,
                         str(live_room_id[2]) + "_" + ptts.epochtime))
    else:
        logger.info("There is no available livestream for this user.")
        logger.separator()
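
The indexing live_room_id[2] above uses re.Match.__getitem__: index 0 is the whole match, 1 is the "(stream-)" group, and 2 is the room id captured before "/playlist". A quick illustration with a made-up playlist URL (only the pattern comes from the code above; the URL is hypothetical):

import re

sample = "https://example.com/hls/stream-1234567890abcdef/playlist.m3u8"  # hypothetical
match = re.search(r'(stream-)(.*)(?=\/playlist)', sample)
if match:
    print(match[0])  # 'stream-1234567890abcdef' (entire match)
    print(match[1])  # 'stream-'
    print(match[2])  # '1234567890abcdef' -> plugged into Constants.LIVE_HLS_ENDP
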
Code example #9
def print_status(sep=True):
    if pil.do_heartbeat:
        heartbeat_info = pil.ig_api.broadcast_heartbeat_and_viewercount(pil.livestream_obj.get('id'))
    viewers = pil.livestream_obj.get('viewer_count', 0) + 1
    if sep:
        logger.separator()
    else:
        logger.info('Username    : {:s}'.format(pil.dl_user))
    logger.info('Viewers     : {:s} watching'.format(str(int(viewers))))
    logger.info('Airing time : {:s}'.format(get_stream_duration(0)))
    if pil.do_heartbeat:
        logger.info('Status      : {:s}'.format(heartbeat_info.get('broadcast_status').title()))
        return heartbeat_info.get('broadcast_status') not in ['active', 'interrupted']
    else:
        return None
Code example #10
def new_config():
    try:
        if os.path.exists(pil.config_path):
            logger.info("A configuration file is already present:")
            logger.whiteline()
            with open(pil.config_path) as f:
                for line in f:
                    logger.plain("    {:s}".format(line.rstrip()))
            logger.whiteline()
            logger.info(
                "To create a default config file, delete 'pyinstalive.ini' and run this script again."
            )
            logger.separator()
        else:
            try:
                logger.warn(
                    "Could not find configuration file, creating a default one."
                )
                config_file = open(pil.config_path, "w")
                config_file.write(
                    Constants.CONFIG_TEMPLATE.format(os.getcwd()).strip())
                config_file.close()
                logger.warn(
                    "Edit the created 'pyinstalive.ini' file and run this script again."
                )
                logger.separator()
                return
            except Exception as e:
                logger.error(
                    "Could not create default config file: {:s}".format(
                        str(e)))
                logger.warn(
                    "You must manually create and edit it with the following template: "
                )
                logger.whiteline()
                for line in Constants.CONFIG_TEMPLATE.strip().splitlines():
                    logger.plain("    {:s}".format(line.rstrip()))
                logger.whiteline()
                logger.warn(
                    "Save it as 'pyinstalive.ini' and run this script again.")
                logger.separator()
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
        logger.warn(
            "If you don't have a configuration file, manually create and edit one with the following template:"
        )
        logger.whiteline()
        logger.plain(Constants.CONFIG_TEMPLATE)
        logger.whiteline()
        logger.warn("Save it as 'pyinstalive.ini' and run this script again.")
        logger.separator()
Code example #11
def validate_inputs(config, args):
    error_arr = []
    try:
        config.read(os.path.join(os.getcwd(), "settings.ini"))
        logger.banner()

        ptts.tt_username = config.get('ptts', 'username')
        ptts.tt_password = config.get('ptts', 'password')
        ptts.dl_path = config.get('ptts', 'download_path')
        ptts.args = args

        if not ptts.tt_username or not len(ptts.tt_username):
            raise Exception(
                "Invalid value for 'username'. This value is required.")

        if not ptts.tt_password or not len(ptts.tt_password):
            raise Exception(
                "Invalid value for 'password'. This value is required.")

        if not ptts.dl_path.endswith('/'):
            ptts.dl_path = ptts.dl_path + '/'
        if not ptts.dl_path or not os.path.exists(ptts.dl_path):
            ptts.dl_path = os.getcwd()
            error_arr.append(["dl_path", ptts.dl_path])

        if error_arr:
            for error in error_arr:
                logger.warn(
                    "Invalid value for '{:s}'. Using default value: {:s}".
                    format(error[0], error[1]))
                logger.separator()

        if args.download:
            ptts.tt_target_user = args.download
        elif args.hashtag:
            ptts.tt_target_hashtag = args.hashtag
        elif args.getfollowing:
            ptts.tt_target_user = args.getfollowing
        elif args.livestream:
            ptts.tt_target_user = args.livestream
        elif args.single:
            ptts.tt_target_id = args.single
        else:
            logger.error(
                "Missing --download or --single argument. Either of these arguments is required."
            )
            logger.separator()
            return False

        return True
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
        logger.error(
            "Make sure the config file and given arguments valid and try again."
        )
        logger.separator()
        return False
Code example #12
def call_ytdl(url, filename):
    try:
        subprocess.call('youtube-dl -o "{}.mp4" "{}"'.format(filename, url),
                        shell=True)
        return True
    except Exception as e:
        logger.separator()
        logger.error("Something went wrong: " + str(e))
        logger.separator()
        if os.path.isfile(filename + '.mp4.part'):
            os.rename(filename + '.mp4.part', filename + '.mp4')
    except KeyboardInterrupt:
        logger.separator()
        logger.info("The download has been aborted.")
        if os.path.isfile(filename + '.mp4.part'):
            os.rename(filename + '.mp4.part', filename + '.mp4')
        logger.separator()
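
Because the command above is built with str.format and shell=True, a URL or filename containing quotes would break the command line. A slightly safer variant passes an argument list instead; this is a sketch that assumes youtube-dl is on PATH and, for brevity, omits the .mp4.part renaming done above:

import subprocess

def call_ytdl_args(url, filename):
    # Argument-list form: no shell quoting issues with odd characters in url/filename.
    return subprocess.call(['youtube-dl', '-o', filename + '.mp4', url]) == 0
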
Code example #13
File: dlfuncs.py Project: tinymoss/PyInstaLive
def get_live_comments(comments_json_file):
    try:
        comments_downloader = CommentsDownloader(
            destination_file=comments_json_file)
        first_comment_created_at = 0

        try:
            while not pil.broadcast_downloader.is_aborted:
                if 'initial_buffered_duration' not in pil.livestream_obj and pil.broadcast_downloader.initial_buffered_duration:
                    pil.livestream_obj[
                        'initial_buffered_duration'] = pil.broadcast_downloader.initial_buffered_duration
                    comments_downloader.broadcast = pil.livestream_obj
                first_comment_created_at = comments_downloader.get_live(
                    first_comment_created_at)
        except ClientError as e:
            if not 'media has been deleted' in e.error_response:
                logger.warn("Comment collection ClientError: %d %s" %
                            (e.code, e.error_response))

        try:
            if comments_downloader.comments:
                comments_downloader.save()
                comments_log_file = comments_json_file.replace('.json', '.log')
                comment_errors, total_comments = CommentsDownloader.generate_log(
                    comments_downloader.comments,
                    pil.epochtime,
                    comments_log_file,
                    comments_delay=pil.broadcast_downloader.
                    initial_buffered_duration)
                if len(comments_downloader.comments) == 1:
                    logger.info("Successfully saved 1 comment.")
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
                else:
                    if comment_errors:
                        logger.warn(
                            "Successfully saved {:s} comments but {:s} comments are (partially) missing."
                            .format(str(total_comments), str(comment_errors)))
                    else:
                        logger.info("Successfully saved {:s} comments.".format(
                            str(total_comments)))
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
            else:
                logger.info("There are no available comments to save.")
                logger.separator()
                return False
        except Exception as e:
            logger.error('Could not save comments: {:s}'.format(str(e)))
            return False
    except KeyboardInterrupt as e:
        logger.binfo("Downloading livestream comments has been aborted.")
        return False
Code example #14
File: organize.py Project: siavashyj/PyInstaLive
def organize_videos():

    try:
        # Make a variable equal to the names of the files in the current directory.
        download_path_files = os.listdir(pil.dl_path)

        # Count the amount of files moved and not moved because they already exist etc.
        not_moved = 0
        has_moved = 0

        # The downloaded livestream(s) are in MP4 format.
        video_format = ['mp4']

        # Find the MP4 files and save them in a variable called 'filenames'.
        filenames = [
            filename for filename in download_path_files
            if filename.split('.')[-1] in video_format
        ]

        if len(filenames) == 0:
            logger.binfo("No files were found to organize.")
            logger.separator()
            return

        for filename in filenames:
            # Split the filenames into parts.
            filename_parts = filename.split('_')

            # Get the date from the filename.
            date = datetime.strptime(filename_parts[0],
                                     '%Y%m%d').strftime('%d-%m-%Y')

            # Get the username from the filename.
            username = '_'.join(filename_parts[1:-3])

            # Get the time from the unix timestamp.
            time_from_unix_timestamp = time.strftime(
                '%I.%M%p', time.localtime(int(filename_parts[-2])))

            # Remove the leading zero from single-digit hours.
            if float(time_from_unix_timestamp[0:2]) < 10:
                time_from_unix_timestamp = time_from_unix_timestamp[1:]

            # Get the last part of the filename ("live.mp4" or "replay.mp4").
            live_or_replay = filename_parts[-1]

            # The path of each original filename is as follows:
            old_filename_path = os.path.join(pil.dl_path, filename)

            # We want to change the format of each filename to:
            new_filename_format = date + " " + username + " [" + time_from_unix_timestamp + "] " + live_or_replay

            # The path of each new filename is as follows:
            new_filename_path = os.path.join(pil.dl_path, new_filename_format)

            # Change the filenames.
            os.rename(old_filename_path, new_filename_path)

        # Now that the files have been renamed, we need to rescan the files in the directory.
        download_path_files = os.listdir(pil.dl_path)

        new_filenames = [
            filename for filename in download_path_files
            if filename.split('.')[-1] in video_format
        ]

        # We want a dictionary where the filenames are the keys
        # and the usernames are the values.
        filenames_to_usernames = {}

        # Populate the dictionary with a loop.
        for filename in new_filenames:
            # Split the filenames into parts so we get just the usernames:
            filename_parts = filename.split()
            # This is how to get the usernames from the split filenames:
            username = filename_parts[1]
            # Filename = key and username = value:
            filenames_to_usernames[filename] = username

        # We only want one folder for each username, so convert the list into a set to remove duplicates.
        usernames = set(filenames_to_usernames.values())

        # Make a folder for each username.
        for username in usernames:
            username_path = os.path.join(pil.dl_path, username)
            if not os.path.isdir(username_path):
                os.mkdir(username_path)

        # Move the videos into the folders
        for filename, username in filenames_to_usernames.items():
            filename_base = os.path.basename(filename)
            source_path = os.path.join(pil.dl_path, filename)
            destination_path = os.path.join(pil.dl_path, username,
                                            filename_base)
            if not os.path.isfile(destination_path):
                try:
                    shutil.move(source_path, destination_path)
                    logger.info(
                        "Moved '{:s}' successfully.".format(filename_base))
                    has_moved += 1
                except OSError as oe:
                    logger.warn("Could not move {:s}: {:s}".format(
                        filename_base, str(oe)))
                    not_moved += 1
            else:
                logger.binfo(
                    "Did not move '{:s}' because it already exists.".format(
                        filename_base))
                not_moved += 1

        logger.separator()
        logger.info("{} {} moved.".format(
            has_moved, "file was" if has_moved == 1 else "files were"))
        if not_moved:
            logger.binfo("{} {} not moved.".format(
                not_moved, "file was" if not_moved == 1 else "files were"))
        logger.separator()
    except Exception as e:
        logger.error("Could not organize files: {:s}".format(str(e)))
Code example #15
def assemble(user_called=True, retry_with_zero_m4v=False):
    try:
        ass_json_file = pil.assemble_arg if pil.assemble_arg.endswith(
            ".json") else pil.assemble_arg + ".json"
        ass_mp4_file = os.path.join(
            pil.dl_path,
            os.path.basename(ass_json_file).replace("_downloads", "").replace(
                ".json", ".mp4"))
        ass_segment_dir = pil.assemble_arg if not pil.assemble_arg.endswith(
            ".json") else pil.assemble_arg.replace(".json", "")

        # if pil.verbose:
        #     logger.plain("{}\n{}\n{}".format(ass_json_file, ass_mp4_file, ass_segment_dir))

        broadcast_info = {}
        if not os.path.isdir(ass_segment_dir) or not os.listdir(
                ass_segment_dir):
            logger.error(
                'The segment directory does not exist or does not contain any files: %s'
                % ass_segment_dir)
            logger.separator()
            return
        if not os.path.isfile(ass_json_file):
            logger.warn(
                "No matching json file found for the segment directory, trying to continue without it."
            )
            ass_stream_id = os.listdir(ass_segment_dir)[0].split('-')[0]
            broadcast_info['id'] = ass_stream_id
            broadcast_info['broadcast_status'] = "active"
            broadcast_info['segments'] = {}
        else:
            with open(ass_json_file) as info_file:
                try:
                    broadcast_info = json.load(info_file)
                except Exception as e:
                    logger.warn(
                        "Could not decode json file, trying to continue without it."
                    )
                    ass_stream_id = os.listdir(ass_segment_dir)[0].split(
                        '-')[0]
                    broadcast_info['id'] = ass_stream_id
                    broadcast_info['broadcast_status'] = "active"
                    broadcast_info['segments'] = {}

        if broadcast_info.get('broadcast_status', '') == 'post_live':
            logger.error(
                'Video segment files from replay downloads cannot be assembled.'
            )
            return

        stream_id = str(broadcast_info['id'])

        segment_meta = broadcast_info.get('segments', {})
        if segment_meta:
            all_segments = [
                os.path.join(ass_segment_dir, k)
                for k in broadcast_info['segments'].keys()
            ]
        else:
            all_segments = list(
                filter(
                    os.path.isfile,
                    glob.glob(
                        os.path.join(ass_segment_dir,
                                     '%s-*.m4v' % stream_id))))

        all_segments = sorted(all_segments, key=lambda x: _get_file_index(x))
        sources = []
        audio_stream_format = 'assembled_source_{0}_{1}_mp4.tmp'
        video_stream_format = 'assembled_source_{0}_{1}_m4a.tmp'
        video_stream = ''
        audio_stream = ''
        has_skipped_zero_m4v = False

        if not all_segments:
            logger.error(
                "No video segment files have been found in the specified folder."
            )
            logger.separator()
            return
        else:
            logger.info(
                "Assembling video segment files from specified folder: {}".
                format(ass_segment_dir))

        for segment in all_segments:
            segment = re.sub(r'\?.*$', '', segment)
            if not os.path.isfile(segment.replace('.m4v', '.m4a')):
                logger.warn('Audio segment not found: {0!s}'.format(
                    segment.replace('.m4v', '.m4a')))
                continue

            if segment.endswith('-init.m4v'):
                logger.info('Replacing %s' % segment)
                segment = os.path.join(
                    os.path.dirname(os.path.realpath(__file__)), 'repair',
                    'init.m4v')

            if segment.endswith('-0.m4v') and not retry_with_zero_m4v:
                has_skipped_zero_m4v = True
                continue

            video_stream = os.path.join(
                ass_segment_dir,
                video_stream_format.format(stream_id, len(sources)))
            audio_stream = os.path.join(
                ass_segment_dir,
                audio_stream_format.format(stream_id, len(sources)))

            file_mode = 'ab'

            with open(video_stream,
                      file_mode) as outfile, open(segment, 'rb') as readfile:
                shutil.copyfileobj(readfile, outfile)

            with open(audio_stream, file_mode) as outfile, open(
                    segment.replace('.m4v', '.m4a'), 'rb') as readfile:
                shutil.copyfileobj(readfile, outfile)

        if audio_stream and video_stream:
            sources.append({'video': video_stream, 'audio': audio_stream})

        for n, source in enumerate(sources):
            ffmpeg_binary = os.getenv('FFMPEG_BINARY', 'ffmpeg')
            cmd = [
                ffmpeg_binary, '-loglevel', 'warning', '-y', '-i',
                source['audio'], '-i', source['video'], '-c:v', 'copy', '-c:a',
                'copy', ass_mp4_file
            ]
            #fnull = open(os.devnull, 'w')
            fnull = None
            exit_code = subprocess.call(cmd,
                                        stdout=fnull,
                                        stderr=subprocess.STDOUT)
            if exit_code != 0:
                logger.warn(
                    "FFmpeg exit code not '0' but '{:d}'.".format(exit_code))
                if has_skipped_zero_m4v and not retry_with_zero_m4v:
                    logger.binfo(
                        "*-0.m4v segment was detected but skipped, retrying to assemble video without "
                        "skipping it.")
                    os.remove(source['audio'])
                    os.remove(source['video'])
                    logger.separator()
                    assemble(user_called, retry_with_zero_m4v=True)
                    return
            else:
                logger.info('The video file has been generated: %s' %
                            os.path.basename(ass_mp4_file))
                os.remove(source['audio'])
                os.remove(source['video'])
            if user_called:
                logger.separator()
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
Code example #16
File: organize.py Project: pedrodinisf/PyInstaLive
def organize_files():

    try:
        files = [
            f for f in os.listdir(pil.dl_path)
            if os.path.isfile(os.path.join(pil.dl_path, f))
        ]

        not_moved = 0
        has_moved = 0

        username_regex = r'(?<=\d{8}_)(.*?)(?=_\d)'
        date_regex = r'^\d{8}'
        timestamp_regex = r'_(\d{10})_'
        type_regex = r'(live|replay)'
        raw_file_dict = {}
        new_file_dict = {}

        for file in files:
            username = re.search(username_regex, file)[0]
            date_ts = datetime.strptime(
                re.search(date_regex, file)[0], '%Y%m%d').strftime('%d-%m-%Y')
            time_ts = time.strftime(
                '%I-%M-%S-%p',
                time.localtime(int(re.search(timestamp_regex, file)[1])))
            file_ext = os.path.splitext(file)[1]
            file_type = re.search(type_regex, file)[0]

            new_file = "{:s} {:s} {:s} ({:s}){:s}".format(
                date_ts, time_ts, username, file_type, file_ext)
            raw_file_dict[file] = username
            new_file_dict[file] = new_file

        for filename, username in raw_file_dict.items():
            try:
                os.makedirs(os.path.join(pil.dl_path, username))
            except:
                pass
            source_path = os.path.join(pil.dl_path, filename)
            destination_path = os.path.join(pil.dl_path, username,
                                            new_file_dict.get(filename))
            if not os.path.isfile(destination_path):
                try:
                    shutil.move(source_path, destination_path)
                    logger.info(
                        "Moved and renamed '{:s}' successfully.".format(
                            filename))
                    has_moved += 1
                except OSError as oe:
                    logger.warn("Could not move and rename {:s}: {:s}".format(
                        filename, str(oe)))
                    not_moved += 1
            else:
                logger.binfo(
                    "Did not move and rename '{:s}' because it already exists."
                    .format(filename))
                not_moved += 1

        logger.separator()
        logger.info("{} {} moved.".format(
            has_moved, "file was" if has_moved == 1 else "files were"))
        if not_moved:
            logger.binfo("{} {} not moved.".format(
                not_moved, "file was" if not_moved == 1 else "files were"))
        logger.separator()
    except Exception as e:
        logger.error("Could not organize files: {:s}".format(str(e)))
Code example #17
def download_single(video_id):
    try:
        download_path = os.path.join(ptts.dl_path, video_id + ".mp4")
        if not os.path.isfile(download_path):
            rr = requests.get(Constants.VIDEO_BASE_URL.format(video_id, 1),
                              verify=True,
                              headers=Constants.REQUESTS_VIDEO_UA)
            if rr.status_code == 200:
                open(download_path, 'wb').write(rr.content)
                logger.info("Downloaded video with Id: {}".format(video_id))
            else:
                logger.warn(
                    "Response did not return status 200, was {:d} instead. Trying with lower "
                    "bitrate.".format(rr.status_code))
                rr = requests.get(Constants.VIDEO_BASE_URL.format(video_id, 0),
                                  verify=True,
                                  headers=Constants.REQUESTS_VIDEO_UA)
                if rr.status_code == 200:
                    open(download_path, 'wb').write(rr.content)
                else:
                    logger.warn(
                        "Response did not return status 200, was {:d} instead. Giving up."
                        .format(rr.status_code))
            logger.separator()
        else:
            logger.binfo("This video already exists.")
            logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.info("The download has been aborted.")
        logger.separator()
    except Exception as e:
        logger.separator()
        logger.error("Something went wrong: " + str(e))
        logger.separator()
Code example #18
File: auth.py Project: zmike808/PyInstaLive
def authenticate(username, password, force_use_login_args=False):
    ig_api = None
    try:
        if force_use_login_args:
            pil.ig_user = username
            pil.ig_pass = password
            pil.config_login_overridden = True
            logger.binfo(
                "Overriding configuration file login with -u and -p arguments."
            )
            logger.separator()
        cookie_file = os.path.join(os.path.dirname(pil.config_path),
                                   "{}.json".format(username))
        if not os.path.isfile(cookie_file):
            # settings file does not exist
            logger.warn('Unable to find cookie file: {0!s}'.format(
                os.path.basename(cookie_file)))
            logger.info('Creating a new one.')

            # login new
            ig_api = Client(
                username,
                password,
                on_login=lambda x: onlogin_callback(x, cookie_file),
                proxy=pil.proxy)
            # ig_api = Client(username, password, on_login=lambda x: onlogin_callback(x, cookie_file), proxy=pil.proxy)
            login(ig_api)
        else:
            with open(cookie_file) as file_data:
                cached_settings = json.load(file_data, object_hook=from_json)
            logger.info('Using settings file: {0!s}'.format(cookie_file))

            # device_id = cached_settings.get('device_id')
            # reuse auth cached_settings
            try:
                ig_api = Client(username,
                                password,
                                settings=cached_settings,
                                proxy=pil.proxy)

            except (ClientSentryBlockError, ClientChallengeRequiredError,
                    ClientCheckpointRequiredError, ClientCookieExpiredError,
                    ClientLoginError, ClientError) as e:
                logger.separator()
                logger.warn('Some sort of login exception!')
                if pil.verbose:
                    logger.plain(json.dumps(e.error_response))
                logger.error('Could not login: {:s}'.format(e.error_response))
                logger.error('{:s}'.format(
                    json.loads(e.error_response).get("message",
                                                     e.error_response)))
                logger.error('{:s}'.format(e.error_response))
                logger.separator()

                ig_api = Client(
                    username,
                    password,
                    on_login=lambda x: onlogin_callback(x, cookie_file),
                    proxy=pil.proxy)
                login(ig_api)
                logger.warn('successfully resolved error and logged back in!')

    except (ClientLoginError, ClientError) as e:
        logger.separator()
        if pil.verbose:
            logger.plain(json.dumps(e.error_response))
        logger.error('Could not login: {:s}'.format(e.error_response))
        logger.error('{:s}'.format(
            json.loads(e.error_response).get("message", e.error_response)))
        logger.error('{:s}'.format(e.error_response))
        logger.separator()
    except Exception as e:
        if pil.verbose:
            logger.plain(json.dumps(e))
        if str(e).startswith("unsupported pickle protocol"):
            logger.warn(
                "This cookie file is not compatible with Python {}.".format(
                    sys.version.split(' ')[0][0]))
            logger.warn(
                "Please delete your cookie file '{}.json' and try again.".
                format(username))
        else:
            logger.separator()
            logger.error('Unexpected exception: {:s}'.format(e))
        logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.warn("The user authentication has been aborted.")
        logger.separator()

    if ig_api:
        logger.info('Successfully logged into account: {:s}'.format(
            str(ig_api.authenticated_user_name)))
        if pil.show_cookie_expiry and not force_use_login_args:
            try:
                cookie_expiry = ig_api.cookie_jar.auth_expires
                logger.info('Cookie file expiry date: {:s}'.format(
                    datetime.datetime.fromtimestamp(cookie_expiry).strftime(
                        '%Y-%m-%d at %I:%M:%S %p')))
            except Exception as e:
                logger.warn(
                    'An error occurred while getting the cookie file expiry date: {:s}'
                    .format(str(e)))

        logger.separator()
        return ig_api
    else:
        return None
Code example #19
def validate_inputs(config, args, unknown_args):
    error_arr = []
    banner_shown = False
    try:
        if args.configpath:
            if os.path.isfile(args.configpath):
                pil.config_path = args.configpath
            else:
                logger.banner()
                banner_shown = True
                logger.warn("Custom config path is invalid, falling back to default path: {:s}".format(pil.config_path))
                pil.config_path = os.path.join(os.getcwd(), "pyinstalive.ini")
                logger.separator()


        if not os.path.isfile(pil.config_path):  # Create new config if it doesn't exist
            if not banner_shown:
                logger.banner()
            helpers.new_config()
            return False
        pil.config_path = os.path.realpath(pil.config_path)
        config.read(pil.config_path)

        if args.download:
            pil.dl_user = args.download
            if args.downloadfollowing or args.batchfile:
                logger.banner()
                logger.warn("Please use only one download method. Use -h for more information.")
                logger.separator()
                return False
        elif not args.clean and not args.info and not args.assemble and not args.downloadfollowing and not args.batchfile and not args.organize:
            logger.banner()
            logger.error("Please use a download method. Use -h for more information.")
            logger.separator()
            return False

        if helpers.bool_str_parse(config.get('pyinstalive', 'log_to_file')) == "Invalid":
            pil.log_to_file = True
            error_arr.append(['log_to_file', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'log_to_file')):
            pil.log_to_file = True
        else:
            pil.log_to_file = False

        logger.banner()

        if args.batchfile:
            if os.path.isfile(args.batchfile):
                pil.dl_batchusers = [user.rstrip('\n') for user in open(args.batchfile)]
                if not pil.dl_batchusers:
                    logger.error("The specified file is empty.")
                    logger.separator()
                    return False
                else:
                    logger.info("Downloading {:d} users from batch file.".format(len(pil.dl_batchusers)))
                    logger.separator()
            else:
                logger.error('The specified file does not exist.')
                logger.separator()
                return False

        if unknown_args:
            pil.uargs = unknown_args
            logger.warn("The following unknown argument(s) were provided and will be ignored: ")
            logger.warn('    ' + ' '.join(unknown_args))
            logger.separator()


        pil.ig_user = config.get('pyinstalive', 'username')
        pil.ig_pass = config.get('pyinstalive', 'password')
        pil.dl_path = config.get('pyinstalive', 'download_path')
        pil.run_at_start = config.get('pyinstalive', 'run_at_start')
        pil.run_at_finish = config.get('pyinstalive', 'run_at_finish')
        pil.ffmpeg_path = config.get('pyinstalive', 'ffmpeg_path')
        pil.verbose = config.get('pyinstalive', 'verbose')
        pil.skip_merge = config.get('pyinstalive', 'skip_merge')
        pil.args = args
        pil.config = config
        pil.proxy = config.get('pyinstalive', 'proxy')

        if args.dlpath:
            pil.dl_path = args.dlpath

        if helpers.bool_str_parse(config.get('pyinstalive', 'show_cookie_expiry')) == "Invalid":
            pil.show_cookie_expiry = False
            error_arr.append(['show_cookie_expiry', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'show_cookie_expiry')):
            pil.show_cookie_expiry = True
        else:
            pil.show_cookie_expiry = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'verbose')) == "Invalid":
            pil.verbose = False
            error_arr.append(['verbose', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'verbose')):
            pil.verbose = True
        else:
            pil.verbose = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'skip_merge')) == "Invalid":
            pil.skip_merge = False
            error_arr.append(['skip_merge', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'skip_merge')):
            pil.skip_merge = True
        else:
            pil.skip_merge = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'use_locks')) == "Invalid":
            pil.use_locks = False
            error_arr.append(['use_locks', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'use_locks')):
            pil.use_locks = True
        else:
            pil.use_locks = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'clear_temp_files')) == "Invalid":
            pil.clear_temp_files = False
            error_arr.append(['clear_temp_files', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'clear_temp_files')):
            pil.clear_temp_files = True
        else:
            pil.clear_temp_files = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'do_heartbeat')) == "Invalid":
            pil.do_heartbeat = True
            error_arr.append(['do_heartbeat', 'True'])
        if helpers.bool_str_parse(config.get('pyinstalive', 'do_heartbeat')):
            pil.do_heartbeat = True
        if args.noheartbeat or not helpers.bool_str_parse(config.get('pyinstalive', 'do_heartbeat')):
            pil.do_heartbeat = False
            logger.warn("Getting livestream heartbeat is disabled, this may cause degraded performance.")
            logger.separator()

        if not args.nolives and helpers.bool_str_parse(config.get('pyinstalive', 'download_lives')) == "Invalid":
            pil.dl_lives = True
            error_arr.append(['download_lives', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'download_lives')):
            pil.dl_lives = True
        else:
            pil.dl_lives = False

        if not args.noreplays and helpers.bool_str_parse(config.get('pyinstalive', 'download_replays')) == "Invalid":
            pil.dl_replays = True
            error_arr.append(['download_replays', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'download_replays')):
            pil.dl_replays = True
        else:
            pil.dl_replays = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'download_comments')) == "Invalid":
            pil.dl_comments = True
            error_arr.append(['download_comments', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'download_comments')):
            pil.dl_comments = True
        else:
            pil.dl_comments = False

        if args.nolives:
            pil.dl_lives = False

        if args.noreplays:
            pil.dl_replays = False

        if args.verbose:
            pil.verbose = True
        if args.skip_merge:
            pil.skip_merge = True

        if not pil.dl_lives and not pil.dl_replays:
            logger.error("You have disabled both livestream and replay downloading.")
            logger.error("Please enable at least one of them and try again.")
            logger.separator()
            return False

        if pil.ffmpeg_path:
            if not os.path.isfile(pil.ffmpeg_path):
                pil.ffmpeg_path = None
                cmd = "where" if platform.system() == "Windows" else "which"
                logger.warn("Custom FFmpeg binary path is invalid, falling back to environment variable.")
            else:
                logger.binfo("Overriding FFmpeg binary path: {:s}".format(pil.ffmpeg_path))
        else:
            if not helpers.command_exists('ffmpeg') and not args.info:
                logger.error("FFmpeg framework not found, exiting.")
                logger.separator()
                return False

        if not pil.ig_user or not len(pil.ig_user):
            raise Exception("Invalid value for 'username'. This value is required.")

        if not pil.ig_pass or not len(pil.ig_pass):
            raise Exception("Invalid value for 'password'. This value is required.")

        if not pil.dl_path.endswith('/'):
            pil.dl_path = pil.dl_path + '/'
        if not pil.dl_path or not os.path.exists(pil.dl_path):
            pil.dl_path = os.getcwd() + "/"
            if not args.dlpath:
                error_arr.append(['download_path', os.getcwd() + "/"])
            else:
                logger.warn("Custom config path is invalid, falling back to default path: {:s}".format(pil.dl_path))
                logger.separator()

        if pil.proxy and pil.proxy != '':
            parsed_url = urlparse(pil.proxy)
            if not parsed_url.netloc or not parsed_url.scheme:
                error_arr.append(['proxy', 'None'])
                pil.proxy = None

        if error_arr:
            for error in error_arr:
                logger.warn("Invalid value for '{:s}'. Using default value: {:s}".format(error[0], error[1]))
                logger.separator()

        if args.info:
            helpers.show_info()
            return False
        elif args.clean:
            helpers.clean_download_dir()
            return False
        elif args.assemble:
            pil.assemble_arg = args.assemble
            assembler.assemble()
            return False
        elif args.organize:
            organize.organize_videos()
            return False

        return True
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
        logger.error("Make sure the config file and given arguments are valid and try again.")
        logger.separator()
        return False
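
helpers.bool_str_parse is referenced throughout but never shown. Its contract can be read off the call sites: it returns True or False for recognizable boolean strings and the literal string "Invalid" (which is truthy, hence the explicit == "Invalid" checks) for anything else. A sketch consistent with that contract; the accepted spellings are an assumption:

def bool_str_parse(bool_str):
    # Map common truthy/falsy spellings from the .ini file onto real booleans;
    # anything else is flagged so the caller can fall back to a default value.
    value = bool_str.strip().lower()
    if value in ("true", "yes", "y", "1"):
        return True
    if value in ("false", "no", "n", "0"):
        return False
    return "Invalid"
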
Code example #20
def run():
    pil.initialize()
    logging.disable(logging.CRITICAL)
    config = configparser.ConfigParser()
    parser = argparse.ArgumentParser(
        description="You are running PyInstaLive {:s} using Python {:s}".format(Constants.SCRIPT_VER,
                                                                                Constants.PYTHON_VER))

    parser.add_argument('-u', '--username', dest='username', type=str, required=False,
                        help="Instagram username to login with.")
    parser.add_argument('-p', '--password', dest='password', type=str, required=False,
                        help="Instagram password to login with.")
    parser.add_argument('-d', '--download', dest='download', type=str, required=False,
                        help="The username of the user whose livestream or replay you want to save.")
    parser.add_argument('-b', '--batch-file', dest='batchfile', type=str, required=False,
                        help="Read a text file of usernames to download livestreams or replays from.")
    parser.add_argument('-i', '--info', dest='info', action='store_true', help="View information about PyInstaLive.")
    parser.add_argument('-nr', '--no-replays', dest='noreplays', action='store_true',
                        help="When used, do not check for any available replays.")
    parser.add_argument('-nl', '--no-lives', dest='nolives', action='store_true',
                        help="When used, do not check for any available livestreams.")
    parser.add_argument('-cl', '--clean', dest='clean', action='store_true',
                        help="PyInstaLive will clean the current download folder of all leftover files.")
    parser.add_argument('-cp', '--config-path', dest='configpath', type=str, required=False,
                        help="Path to a PyInstaLive configuration file.")
    parser.add_argument('-dp', '--download-path', dest='dlpath', type=str, required=False,
                        help="Path to folder where PyInstaLive should save livestreams and replays.")
    parser.add_argument('-as', '--assemble', dest='assemble', type=str, required=False,
                        help="Path to json file required by the assembler to generate a video file from the segments.")
    parser.add_argument('-df', '--download-following', dest='downloadfollowing', action='store_true',
                        help="PyInstaLive will check for available livestreams and replays from users the account "
                             "used to login follows.")
    parser.add_argument('-nhb', '--no-heartbeat', dest='noheartbeat', action='store_true', help="Disable heartbeat "
                                                                                                "check for "
                                                                                                "livestreams.")
    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help="PyInstaLive will output JSON "
                                                                                     "responses and some misc "
                                                                                     "variables.")
    parser.add_argument('-sm', '--skip-merge', dest='skip_merge', action='store_true', help="PyInstaLive will not merge the downloaded livestream files.")
    parser.add_argument('-o', '--organize', action='store_true', help="Create a folder for each user whose livestream(s) you have downloaded. The names of the folders will be their usernames. Then move the video(s) of each user into their associated folder.")

    # Workaround to 'disable' argument abbreviations
    parser.add_argument('--usernamx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--passworx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--infx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--noreplayx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--cleax', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--downloadfollowinx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--configpatx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--confix', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('--organizx', help=argparse.SUPPRESS, metavar='IGNORE')

    parser.add_argument('-cx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('-nx', help=argparse.SUPPRESS, metavar='IGNORE')
    parser.add_argument('-dx', help=argparse.SUPPRESS, metavar='IGNORE')

    args, unknown_args = parser.parse_known_args()  # Parse arguments

    if validate_inputs(config, args, unknown_args):
        if not args.username and not args.password:
            pil.ig_api = auth.authenticate(username=pil.ig_user, password=pil.ig_pass)
        elif (args.username and not args.password) or (args.password and not args.username):
            logger.warn("Missing --username or --password argument. Falling back to config file.")
            logger.separator()
            pil.ig_api = auth.authenticate(username=pil.ig_user, password=pil.ig_pass)
        elif args.username and args.password:
            pil.ig_api = auth.authenticate(username=args.username, password=args.password, force_use_login_args=True)

        if pil.ig_api:
            if pil.dl_user or pil.args.downloadfollowing:
                downloader.start()
            elif pil.dl_batchusers:
                if not helpers.command_exists("pyinstalive"):
                    logger.error("PyInstaLive must be properly installed when using the -b argument.")
                    logger.separator()
                else:
                    dlfuncs.iterate_users(pil.dl_batchusers)
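
The dummy '--usernamx'-style arguments registered above exist only to defeat argparse's prefix matching, so that an abbreviation such as '--user' is not silently accepted for '--username'. A minimal sketch of the alternative available on Python 3.5+, assuming Python 2 support (which the version checks elsewhere in these snippets still cater for) can be dropped:

import argparse

# Hypothetical Python 3-only parser: allow_abbrev=False rejects prefix
# matches outright, so the '--usernamx' dummy arguments become unnecessary.
parser = argparse.ArgumentParser(allow_abbrev=False)
parser.add_argument('-u', '--username', dest='username', type=str, required=False)

print(parser.parse_args(['--username', 'someuser']))  # parses normally
# parser.parse_args(['--user', 'someuser']) would now exit with an
# "unrecognized arguments" error instead of matching '--username'.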
コード例 #21
0
def start():
    if pil.args.downloadfollowing:
        if not pil.dl_lives:
            logger.binfo("Livestream downloading is disabled either with an argument or in the config file.")
            logger.separator()
        if not pil.dl_replays:
            logger.binfo("Replay downloading is disabled either with an argument or in the config file.")
            logger.separator()
        if not helpers.command_exists("pyinstalive"):
            logger.error("PyInstaLive must be properly installed when using the -df argument.")
            logger.separator()
        else:
            dlfuncs.download_following()
    else:
        if not helpers.download_folder_has_lockfile():
            helpers.create_lock_user()
            checking_self = pil.dl_user == pil.ig_api.authenticated_user_name
            if dlfuncs.get_broadcasts_info():
                if pil.dl_lives:
                    if checking_self:
                        logger.warn("Login with a different account to download your own livestreams.")
                    elif pil.livestream_obj:
                        logger.info("Livestream available, starting download.")
                        dlfuncs.download_livestream()
                    else:
                        logger.info('There are no available livestreams.')
                else:
                    logger.binfo("Livestream downloading is disabled either with an argument or in the config file.")

                logger.separator()

                if pil.dl_replays:
                    if pil.replays_obj:
                        logger.info(
                            '{:s} available, beginning download.'.format("Replays" if len(
                                pil.replays_obj) > 1 else "Replay"))
                        dlfuncs.download_replays()
                    else:
                        logger.info('There are no available replays{:s}.'.format(" saved on your account" if checking_self else ""))
                else:
                    logger.binfo("Replay downloading is disabled either with an argument or in the config file.")

            helpers.remove_lock()
            logger.separator()
        else:
            logger.warn("Lock file is already present for this user, there is probably another download ongoing.")
            logger.warn("If this is not the case, manually delete the file '{:s}' and try again.".format(
                pil.dl_user + '.lock'))
            logger.separator()
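
start() leans on lock helpers (download_folder_has_lockfile(), create_lock_user(), remove_lock()) that are not shown in this snippet. Purely as an illustration of the pattern, a minimal sketch assuming the lock is nothing more than an empty '<username>.lock' marker file in the download folder, as the warning message above suggests, and that the shared pil state module is imported as in the snippets above:

import os

def _lock_path():
    # Assumed location: '<download folder>/<username>.lock'.
    return os.path.join(pil.dl_path, pil.dl_user + '.lock')

def download_folder_has_lockfile():
    return os.path.isfile(_lock_path())

def create_lock_user():
    # An empty marker file is enough; its presence signals an ongoing download.
    open(_lock_path(), 'a').close()

def remove_lock():
    try:
        os.remove(_lock_path())
    except OSError:
        pass  # lock was already removed or never created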
コード例 #22
0
def download_hashtag(target_user_hashtag):
    try:
        max_cursor = 0
        has_more = 0
        downloaded_total = 0
        checked_total = 0
        available_total = 0
        current_feed_page = 1
        hashtag_id = None
        target_user_hashtag = "#" + target_user_hashtag

        if not os.path.exists(os.path.join(ptts.dl_path, target_user_hashtag)):
            os.makedirs(os.path.join(ptts.dl_path, target_user_hashtag))

        download_path = os.path.join(ptts.dl_path, target_user_hashtag)
        while True:
            if ptts.args.recent:
                logger.separator()
                logger.binfo(
                    "Only checking the first 10 videos (--recent was passed).")
            if not has_more and not max_cursor:
                logger.separator()
                logger.info("Retrieving first feed page (page {:d})".format(
                    current_feed_page))
                logger.separator()
            if not hashtag_id:
                json_data = api.hashtag_search(text=ptts.tt_target_hashtag)
                hashtag_id = json_data.get("challenge_list")[0].get(
                    "challenge_info").get("cid")
                logger.info("Got hashtag Id: " + hashtag_id)
            json_data = api.hashtag_feed(hashtag_id=hashtag_id,
                                         cursor=max_cursor)
            open("hashtag.json", 'w').write(json.dumps(json_data))
            max_cursor = json_data.get('cursor')
            has_more = json_data.get('has_more')
            if not json_data.get("aweme_list", None):
                if checked_total:
                    logger.separator()
                    logger.info(
                        "End of feed reached. {:d} {:s} been downloaded.".
                        format(
                            downloaded_total, "video has"
                            if downloaded_total == 1 else "videos have"))
                elif not checked_total:
                    logger.info("There are no available videos to download.")
                logger.separator()
                break
            else:
                current_feed_page += 1
                available_total += len(json_data.get(
                    "aweme_list")) if not ptts.args.recent else 10
                for video in json_data.get("aweme_list"):
                    if ptts.args.recent and checked_total == 10:
                        if downloaded_total:
                            logger.separator()
                            logger.info(
                                "10 videos have been checked. {:d} {:s} been downloaded."
                                .format(
                                    downloaded_total, "video has" if
                                    downloaded_total == 1 else "videos have"))
                        else:
                            logger.info(
                                "10 videos have been checked. There are no available videos to download."
                            )
                        logger.separator()
                        return
                    else:
                        video_uri = video.get("video").get("play_addr").get(
                            "uri")
                        video_desc = video.get("desc")
                        filename = '{:d}_{:s}.mp4'.format(
                            video.get("create_time"),
                            video.get("author").get("unique_id"))
                        if video_uri.isdigit():
                            actual_video_uri = video.get("video").get(
                                "play_addr").get("url_list")[0]
                            if not os.path.isfile(
                                    os.path.join(download_path, filename)):

                                rr = requests.get(actual_video_uri,
                                                  verify=True)
                                if rr.status_code == 200:
                                    open(os.path.join(download_path, filename),
                                         'wb').write(rr.content)
                                    try:
                                        mp4_video_tags = MP4(
                                            os.path.join(
                                                download_path, filename))
                                        mp4_video_tags['\xa9cmt'] = video_desc
                                        mp4_video_tags.save()
                                    except Exception as e:
                                        pass
                                    logger.info(
                                        "({:d}/{:d}) - Downloaded video with Id: {}"
                                        .format(checked_total + 1,
                                                available_total, video_uri))
                                    downloaded_total += 1
                                else:
                                    logger.warn(
                                        "Response did not return status 200, was {:d} instead. Giving up and "
                                        "moving on.".format(rr.status_code))
                                    logger.warn(
                                        "The video Id was: {:s}".format(
                                            video_uri))
                            else:
                                logger.info(
                                    "({:d}/{:d}) - Already downloaded video with Id: {}"
                                    .format(checked_total + 1, available_total,
                                            video_uri))
                        else:
                            if not os.path.isfile(
                                    os.path.join(download_path, filename)):
                                rr = requests.get(
                                    Constants.VIDEO_BASE_URL.format(
                                        video_uri, 1),
                                    verify=True,
                                    headers=Constants.REQUESTS_VIDEO_UA)
                                if rr.status_code == 200:
                                    open(os.path.join(download_path, filename),
                                         'wb').write(rr.content)
                                    try:
                                        mp4_video_tags = MP4(
                                            os.path.join(
                                                download_path, filename))
                                        mp4_video_tags['\xa9cmt'] = video_desc
                                        mp4_video_tags.save()
                                    except Exception as e:
                                        pass
                                    logger.info(
                                        "({:d}/{:d}) - Downloaded video with Id: {}"
                                        .format(checked_total + 1,
                                                available_total, video_uri))
                                    downloaded_total += 1
                                else:
                                    logger.warn(
                                        "Response did not return status 200, was {:d} instead. Trying with "
                                        "lower bitrate.".format(
                                            rr.status_code))
                                    rr = requests.get(
                                        Constants.VIDEO_BASE_URL.format(
                                            video_uri, 0),
                                        verify=True,
                                        headers=Constants.REQUESTS_VIDEO_UA)
                                    if rr.status_code == 200:
                                        open(
                                            os.path.join(
                                                download_path, filename),
                                            'wb').write(rr.content)
                                        logger.info(
                                            "({:d}/{:d}) - Downloaded video with Id: {}"
                                            .format(checked_total + 1,
                                                    available_total,
                                                    video_uri))
                                        downloaded_total += 1
                                    else:
                                        logger.warn(
                                            "Response did not return status 200, was {:d} instead. Giving up "
                                            "and moving on.".format(
                                                rr.status_code))
                                        logger.warn(
                                            "The video Id was: {:s}".format(
                                                video_uri))
                            else:
                                logger.info(
                                    "({:d}/{:d}) - Already downloaded video with Id: {}"
                                    .format(checked_total + 1, available_total,
                                            video_uri))
                        checked_total += 1
                if has_more:
                    logger.separator()
                    logger.info("Retrieving next feed page (page {:d})".format(
                        current_feed_page))
                    logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.info("The download has been aborted.")
        logger.separator()
    except Exception as e:
        logger.separator()
        logger.error("Something went wrong: " + str(e))
        logger.separator()
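
download_hashtag() embeds each video's description in the file's metadata through mutagen's MP4 class; '\xa9cmt' is the MP4 comment atom. A self-contained sketch of just that tagging step, assuming mutagen is installed and 'video.mp4' is a hypothetical, already-downloaded file:

from mutagen.mp4 import MP4

def tag_video_comment(path, description):
    # Store the post description in the MP4 comment atom so it travels with
    # the downloaded file; failures are treated as non-fatal, as above.
    try:
        mp4_video_tags = MP4(path)
        mp4_video_tags['\xa9cmt'] = description
        mp4_video_tags.save()
    except Exception:
        pass

tag_video_comment('video.mp4', 'Example video description')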
コード例 #23
0
def run():
    ptts.initialize()
    logging.disable(logging.CRITICAL)
    config = configparser.ConfigParser()
    parser = argparse.ArgumentParser(
        description="You are running PyTikTokScraper {:s} using Python {:s}".
        format(Constants.SCRIPT_VER, Constants.PYTHON_VER))
    parser.add_argument(
        '-d',
        '--download',
        dest='download',
        type=str,
        required=False,
        help="The username (or uid) of the user whose posts you want to save.")
    parser.add_argument(
        '-ht',
        '--hashtag',
        dest='hashtag',
        type=str,
        required=False,
        help="The hashtag whose posts in the feed you want to save.")
    parser.add_argument(
        '-r',
        '--recent',
        dest='recent',
        action='store_true',
        help="When used, only retrieves the first 10 videos in the user's feed."
    )
    parser.add_argument(
        '-gf',
        '--get-following',
        dest='getfollowing',
        type=str,
        required=False,
        help="When used, retrieves the list of people you're following.")
    parser.add_argument(
        '-uid',
        '--is-uid',
        dest='isuid',
        action='store_true',
        help="When used, treat the download argument as the user ID.")
    parser.add_argument('-s',
                        '--single',
                        dest='single',
                        type=str,
                        required=False,
                        help="Pass a single video Id to download.")
    parser.add_argument(
        '-l',
        '--livestream',
        dest='livestream',
        type=str,
        required=False,
        help="Pass an username to download a livestream, if available.")
    args = parser.parse_args()

    if validate_inputs(config, args):
        api.login()
        if (args.download or args.livestream) and not args.isuid:
            try:
                target_user_json = api.search_user(ptts.tt_target_user)
                for user in target_user_json.get('user_list'):
                    if user.get('user_info').get(
                            'unique_id') == ptts.tt_target_user:
                        ptts.tt_target_id = user.get('user_info').get('uid')
                        response_user = api.get_user_info(ptts.tt_target_id)
                        ptts.tt_target_user_liveroomid = response_user.get(
                            'user').get('room_id') if response_user.get(
                                'user').get('room_id') > 0 else None
                        video_count = user.get('user_info').get('aweme_count')
                        logger.info(
                            "Found matching user profile with {:d} videos.".
                            format(video_count))
                        if args.download and video_count < 1:
                            logger.separator()
                            logger.binfo(
                                "This user has no available videos to download."
                            )
                            logger.separator()
                            sys.exit(0)
                if not ptts.tt_target_id:
                    raise IndexError
            except (IndexError, TypeError):
                logger.error(
                    "No user found matching '{:s}', trying tiktokapi.ga search."
                    .format(ptts.tt_target_user))
                logger.separator()
                try:
                    target_user_json = api.search_user_tta(ptts.tt_target_user)
                    if target_user_json:
                        for user in target_user_json.get('user_list'):
                            if user.get('user_info').get(
                                    'unique_id') == ptts.tt_target_user:
                                open("usersearch.json", "w").write(
                                    json.dumps(user.get("user_info")))
                                ptts.tt_target_id = user.get('user_info').get(
                                    'uid')
                                ptts.tt_target_user_liveroomid = user.get(
                                    'user_info'
                                ).get('room_id') if user.get('user_info').get(
                                    'room_id') > 0 else None
                                video_count = user.get('user_info').get(
                                    'aweme_count')
                                logger.info(
                                    "Found matching user profile with {:d} videos."
                                    .format(video_count))
                                if args.download and video_count < 1:
                                    logger.separator()
                                    logger.binfo(
                                        "This user has no available videos to download."
                                    )
                                    logger.separator()
                                    sys.exit(0)
                        if not ptts.tt_target_id:
                            raise IndexError
                    else:
                        raise IndexError
                except (IndexError, TypeError):
                    logger.error(
                        "No results on tiktokapi.ga either, the script will now exit.")
                    logger.separator()
                    sys.exit(0)
        elif args.download and args.isuid:
            ptts.tt_target_id = args.download
            try:
                int(ptts.tt_target_id)
            except ValueError:
                logger.error(
                    "The user ID '{}' is not a valid value. Exiting.".format(
                        ptts.tt_target_id))
                logger.separator()
                sys.exit(1)
        elif args.livestream and args.isuid:
            ptts.tt_target_id = args.livestream
            try:
                int(ptts.tt_target_id)
            except ValueError:
                logger.error(
                    "The user ID '{}' is not a valid value. Exiting.".format(
                        ptts.tt_target_id))
                logger.separator()
                sys.exit(1)
            response_user = api.get_user_info(ptts.tt_target_id)
            ptts.tt_target_user_liveroomid = response_user.get('user').get(
                'room_id'
            ) if response_user.get('user').get('room_id') > 0 else None
        if ptts.tt_target_id:
            logger.info("Retrieved user ID: {:s}".format(ptts.tt_target_id))
            logger.separator()
        if args.getfollowing:
            logger.info("Retrieving list of following users...")
            logger.warn(
                "Pagination does not work properly, use this at own risk!")
            logger.separator()
            json_resp = api.get_following(ptts.tt_target_id)
            following_txt = os.path.join(
                os.getcwd(), "following_{:s}.txt".format(ptts.tt_target_user))
            if os.path.isfile(following_txt):
                os.remove(following_txt)
            for user in json_resp.get('followings'):
                user_text = user.get('unique_id') + " - " + user.get('uid')
                logger.plain(user_text)
                open(following_txt, 'a').write(user_text + '\n')
            logger.separator()
            logger.info("Written {:d} users to {:s}".format(
                len(json_resp.get('followings')), following_txt))
            logger.separator()
        if ptts.args.download:
            logger.info("Starting download of all videos from profile.")
            downloader.download_all(ptts.tt_target_id)
        if ptts.args.hashtag:
            logger.info(
                "Starting download of all posts from hashtag '{:s}'.".format(
                    ptts.tt_target_hashtag))
            downloader.download_hashtag(ptts.tt_target_hashtag)
        if ptts.args.livestream:
            if ptts.tt_target_user_liveroomid:
                logger.info("Starting download for livestream.")
                downloader.download_live(ptts.tt_target_user_liveroomid)
            else:
                logger.warn("There currently no ongoing livestream available.")
                logger.separator()
コード例 #24
0
def clean_download_dir():
    dir_delcount = 0
    file_delcount = 0
    error_count = 0
    lock_count = 0
    try:
        logger.info('Cleaning up temporary files and folders.')
        if Constants.PYTHON_VER[0] == "2":
            directories = (os.walk(pil.dl_path).next()[1])
            files = (os.walk(pil.dl_path).next()[2])
        else:
            directories = (os.walk(pil.dl_path).__next__()[1])
            files = (os.walk(pil.dl_path).__next__()[2])

        for directory in directories:
            if directory.endswith('_downloads'):
                if not any(
                        filename.endswith('.lock') for filename in os.listdir(
                            os.path.join(pil.dl_path, directory))):
                    try:
                        shutil.rmtree(os.path.join(pil.dl_path, directory))
                        dir_delcount += 1
                    except Exception as e:
                        logger.error("Could not remove folder: {:s}".format(
                            str(e)))
                        error_count += 1
                else:
                    lock_count += 1
        logger.separator()
        for file in files:
            if file.endswith('_downloads.json'):
                if not any(
                        filename.endswith('.lock')
                        for filename in os.listdir(os.path.join(pil.dl_path))):
                    try:
                        os.remove(os.path.join(pil.dl_path, file))
                        file_delcount += 1
                    except Exception as e:
                        logger.error("Could not remove file: {:s}".format(
                            str(e)))
                        error_count += 1
                else:
                    lock_count += 1
        if dir_delcount == 0 and file_delcount == 0 and error_count == 0 and lock_count == 0:
            logger.info('The cleanup has finished. No items were removed.')
            logger.separator()
            return
        logger.info('The cleanup has finished.')
        logger.info('Folders removed:     {:d}'.format(dir_delcount))
        logger.info('Files removed:       {:d}'.format(file_delcount))
        logger.info('Locked items:        {:d}'.format(lock_count))
        logger.info('Errors:              {:d}'.format(error_count))
        logger.separator()
    except KeyboardInterrupt as e:
        logger.separator()
        logger.warn("The cleanup has been aborted.")
        if dir_delcount == 0 and file_delcount == 0 and error_count == 0 and lock_count == 0:
            logger.info('No items were removed.')
            logger.separator()
            return
        logger.info('Folders removed:     {:d}'.format(dir_delcount))
        logger.info('Files removed:       {:d}'.format(file_delcount))
        logger.info('Locked items:        {:d}'.format(lock_count))
        logger.info('Errors:              {:d}'.format(error_count))
        logger.separator()
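
The Python 2/3 branching around os.walk().next() versus os.walk().__next__() in clean_download_dir() can be collapsed with the built-in next(), which drives the iterator protocol on either interpreter. A minimal sketch:

import os

def list_download_dir(path):
    # next() replaces the interpreter version check: the first item yielded
    # by os.walk() is a (dirpath, dirnames, filenames) tuple for 'path'.
    _, directories, files = next(os.walk(path))
    return directories, files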
コード例 #25
0
ファイル: comments.py プロジェクト: rawr-z/PyInstaLive
    def generate_log(comments={}, download_start_time=0, log_file="", comments_delay=10.0, gen_from_arg=False):
        try:
            if gen_from_arg:
                with open(pil.gencomments_arg, 'r') as comments_json:
                    comments = json.load(comments_json).get("comments", None)
                if comments:
                    log_file = os.path.join(
                        pil.dl_path, os.path.basename(pil.gencomments_arg.replace(".json", ".log")))
                    logger.info("Generating comments file from input...")
                else:
                    logger.warn(
                        "The input file does not contain any comments.")
                    logger.separator()
                    return None
            python_version = sys.version.split(' ')[0]
            comments_timeline = {}
            wide_build = sys.maxunicode > 65536
            for c in comments:
                if 'offset' in c:
                    for k in list(c.get('comment')):
                        c[k] = c.get('comment', {}).get(k)
                    c['created_at_utc'] = download_start_time + c.get('offset')
                created_at_utc = str(2 * (c.get('created_at_utc') // 2))
                comment_list = comments_timeline.get(created_at_utc) or []
                comment_list.append(c)
                comments_timeline[created_at_utc] = comment_list

            if comments_timeline:
                comment_errors = 0
                total_comments = 0
                timestamps = sorted(list(comments_timeline))
                subs = []
                for tc in timestamps:
                    t = comments_timeline[tc]
                    clip_start = int(tc) - int(download_start_time) + \
                        int(comments_delay)
                    if clip_start < 0:
                        clip_start = 0

                    comments_log = ''
                    for c in t:
                        try:
                            if python_version.startswith('3'):
                                if c.get('user', {}).get('is_verified'):
                                    comments_log += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)),
                                                                      '{} {}: {}'.format(c.get('user', {}).get('username'),
                                                                                         "(v)", c.get('text')))
                                else:
                                    comments_log += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)),
                                                                      '{}: {}'.format(c.get('user', {}).get('username'),
                                                                                      c.get('text')))
                            else:
                                if not wide_build:
                                    if c.get('user', {}).get('is_verified'):
                                        comments_log += '{}{}\n\n'.format(
                                            time.strftime(
                                                '%H:%M:%S\n', time.gmtime(clip_start)),
                                            '{} {}: {}'.format(c.get('user', {}).get('username'), "(v)",
                                                               c.get('text').encode('ascii', 'ignore')))
                                    else:
                                        comments_log += '{}{}\n\n'.format(
                                            time.strftime(
                                                '%H:%M:%S\n', time.gmtime(clip_start)),
                                            '{}: {}'.format(c.get('user', {}).get('username'),
                                                            c.get('text').encode('ascii', 'ignore')))
                                else:
                                    if c.get('user', {}).get('is_verified'):
                                        comments_log += '{}{}\n\n'.format(
                                            time.strftime(
                                                '%H:%M:%S\n', time.gmtime(clip_start)),
                                            '{} {}: {}'.format(c.get('user', {}).get('username'), "(v)", c.get('text')))
                                    else:
                                        comments_log += '{}{}\n\n'.format(
                                            time.strftime(
                                                '%H:%M:%S\n', time.gmtime(clip_start)),
                                            '{}: {}'.format(c.get('user', {}).get('username'), c.get('text')))
                        except Exception:
                            comment_errors += 1
                            try:
                                if c.get('user', {}).get('is_verified'):
                                    comments_log += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)),
                                                                      '{} {}: {}'.format(c.get('user', {}).get('username'),
                                                                                         "(v)",
                                                                                         c.get('text').encode('ascii',
                                                                                                              'ignore')))
                                else:
                                    comments_log += '{}{}\n\n'.format(time.strftime('%H:%M:%S\n', time.gmtime(clip_start)),
                                                                      '{}: {}'.format(c.get('user', {}).get('username'),
                                                                                      c.get('text').encode('ascii',
                                                                                                           'ignore')))
                            except Exception:
                                pass
                        total_comments += 1
                    subs.append(comments_log)

                with codecs.open(log_file, 'w', 'utf-8-sig') as log_outfile:
                    if python_version.startswith('2') and not wide_build:
                        log_outfile.write(
                            'This log was generated using Python {:s} without wide unicode support. This means characters '
                            'such as emotes are not saved.\nUser comments without any text usually are comments that only '
                            'had emotes.\nBuild Python 2 with the --enable-unicode=ucs4 argument or use Python 3 for full '
                            'unicode support.\n\n'.format(
                                python_version) + ''.join(subs))
                    else:
                        log_outfile.write(''.join(subs))
                if gen_from_arg:
                    if comment_errors:
                        logger.warn(
                            "Successfully saved {:s} comments but {:s} comments are (partially) missing.".format(
                                str(total_comments), str(comment_errors)))
                    else:
                        logger.info("Successfully saved {:s} comments.".format(
                            str(total_comments)))
                    logger.separator()
                return comment_errors, total_comments
        except Exception as e:
            logger.error(
                "An error occurred while saving comments: {:s}".format(str(e)))
            logger.separator()
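
generate_log() groups comments into two-second buckets: `str(2 * (created_at_utc // 2))` rounds each timestamp down to the nearest even second and uses the result as a dictionary key. A condensed sketch of that bucketing step with hypothetical sample data:

# Hypothetical comments with UNIX timestamps; only the bucketing logic is shown.
comments = [
    {'created_at_utc': 1600000000, 'text': 'first'},
    {'created_at_utc': 1600000001, 'text': 'second'},
    {'created_at_utc': 1600000002, 'text': 'third'},
]

comments_timeline = {}
for c in comments:
    bucket = str(2 * (c['created_at_utc'] // 2))  # round down to an even second
    comments_timeline.setdefault(bucket, []).append(c)

# '1600000000' holds the first two comments, '1600000002' the third.
print({k: [c['text'] for c in v] for k, v in comments_timeline.items()})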
コード例 #26
0
ファイル: auth.py プロジェクト: wivb0/PyInstaLive
def authenticate(username, password, force_use_login_args=False):
    ig_api = None
    try:
        if force_use_login_args:
            logger.binfo("Overriding configuration file login with -u and -p arguments.")
            logger.separator()
        cookie_file = "{}.json".format(username)
        if not os.path.isfile(cookie_file):
            # settings file does not exist
            logger.warn('Unable to find cookie file: {0!s}'.format(cookie_file))
            logger.info('Creating a new one.')

            # login new
            ig_api = Client(
                username, password,
                on_login=lambda x: onlogin_callback(x, cookie_file))
        else:
            with open(cookie_file) as file_data:
                cached_settings = json.load(file_data, object_hook=from_json)
            # logger.info('Using settings file: {0!s}'.format(cookie_file))

            device_id = cached_settings.get('device_id')
            # reuse auth cached_settings
            try:
                ig_api = Client(
                    username, password,
                    settings=cached_settings)

            except ClientCookieExpiredError as e:
                logger.warn('The current cookie file has expired, creating a new one.')

                ig_api = Client(
                    username, password,
                    device_id=device_id,
                    on_login=lambda x: onlogin_callback(x, cookie_file))

    except (ClientLoginError, ClientError) as e:
        logger.separator()
        logger.error('Could not login: {:s}'.format(
            json.loads(e.error_response).get("error_title", "Error title not available.")))
        logger.error('{:s}'.format(json.loads(e.error_response).get("message", "Not available")))
        # logger.error('{:s}'.format(e.error_response))
        logger.separator()
    except Exception as e:
        if str(e).startswith("unsupported pickle protocol"):
            logger.warn("This cookie file is not compatible with Python {}.".format(sys.version.split(' ')[0][0]))
            logger.warn("Please delete your cookie file '{}.json' and try again.".format(username))
        else:
            logger.separator()
            logger.error('Unexpected exception: {:s}'.format(str(e)))
        logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.warn("The user authentication has been aborted.")
        logger.separator()

    if ig_api:
        logger.info('Successfully logged into account: {:s}'.format(str(ig_api.authenticated_user_name)))
        if pil.show_cookie_expiry and not force_use_login_args:
            try:
                cookie_expiry = ig_api.cookie_jar.auth_expires
                logger.info('Cookie file expiry date: {:s}'.format(
                    datetime.datetime.fromtimestamp(cookie_expiry).strftime('%Y-%m-%d at %I:%M:%S %p')))
            except AttributeError as e:
                logger.warn('An error occurred while getting the cookie file expiry date: {:s}'.format(str(e)))

        logger.separator()
        return ig_api
    else:
        return None
コード例 #27
0
ファイル: dlfuncs.py プロジェクト: tinymoss/PyInstaLive
def download_replays():
    try:
        try:
            logger.info('Number of replays    : {:s}'.format(
                str(len(pil.replays_obj))))
            for replay_index, replay_obj in enumerate(pil.replays_obj):
                bc_dash_manifest = parseString(replay_obj.get(
                    'dash_manifest')).getElementsByTagName('Period')
                bc_duration_raw = bc_dash_manifest[0].getAttribute("duration")
                bc_minutes = (bc_duration_raw.split("H"))[1].split("M")[0]
                bc_seconds = ((
                    bc_duration_raw.split("M"))[1].split("S")[0]).split('.')[0]
                logger.info(
                    'Replay {:s} duration    : {:s} minutes and {:s} seconds'.
                    format(str(replay_index + 1), bc_minutes, bc_seconds))
        except Exception as e:
            logger.warn(
                "An error occurred while getting replay duration information: {:s}"
                .format(str(e)))
        logger.separator()
        logger.info("Downloading replays, press [CTRL+C] to abort.")
        logger.separator()
        for replay_index, replay_obj in enumerate(pil.replays_obj):
            exists = False
            pil.livestream_obj = replay_obj
            if Constants.PYTHON_VER[0][0] == '2':
                directories = (os.walk(pil.dl_path).next()[1])
            else:
                directories = (os.walk(pil.dl_path).__next__()[1])

            for directory in directories:
                if (str(replay_obj.get('id'))
                        in directory) and ("_live_" not in directory):
                    logger.binfo(
                        "Already downloaded a replay with ID '{:s}'.".format(
                            str(replay_obj.get('id'))))
                    exists = True
            if not exists:
                current = replay_index + 1
                logger.info(
                    "Downloading replay {:s} of {:s} with ID '{:s}'.".format(
                        str(current), str(len(pil.replays_obj)),
                        str(replay_obj.get('id'))))
                pil.live_folder_path = '{}{}_{}_{}_{}_replay_downloads'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user,
                    pil.livestream_obj.get('id'), pil.epochtime)
                broadcast_downloader = replay.Downloader(
                    mpd=replay_obj.get('dash_manifest'),
                    output_dir=pil.live_folder_path,
                    user_agent=pil.ig_api.user_agent,
                    ffmpeg_binary=pil.ffmpeg_path)
                if pil.use_locks:
                    helpers.create_lock_folder()
                replay_mp4_file = '{}{}_{}_{}_{}_replay.mp4'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user,
                    pil.livestream_obj.get('id'), pil.epochtime)

                comments_json_file = '{}{}_{}_{}_{}_replay_comments.json'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user,
                    pil.livestream_obj.get('id'), pil.epochtime)

                pil.comment_thread_worker = threading.Thread(
                    target=get_replay_comments, args=(comments_json_file, ))

                broadcast_downloader.download(
                    replay_mp4_file, cleartempfiles=pil.clear_temp_files)
                if pil.clear_temp_files:
                    helpers.remove_temp_folder()
                if pil.dl_comments:
                    logger.info("Downloading replay comments.")
                    try:
                        get_replay_comments(comments_json_file)
                    except Exception as e:
                        logger.error(
                            'An error occurred while downloading comments: {:s}'
                            .format(str(e)))

                logger.info("Finished downloading replay {:s} of {:s}.".format(
                    str(current), str(len(pil.replays_obj))))
                helpers.remove_lock()

                if current != len(pil.replays_obj):
                    logger.separator()

        logger.separator()
        logger.info("Finished downloading all available replays.")
        helpers.remove_lock()
    except Exception as e:
        logger.error('Could not save replay: {:s}'.format(str(e)))
        helpers.remove_lock()
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The download has been aborted by the user, exiting.')
        logger.separator()
        helpers.remove_temp_folder()
        helpers.remove_lock()
コード例 #28
0
ファイル: dlfuncs.py プロジェクト: tinymoss/PyInstaLive
def download_livestream():
    try:

        def print_status(sep=True):
            if pil.do_heartbeat:
                heartbeat_info = pil.ig_api.broadcast_heartbeat_and_viewercount(
                    pil.livestream_obj.get('id'))
            viewers = pil.livestream_obj.get('viewer_count', 0)
            if sep:
                logger.separator()
            else:
                logger.info('Username    : {:s}'.format(pil.dl_user))
            logger.info('Viewers     : {:s} watching'.format(str(
                int(viewers))))
            logger.info('Airing time : {:s}'.format(get_stream_duration(0)))
            if pil.do_heartbeat:
                logger.info('Status      : {:s}'.format(
                    heartbeat_info.get('broadcast_status').title()))
                return heartbeat_info.get('broadcast_status') not in [
                    'active', 'interrupted'
                ]
            else:
                return None

        mpd_url = (pil.livestream_obj.get('dash_manifest')
                   or pil.livestream_obj.get('dash_abr_playback_url')
                   or pil.livestream_obj.get('dash_playback_url'))

        pil.live_folder_path = '{}{}_{}_{}_{}_live_downloads'.format(
            pil.dl_path, pil.datetime_compat, pil.dl_user,
            pil.livestream_obj.get('id'), pil.epochtime)
        pil.broadcast_downloader = live.Downloader(
            mpd=mpd_url,
            output_dir=pil.live_folder_path,
            user_agent=pil.ig_api.user_agent,
            max_connection_error_retry=3,
            duplicate_etag_retry=30,
            callback_check=print_status,
            mpd_download_timeout=3,
            download_timeout=3,
            ffmpeg_binary=pil.ffmpeg_path)
    except Exception as e:
        logger.error('Could not start downloading livestream: {:s}'.format(
            str(e)))
        logger.separator()
        helpers.remove_lock()
        return
    try:
        broadcast_owner = pil.livestream_obj.get('broadcast_owner',
                                                 {}).get('username')
        try:
            broadcast_guest = pil.livestream_obj.get('cobroadcasters',
                                                     {})[0].get('username')
        except Exception:
            broadcast_guest = None
        if broadcast_owner != pil.dl_user:
            logger.binfo(
                'This livestream is a dual-live, the owner is "{}".'.format(
                    broadcast_owner))
            broadcast_guest = None
        if broadcast_guest:
            logger.binfo(
                'This livestream is a dual-live, the current guest is "{}".'.
                format(broadcast_guest))
            pil.has_guest = broadcast_guest
        logger.separator()
        print_status(False)
        logger.separator()
        helpers.create_lock_folder()
        pil.segments_json_thread_worker = threading.Thread(
            target=helpers.generate_json_segments)
        pil.segments_json_thread_worker.start()
        logger.info('Downloading livestream, press [CTRL+C] to abort.')

        if pil.run_at_start:
            try:
                thread = threading.Thread(target=helpers.run_command,
                                          args=(pil.run_at_start, ))
                thread.daemon = True
                thread.start()
                logger.binfo("Launched start command: {:s}".format(
                    pil.run_at_start))
            except Exception as e:
                logger.warn('Could not launch command: {:s}'.format(str(e)))

        if pil.dl_comments:
            try:
                comments_json_file = '{}{}_{}_{}_{}_live_comments.json'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user,
                    pil.livestream_obj.get('id'), pil.epochtime)
                pil.comment_thread_worker = threading.Thread(
                    target=get_live_comments, args=(comments_json_file, ))
                pil.comment_thread_worker.start()
            except Exception as e:
                logger.error(
                    'An error occurred while downloading comments: {:s}'.
                    format(str(e)))
        pil.broadcast_downloader.run()
        logger.separator()
        logger.info("The livestream has been ended by the user.")
        logger.separator()
        logger.info('Airtime duration  : {}'.format(get_stream_duration(0)))
        logger.info('Download duration : {}'.format(get_stream_duration(1)))
        logger.info('Missing (approx.) : {}'.format(get_stream_duration(2)))
        logger.separator()
        merge_segments()
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The download has been aborted.')
        logger.separator()
        logger.info('Airtime duration  : {}'.format(get_stream_duration(0)))
        logger.info('Download duration : {}'.format(get_stream_duration(1)))
        logger.info('Missing (approx.) : {}'.format(get_stream_duration(2)))
        logger.separator()
        if not pil.broadcast_downloader.is_aborted:
            pil.broadcast_downloader.stop()
            merge_segments()