def iterate_users(user_list):
    for user in user_list:
        try:
            if os.path.isfile(os.path.join(pil.dl_path, user + '.lock')):
                logger.warn("Lock file is already present for '{:s}', there is probably another download "
                            "ongoing!".format(user))
                logger.warn("If this is not the case, manually delete the file '{:s}' and try again.".format(user + '.lock'))
            else:
                logger.info("Launching daemon process for '{:s}'.".format(user))
                start_result = helpers.run_command(
                    "pyinstalive -d {:s} -cp '{:s}' -dp '{:s}' {:s} {:s} {:s}".format(
                        user, pil.config_path, pil.dl_path,
                        '--no-lives' if not pil.dl_lives else '',
                        '--no-replays' if not pil.dl_replays else '',
                        '--no-heartbeat' if not pil.do_heartbeat else ''))
                if start_result:
                    logger.warn("Could not start process: {:s}".format(str(start_result)))
                else:
                    logger.info("Process started successfully.")
            logger.separator()
            time.sleep(2)
        except Exception as e:
            logger.warn("Could not start process: {:s}".format(str(e)))
        except KeyboardInterrupt:
            logger.binfo('The process launching has been aborted by the user.')
            logger.separator()
            break
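
# `helpers.run_command` itself is not shown in this section. A minimal sketch
# consistent with how it is called above and in merge_segments() below --
# returning None on success and an error string on failure -- could look like
# this (an assumption, not the actual helper):
import shlex
import subprocess

def run_command(command):
    try:
        # Fire-and-forget: the caller only cares whether spawning failed.
        subprocess.Popen(shlex.split(command),
                         stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return None
    except Exception as e:
        return str(e)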
def get_replay_comments(comments_json_file):
    try:
        comments_downloader = CommentsDownloader(destination_file=comments_json_file)
        comments_downloader.get_replay()
        try:
            if comments_downloader.comments:
                comments_log_file = comments_json_file.replace('.json', '.log')
                comment_errors, total_comments = CommentsDownloader.generate_log(
                    comments_downloader.comments, pil.livestream_obj.get('published_time'),
                    comments_log_file, comments_delay=0)
                if total_comments == 1:
                    logger.info("Successfully saved 1 comment to logfile.")
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
                else:
                    if comment_errors:
                        logger.warn("Successfully saved {:s} comments but {:s} comments are (partially) missing.".format(
                            str(total_comments), str(comment_errors)))
                    else:
                        logger.info("Successfully saved {:s} comments.".format(str(total_comments)))
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
            else:
                logger.info("There are no available comments to save.")
                return False
        except Exception as e:
            logger.error('Could not save comments to logfile: {:s}'.format(str(e)))
            return False
    except KeyboardInterrupt:
        logger.binfo("Downloading replay comments has been aborted.")
        return False
def get_broadcasts_info():
    try:
        user_id = get_user_id()
        if user_id:
            broadcasts = pil.ig_api.user_story_feed(user_id)
            pil.livestream_obj = broadcasts.get('broadcast')
            pil.replays_obj = broadcasts.get('post_live_item', {}).get('broadcasts', [])
            return True
        else:
            return False
    except ClientThrottledError:
        # Must be caught before the generic Exception handler, otherwise it would never run.
        logger.error('Could not check because you are making too many requests at this time.')
        return False
    except Exception as e:
        logger.error('Could not finish checking: {:s}'.format(str(e)))
        if "timed out" in str(e):
            logger.error('The connection timed out, check your internet connection.')
        if "login_required" in str(e):
            logger.error('Login cookie was loaded but user is not actually logged in. Delete the cookie file and try '
                         'again.')
        return False
    except KeyboardInterrupt:
        logger.binfo('Aborted checking for livestreams and replays, exiting.')
        return False
def download_single(video_id):
    try:
        download_path = os.path.join(ptts.dl_path, video_id + ".mp4")
        if not os.path.isfile(download_path):
            rr = requests.get(Constants.VIDEO_BASE_URL.format(video_id, 1),
                              verify=True, headers=Constants.REQUESTS_VIDEO_UA)
            if rr.status_code == 200:
                with open(download_path, 'wb') as f:
                    f.write(rr.content)
                logger.info("Downloaded video with Id: {}".format(video_id))
            else:
                logger.warn("Response did not return status 200, was {:d} instead. Trying with lower "
                            "bitrate.".format(rr.status_code))
                rr = requests.get(Constants.VIDEO_BASE_URL.format(video_id, 0),
                                  verify=True, headers=Constants.REQUESTS_VIDEO_UA)
                if rr.status_code == 200:
                    with open(download_path, 'wb') as f:
                        f.write(rr.content)
                else:
                    logger.warn("Response did not return status 200, was {:d} instead. Giving up.".format(rr.status_code))
            logger.separator()
        else:
            logger.binfo("This video already exists.")
            logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.info("The download has been aborted.")
        logger.separator()
    except Exception as e:
        logger.separator()
        logger.error("Something went wrong: " + str(e))
        logger.separator()
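
# The try-200-then-fall-back-to-lower-bitrate pattern above reappears almost
# verbatim in download_hashtag() further down. A hedged consolidation sketch
# (the fetch_video name is hypothetical; the second VIDEO_BASE_URL format
# argument is assumed to be the bitrate flag the log messages imply):
def fetch_video(video_id, download_path):
    for ratio in (1, 0):  # 1 = higher bitrate, 0 = lower bitrate (assumed)
        rr = requests.get(Constants.VIDEO_BASE_URL.format(video_id, ratio),
                          verify=True, headers=Constants.REQUESTS_VIDEO_UA)
        if rr.status_code == 200:
            with open(download_path, 'wb') as f:
                f.write(rr.content)
            return True
        logger.warn("Response did not return status 200, was {:d} instead.".format(rr.status_code))
    return False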
def get_live_comments(comments_json_file):
    try:
        comments_downloader = CommentsDownloader(destination_file=comments_json_file)
        first_comment_created_at = 0
        try:
            while not pil.broadcast_downloader.is_aborted:
                if 'initial_buffered_duration' not in pil.livestream_obj and pil.broadcast_downloader.initial_buffered_duration:
                    pil.livestream_obj['initial_buffered_duration'] = pil.broadcast_downloader.initial_buffered_duration
                    comments_downloader.broadcast = pil.livestream_obj
                first_comment_created_at = comments_downloader.get_live(first_comment_created_at)
        except ClientError as e:
            if 'media has been deleted' not in e.error_response:
                logger.warn("Comment collection ClientError: %d %s" % (e.code, e.error_response))
        try:
            if comments_downloader.comments:
                comments_downloader.save()
                comments_log_file = comments_json_file.replace('.json', '.log')
                comment_errors, total_comments = CommentsDownloader.generate_log(
                    comments_downloader.comments, pil.epochtime, comments_log_file,
                    comments_delay=pil.broadcast_downloader.initial_buffered_duration)
                if len(comments_downloader.comments) == 1:
                    logger.info("Successfully saved 1 comment.")
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
                else:
                    if comment_errors:
                        logger.warn("Successfully saved {:s} comments but {:s} comments are (partially) missing.".format(
                            str(total_comments), str(comment_errors)))
                    else:
                        logger.info("Successfully saved {:s} comments.".format(str(total_comments)))
                    os.remove(comments_json_file)
                    logger.separator()
                    return True
            else:
                logger.info("There are no available comments to save.")
                logger.separator()
                return False
        except Exception as e:
            logger.error('Could not save comments: {:s}'.format(str(e)))
            return False
    except KeyboardInterrupt:
        logger.binfo("Downloading livestream comments has been aborted.")
        return False
def merge_segments():
    try:
        if pil.run_at_finish:
            try:
                thread = threading.Thread(target=helpers.run_command, args=(pil.run_at_finish,))
                thread.daemon = True
                thread.start()
                logger.binfo("Launched finish command: {:s}".format(pil.run_at_finish))
            except Exception as e:
                logger.warn('Could not execute command: {:s}'.format(str(e)))

        live_mp4_file = '{}{}_{}_{}_{}_live.mp4'.format(
            pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), pil.epochtime)
        live_segments_path = os.path.normpath(pil.broadcast_downloader.output_dir)

        if pil.segments_json_thread_worker and pil.segments_json_thread_worker.is_alive():
            pil.kill_segment_thread = True
            pil.segments_json_thread_worker.join()

        if pil.comment_thread_worker and pil.comment_thread_worker.is_alive():
            logger.info("Waiting for comment downloader to finish.")
            pil.comment_thread_worker.join()

        try:
            if not pil.skip_merge:
                logger.info('Merging downloaded files into video.')
                pil.broadcast_downloader.stitch(live_mp4_file, cleartempfiles=pil.clear_temp_files)
                logger.info('Successfully merged downloaded files into video.')
            else:
                logger.binfo("Merging of downloaded files has been disabled.")
                logger.binfo("Use --assemble command to manually merge downloaded segments.")
            if pil.clear_temp_files:
                helpers.remove_temp_folder()
            helpers.remove_lock()
        except ValueError as e:
            logger.separator()
            logger.error('Could not merge downloaded files: {:s}'.format(str(e)))
            if os.listdir(live_segments_path):
                logger.separator()
                logger.binfo("Segment directory is not empty. Trying to merge again.")
                logger.separator()
                pil.assemble_arg = live_mp4_file.replace(".mp4", "_downloads.json")
                assembler.assemble(user_called=False)
            else:
                logger.separator()
                logger.error("Segment directory is empty. There is nothing to merge.")
                logger.separator()
            helpers.remove_lock()
        except Exception as e:
            logger.error('Could not merge downloaded files: {:s}'.format(str(e)))
            helpers.remove_lock()
    except KeyboardInterrupt:
        logger.binfo('Aborted merging process, no video was created.')
        helpers.remove_lock()
def check_if_guesting():
    try:
        broadcast_guest = pil.livestream_obj.get('cobroadcasters', {})[0].get('username')
    except Exception:
        broadcast_guest = None
    if broadcast_guest and not pil.has_guest:
        logger.binfo('The livestream owner has started guesting "{}".'.format(broadcast_guest))
        pil.has_guest = broadcast_guest
    if not broadcast_guest and pil.has_guest:
        # broadcast_guest is None in this branch, so log the guest we stored earlier.
        logger.binfo('The livestream owner has stopped guesting "{}".'.format(pil.has_guest))
        pil.has_guest = None
def get_user_id():
    is_user_id = False
    user_id = None
    try:
        user_id = int(pil.dl_user)
        is_user_id = True
    except ValueError:
        try:
            user_res = pil.ig_api.username_info(pil.dl_user)
            if pil.verbose:
                logger.plain(json.dumps(user_res))
            user_id = user_res.get('user', {}).get('pk')
        except ClientConnectionError as cce:
            logger.error("Could not get user info for '{:s}': {:d} {:s}".format(pil.dl_user, cce.code, str(cce)))
            if "getaddrinfo failed" in str(cce):
                logger.error('Could not resolve host, check your internet connection.')
            if "timed out" in str(cce):
                logger.error('The connection timed out, check your internet connection.')
        except ClientThrottledError as cte:
            logger.error("Could not get user info for '{:s}': {:d} {:s}".format(pil.dl_user, cte.code, str(cte)))
        except ClientError as ce:
            logger.error("Could not get user info for '{:s}': {:d} {:s}".format(pil.dl_user, ce.code, str(ce)))
            if "Not Found" in str(ce):
                logger.error('The specified user does not exist.')
        except Exception as e:
            logger.error("Could not get user info for '{:s}': {:s}".format(pil.dl_user, str(e)))
        except KeyboardInterrupt:
            logger.binfo("Aborted getting user info for '{:s}', exiting.".format(pil.dl_user))
    if user_id and is_user_id:
        logger.info("Getting info for '{:s}' successful. Assuming the input is a user ID.".format(pil.dl_user))
        logger.separator()
        return user_id
    elif user_id:
        logger.info("Getting info for '{:s}' successful.".format(pil.dl_user))
        logger.separator()
        return user_id
    else:
        return None
def download_following():
    try:
        is_checking = ''
        if pil.dl_lives and pil.dl_replays:
            is_checking = 'livestreams or replays'
        elif pil.dl_lives and not pil.dl_replays:
            is_checking = 'livestreams'
        elif not pil.dl_lives and pil.dl_replays:
            is_checking = 'replays'
        logger.info("Checking following users for any {:s}.".format(is_checking))
        broadcast_f_list = pil.ig_api.reels_tray()
        if pil.verbose:
            logger.plain(json.dumps(broadcast_f_list))
        usernames_available_livestreams = []
        usernames_available_replays = []
        if broadcast_f_list['broadcasts'] and pil.dl_lives:
            for broadcast_f in broadcast_f_list['broadcasts']:
                username = broadcast_f['broadcast_owner']['username']
                if username not in usernames_available_livestreams:
                    usernames_available_livestreams.append(username)
        if broadcast_f_list.get('post_live', {}).get('post_live_items', []) and pil.dl_replays:
            for broadcast_r in broadcast_f_list.get('post_live', {}).get('post_live_items', []):
                for broadcast_f in broadcast_r.get("broadcasts", []):
                    username = broadcast_f['broadcast_owner']['username']
                    if username not in usernames_available_replays:
                        usernames_available_replays.append(username)
        logger.separator()
        available_total = list(usernames_available_livestreams)
        available_total.extend(x for x in usernames_available_replays if x not in available_total)
        if available_total:
            logger.info("The following users have available {:s}.".format(is_checking))
            logger.info(', '.join(available_total))
            logger.separator()
            iterate_users(available_total)
        else:
            logger.info("There are currently no available {:s}.".format(is_checking))
            logger.separator()
    except Exception as e:
        logger.error("Could not finish checking following users: {:s}".format(str(e)))
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The checking process has been aborted by the user.')
        logger.separator()
def start():
    if pil.args.downloadfollowing:
        if not pil.dl_lives:
            logger.binfo("Livestream downloading is disabled either with an argument or in the config file.")
            logger.separator()
        if not pil.dl_replays:
            logger.binfo("Replay downloading is disabled either with an argument or in the config file.")
            logger.separator()
        if not helpers.command_exists("pyinstalive"):
            logger.error("PyInstaLive must be properly installed when using the -df argument.")
            logger.separator()
        else:
            dlfuncs.download_following()
    else:
        if not helpers.download_folder_has_lockfile():
            helpers.create_lock_user()
            checking_self = pil.dl_user == pil.ig_api.authenticated_user_name
            if dlfuncs.get_broadcasts_info():
                if pil.dl_lives:
                    if checking_self:
                        logger.warn("Login with a different account to download your own livestreams.")
                    elif pil.livestream_obj:
                        logger.info("Livestream available, starting download.")
                        dlfuncs.download_livestream()
                    else:
                        logger.info('There are no available livestreams.')
                else:
                    logger.binfo("Livestream downloading is disabled either with an argument or in the config file.")
                logger.separator()
                if pil.dl_replays:
                    if pil.replays_obj:
                        logger.info('{:s} available, beginning download.'.format(
                            "Replays" if len(pil.replays_obj) > 1 else "Replay"))
                        dlfuncs.download_replays()
                    else:
                        logger.info('There are no available replays{:s}.'.format(
                            " saved on your account" if checking_self else ""))
                else:
                    logger.binfo("Replay downloading is disabled either with an argument or in the config file.")
            helpers.remove_lock()
            logger.separator()
        else:
            logger.warn("Lock file is already present for this user, there is probably another download ongoing.")
            logger.warn("If this is not the case, manually delete the file '{:s}' and try again.".format(
                pil.dl_user + '.lock'))
            logger.separator()
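
# The lock helpers referenced above are not shown in this section. Judging from
# iterate_users() and the warning messages here, an in-progress download is
# marked by a "<username>.lock" file inside the download folder. Minimal
# sketches under that assumption (not the actual helpers):
def download_folder_has_lockfile():
    return os.path.isfile(os.path.join(pil.dl_path, pil.dl_user + '.lock'))

def create_lock_user():
    # Create (or touch) the marker file; remove_lock() would delete it again.
    open(os.path.join(pil.dl_path, pil.dl_user + '.lock'), 'a').close()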
def validate_inputs(config, args, unknown_args):
    error_arr = []
    banner_shown = False
    try:
        if args.configpath:
            if os.path.isfile(args.configpath):
                pil.config_path = args.configpath
            else:
                logger.banner()
                banner_shown = True
                logger.warn("Custom config path is invalid, falling back to default path: {:s}".format(pil.config_path))
                pil.config_path = os.path.join(os.getcwd(), "pyinstalive.ini")
                logger.separator()
        if not os.path.isfile(pil.config_path):  # Create a new config if it doesn't exist
            if not banner_shown:
                logger.banner()
            helpers.new_config()
            return False
        pil.config_path = os.path.realpath(pil.config_path)
        config.read(pil.config_path)

        if args.download:
            pil.dl_user = args.download
            if args.downloadfollowing or args.batchfile:
                logger.banner()
                logger.warn("Please use only one download method. Use -h for more information.")
                logger.separator()
                return False
        elif not args.clean and not args.info and not args.assemble and not args.downloadfollowing \
                and not args.batchfile and not args.organize:
            logger.banner()
            logger.error("Please use a download method. Use -h for more information.")
            logger.separator()
            return False

        if helpers.bool_str_parse(config.get('pyinstalive', 'log_to_file')) == "Invalid":
            pil.log_to_file = True
            error_arr.append(['log_to_file', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'log_to_file')):
            pil.log_to_file = True
        else:
            pil.log_to_file = False

        logger.banner()
        if args.batchfile:
            if os.path.isfile(args.batchfile):
                pil.dl_batchusers = [user.rstrip('\n') for user in open(args.batchfile)]
                if not pil.dl_batchusers:
                    logger.error("The specified file is empty.")
                    logger.separator()
                    return False
                else:
                    logger.info("Downloading {:d} users from batch file.".format(len(pil.dl_batchusers)))
                    logger.separator()
            else:
                logger.error('The specified file does not exist.')
                logger.separator()
                return False

        if unknown_args:
            pil.uargs = unknown_args
            logger.warn("The following unknown argument(s) were provided and will be ignored: ")
            logger.warn(' ' + ' '.join(unknown_args))
            logger.separator()

        pil.ig_user = config.get('pyinstalive', 'username')
        pil.ig_pass = config.get('pyinstalive', 'password')
        pil.dl_path = config.get('pyinstalive', 'download_path')
        pil.run_at_start = config.get('pyinstalive', 'run_at_start')
        pil.run_at_finish = config.get('pyinstalive', 'run_at_finish')
        pil.ffmpeg_path = config.get('pyinstalive', 'ffmpeg_path')
        pil.verbose = config.get('pyinstalive', 'verbose')
        pil.skip_merge = config.get('pyinstalive', 'skip_merge')
        pil.args = args
        pil.config = config
        pil.proxy = config.get('pyinstalive', 'proxy')

        if args.dlpath:
            pil.dl_path = args.dlpath

        if helpers.bool_str_parse(config.get('pyinstalive', 'show_cookie_expiry')) == "Invalid":
            pil.show_cookie_expiry = False
            error_arr.append(['show_cookie_expiry', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'show_cookie_expiry')):
            pil.show_cookie_expiry = True
        else:
            pil.show_cookie_expiry = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'verbose')) == "Invalid":
            pil.verbose = False
            error_arr.append(['verbose', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'verbose')):
            pil.verbose = True
        else:
            pil.verbose = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'skip_merge')) == "Invalid":
            pil.skip_merge = False
            error_arr.append(['skip_merge', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'skip_merge')):
            pil.skip_merge = True
        else:
            pil.skip_merge = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'use_locks')) == "Invalid":
            pil.use_locks = False
            error_arr.append(['use_locks', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'use_locks')):
            pil.use_locks = True
        else:
            pil.use_locks = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'clear_temp_files')) == "Invalid":
            pil.clear_temp_files = False
            error_arr.append(['clear_temp_files', 'False'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'clear_temp_files')):
            pil.clear_temp_files = True
        else:
            pil.clear_temp_files = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'do_heartbeat')) == "Invalid":
            pil.do_heartbeat = True
            error_arr.append(['do_heartbeat', 'True'])
        if helpers.bool_str_parse(config.get('pyinstalive', 'do_heartbeat')):
            pil.do_heartbeat = True
        if args.noheartbeat or not helpers.bool_str_parse(config.get('pyinstalive', 'do_heartbeat')):
            pil.do_heartbeat = False
            logger.warn("Getting livestream heartbeat is disabled, this may cause degraded performance.")
            logger.separator()

        if not args.nolives and helpers.bool_str_parse(config.get('pyinstalive', 'download_lives')) == "Invalid":
            pil.dl_lives = True
            error_arr.append(['download_lives', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'download_lives')):
            pil.dl_lives = True
        else:
            pil.dl_lives = False

        if not args.noreplays and helpers.bool_str_parse(config.get('pyinstalive', 'download_replays')) == "Invalid":
            pil.dl_replays = True
            error_arr.append(['download_replays', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'download_replays')):
            pil.dl_replays = True
        else:
            pil.dl_replays = False

        if helpers.bool_str_parse(config.get('pyinstalive', 'download_comments')) == "Invalid":
            pil.dl_comments = True
            error_arr.append(['download_comments', 'True'])
        elif helpers.bool_str_parse(config.get('pyinstalive', 'download_comments')):
            pil.dl_comments = True
        else:
            pil.dl_comments = False

        if args.nolives:
            pil.dl_lives = False
        if args.noreplays:
            pil.dl_replays = False
        if args.verbose:
            pil.verbose = True
        if args.skip_merge:
            pil.skip_merge = True

        if not pil.dl_lives and not pil.dl_replays:
            logger.error("You have disabled both livestream and replay downloading.")
            logger.error("Please enable at least one of them and try again.")
            logger.separator()
            return False

        if pil.ffmpeg_path:
            if not os.path.isfile(pil.ffmpeg_path):
                pil.ffmpeg_path = None
                logger.warn("Custom FFmpeg binary path is invalid, falling back to environment variable.")
            else:
                logger.binfo("Overriding FFmpeg binary path: {:s}".format(pil.ffmpeg_path))
        else:
            if not helpers.command_exists('ffmpeg') and not args.info:
                logger.error("FFmpeg framework not found, exiting.")
                logger.separator()
                return False

        if not pil.ig_user or not len(pil.ig_user):
            raise Exception("Invalid value for 'username'. This value is required.")
        if not pil.ig_pass or not len(pil.ig_pass):
            raise Exception("Invalid value for 'password'. This value is required.")

        if not pil.dl_path.endswith('/'):
            pil.dl_path = pil.dl_path + '/'
        if not pil.dl_path or not os.path.exists(pil.dl_path):
            pil.dl_path = os.getcwd() + "/"
            if not args.dlpath:
                error_arr.append(['download_path', os.getcwd() + "/"])
            else:
                logger.warn("Custom download path is invalid, falling back to default path: {:s}".format(pil.dl_path))
                logger.separator()

        if pil.proxy and pil.proxy != '':
            parsed_url = urlparse(pil.proxy)
            if not parsed_url.netloc or not parsed_url.scheme:
                error_arr.append(['proxy', 'None'])
                pil.proxy = None

        if error_arr:
            for error in error_arr:
                logger.warn("Invalid value for '{:s}'. Using default value: {:s}".format(error[0], error[1]))
            logger.separator()

        if args.info:
            helpers.show_info()
            return False
        elif args.clean:
            helpers.clean_download_dir()
            return False
        elif args.assemble:
            pil.assemble_arg = args.assemble
            assembler.assemble()
            return False
        elif args.organize:
            organize.organize_videos()
            return False

        return True
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
        logger.error("Make sure the config file and given arguments are valid and try again.")
        logger.separator()
        return False
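
# `helpers.bool_str_parse` is used above as a three-way parser: a boolean for
# recognizable values and the sentinel string "Invalid" otherwise. A minimal
# sketch under that assumption (not the actual helper):
def bool_str_parse(bool_str):
    if bool_str.lower() in ('true', 'yes', 'y', '1'):
        return True
    if bool_str.lower() in ('false', 'no', 'n', '0'):
        return False
    return "Invalid"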
def download_hashtag(target_user_hashtag):
    try:
        max_cursor = 0
        has_more = 0
        downloaded_total = 0
        checked_total = 0
        available_total = 0
        current_feed_page = 1
        hashtag_id = None
        target_user_hashtag = "#" + target_user_hashtag
        if not os.path.exists(os.path.join(ptts.dl_path, target_user_hashtag)):
            os.makedirs(os.path.join(ptts.dl_path, target_user_hashtag))
        download_path = os.path.join(ptts.dl_path, target_user_hashtag)
        while True:
            if ptts.args.recent:
                logger.separator()
                logger.binfo("Only checking the first 10 videos (--recent was passed).")
            if not has_more and not max_cursor:
                logger.separator()
                logger.info("Retrieving first feed page (page {:d})".format(current_feed_page))
                logger.separator()
            if not hashtag_id:
                json_data = api.hashtag_search(text=ptts.tt_target_hashtag)
                hashtag_id = json_data.get("challenge_list")[0].get("challenge_info").get("cid")
                logger.info("Got hashtag Id: " + hashtag_id)
            json_data = api.hashtag_feed(hashtag_id=hashtag_id, cursor=max_cursor)
            with open("hashtag.json", 'w') as f:
                f.write(json.dumps(json_data))
            max_cursor = json_data.get('cursor')
            has_more = json_data.get('has_more')
            if not json_data.get("aweme_list", None):
                if checked_total:
                    logger.separator()
                    logger.info("End of feed reached. {:d} {:s} been downloaded.".format(
                        downloaded_total, "video has" if downloaded_total == 1 else "videos have"))
                elif not checked_total:
                    logger.info("There are no available videos to download.")
                logger.separator()
                break
            else:
                current_feed_page += 1
                available_total += len(json_data.get("aweme_list")) if not ptts.args.recent else 10
                for video in json_data.get("aweme_list"):
                    if ptts.args.recent and checked_total == 10:
                        if downloaded_total:
                            logger.separator()
                            logger.info("10 videos have been checked. {:d} {:s} been downloaded.".format(
                                downloaded_total, "video has" if downloaded_total == 1 else "videos have"))
                        else:
                            logger.info("10 videos have been checked. There are no available videos to download.")
                        logger.separator()
                        return
                    else:
                        video_uri = video.get("video").get("play_addr").get("uri")
                        video_desc = video.get("desc")
                        filename = '{:d}_{:s}.mp4'.format(video.get("create_time"),
                                                          video.get("author").get("unique_id"))
                        if video_uri.isdigit():
                            actual_video_uri = video.get("video").get("play_addr").get("url_list")[0]
                            if not os.path.isfile(os.path.join(download_path, filename)):
                                rr = requests.get(actual_video_uri, verify=True)
                                if rr.status_code == 200:
                                    with open(os.path.join(download_path, filename), 'wb') as f:
                                        f.write(rr.content)
                                    try:
                                        mp4_video_tags = MP4(os.path.join(download_path, filename))
                                        mp4_video_tags['\xa9cmt'] = video_desc
                                        mp4_video_tags.save()
                                    except Exception:
                                        pass
                                    logger.info("({:d}/{:d}) - Downloaded video with Id: {}".format(
                                        checked_total + 1, available_total, video_uri))
                                    downloaded_total += 1
                                else:
                                    logger.warn("Response did not return status 200, was {:d} instead. Giving up and "
                                                "moving on.".format(rr.status_code))
                                    logger.warn("The video Id was: {:s}".format(video_uri))
                            else:
                                logger.info("({:d}/{:d}) - Already downloaded video with Id: {}".format(
                                    checked_total + 1, available_total, video_uri))
                        else:
                            if not os.path.isfile(os.path.join(download_path, filename)):
                                rr = requests.get(Constants.VIDEO_BASE_URL.format(video_uri, 1),
                                                  verify=True, headers=Constants.REQUESTS_VIDEO_UA)
                                if rr.status_code == 200:
                                    with open(os.path.join(download_path, filename), 'wb') as f:
                                        f.write(rr.content)
                                    try:
                                        mp4_video_tags = MP4(os.path.join(download_path, filename))
                                        mp4_video_tags['\xa9cmt'] = video_desc
                                        mp4_video_tags.save()
                                    except Exception:
                                        pass
                                    logger.info("({:d}/{:d}) - Downloaded video with Id: {}".format(
                                        checked_total + 1, available_total, video_uri))
                                    downloaded_total += 1
                                else:
                                    logger.warn("Response did not return status 200, was {:d} instead. Trying with "
                                                "lower bitrate.".format(rr.status_code))
                                    rr = requests.get(Constants.VIDEO_BASE_URL.format(video_uri, 0),
                                                      verify=True, headers=Constants.REQUESTS_VIDEO_UA)
                                    if rr.status_code == 200:
                                        with open(os.path.join(download_path, filename), 'wb') as f:
                                            f.write(rr.content)
                                        logger.info("({:d}/{:d}) - Downloaded video with Id: {}".format(
                                            checked_total + 1, available_total, video_uri))
                                        downloaded_total += 1
                                    else:
                                        logger.warn("Response did not return status 200, was {:d} instead. Giving up "
                                                    "and moving on.".format(rr.status_code))
                                        logger.warn("The video Id was: {:s}".format(video_uri))
                            else:
                                logger.info("({:d}/{:d}) - Already downloaded video with Id: {}".format(
                                    checked_total + 1, available_total, video_uri))
                        checked_total += 1
            if has_more:
                logger.separator()
                logger.info("Retrieving next feed page (page {:d})".format(current_feed_page))
                logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.info("The download has been aborted.")
        logger.separator()
    except Exception as e:
        logger.separator()
        logger.error("Something went wrong: " + str(e))
        logger.separator()
def assemble(user_called=True, retry_with_zero_m4v=False):
    try:
        ass_json_file = pil.assemble_arg if pil.assemble_arg.endswith(".json") else pil.assemble_arg + ".json"
        ass_mp4_file = os.path.join(
            pil.dl_path, os.path.basename(ass_json_file).replace("_downloads", "").replace(".json", ".mp4"))
        ass_segment_dir = pil.assemble_arg if not pil.assemble_arg.endswith(".json") \
            else pil.assemble_arg.replace(".json", "")
        broadcast_info = {}
        if not os.path.isdir(ass_segment_dir) or not os.listdir(ass_segment_dir):
            logger.error('The segment directory does not exist or does not contain any files: %s' % ass_segment_dir)
            logger.separator()
            return
        if not os.path.isfile(ass_json_file):
            logger.warn("No matching json file found for the segment directory, trying to continue without it.")
            ass_stream_id = os.listdir(ass_segment_dir)[0].split('-')[0]
            broadcast_info['id'] = ass_stream_id
            broadcast_info['broadcast_status'] = "active"
            broadcast_info['segments'] = {}
        else:
            with open(ass_json_file) as info_file:
                try:
                    broadcast_info = json.load(info_file)
                except Exception:
                    logger.warn("Could not decode json file, trying to continue without it.")
                    ass_stream_id = os.listdir(ass_segment_dir)[0].split('-')[0]
                    broadcast_info['id'] = ass_stream_id
                    broadcast_info['broadcast_status'] = "active"
                    broadcast_info['segments'] = {}
        if broadcast_info.get('broadcast_status', '') == 'post_live':
            logger.error('Video segment files from replay downloads cannot be assembled.')
            return
        stream_id = str(broadcast_info['id'])
        segment_meta = broadcast_info.get('segments', {})
        if segment_meta:
            all_segments = [os.path.join(ass_segment_dir, k) for k in broadcast_info['segments'].keys()]
        else:
            all_segments = list(filter(
                os.path.isfile, glob.glob(os.path.join(ass_segment_dir, '%s-*.m4v' % stream_id))))
        all_segments = sorted(all_segments, key=lambda x: _get_file_index(x))
        sources = []
        video_stream_format = 'assembled_source_{0}_{1}_m4v.tmp'
        audio_stream_format = 'assembled_source_{0}_{1}_m4a.tmp'
        video_stream = ''
        audio_stream = ''
        has_skipped_zero_m4v = False
        if not all_segments:
            logger.error("No video segment files have been found in the specified folder.")
            logger.separator()
            return
        else:
            logger.info("Assembling video segment files from specified folder: {}".format(ass_segment_dir))
        for segment in all_segments:
            segment = re.sub(r'\?.*$', '', segment)
            if not os.path.isfile(segment.replace('.m4v', '.m4a')):
                logger.warn('Audio segment not found: {0!s}'.format(segment.replace('.m4v', '.m4a')))
                continue
            if segment.endswith('-init.m4v'):
                logger.info('Replacing %s' % segment)
                segment = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'repair', 'init.m4v')
            if segment.endswith('-0.m4v') and not retry_with_zero_m4v:
                has_skipped_zero_m4v = True
                continue
            video_stream = os.path.join(ass_segment_dir, video_stream_format.format(stream_id, len(sources)))
            audio_stream = os.path.join(ass_segment_dir, audio_stream_format.format(stream_id, len(sources)))
            file_mode = 'ab'
            with open(video_stream, file_mode) as outfile, open(segment, 'rb') as readfile:
                shutil.copyfileobj(readfile, outfile)
            with open(audio_stream, file_mode) as outfile, \
                    open(segment.replace('.m4v', '.m4a'), 'rb') as readfile:
                shutil.copyfileobj(readfile, outfile)
        if audio_stream and video_stream:
            sources.append({'video': video_stream, 'audio': audio_stream})
        for n, source in enumerate(sources):
            ffmpeg_binary = os.getenv('FFMPEG_BINARY', 'ffmpeg')
            cmd = [
                ffmpeg_binary, '-loglevel', 'warning', '-y',
                '-i', source['audio'],
                '-i', source['video'],
                '-c:v', 'copy', '-c:a', 'copy', ass_mp4_file]
            fnull = None  # FFmpeg output is not suppressed.
            exit_code = subprocess.call(cmd, stdout=fnull, stderr=subprocess.STDOUT)
            if exit_code != 0:
                logger.warn("FFmpeg exit code not '0' but '{:d}'.".format(exit_code))
                if has_skipped_zero_m4v and not retry_with_zero_m4v:
                    logger.binfo("*-0.m4v segment was detected but skipped, retrying to assemble video without "
                                 "skipping it.")
                    os.remove(source['audio'])
                    os.remove(source['video'])
                    logger.separator()
                    assemble(user_called, retry_with_zero_m4v=True)
                    return
            else:
                logger.info('The video file has been generated: %s' % os.path.basename(ass_mp4_file))
                os.remove(source['audio'])
                os.remove(source['video'])
        if user_called:
            logger.separator()
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
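
# `_get_file_index` is used above as the sort key but is not shown in this
# section. Segment files appear to be named "<stream_id>-<index>.m4v" (see the
# split('-')[0] and glob calls), so a plausible sketch sorts on the numeric
# index and lets unparsable names such as "<stream_id>-init.m4v" sort first:
import os
import re

def _get_file_index(filename):
    mobj = re.match(r'.+-(?P<idx>\d+)\.m4v$', os.path.basename(filename))
    return int(mobj.group('idx')) if mobj else -1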
def authenticate(username, password, force_use_login_args=False):
    ig_api = None
    try:
        if force_use_login_args:
            logger.binfo("Overriding configuration file login with -u and -p arguments.")
            logger.separator()
        cookie_file = "{}.json".format(username)
        if not os.path.isfile(cookie_file):
            # The settings file does not exist, perform a new login.
            logger.warn('Unable to find cookie file: {0!s}'.format(cookie_file))
            logger.info('Creating a new one.')
            ig_api = Client(username, password, on_login=lambda x: onlogin_callback(x, cookie_file))
        else:
            with open(cookie_file) as file_data:
                cached_settings = json.load(file_data, object_hook=from_json)
            device_id = cached_settings.get('device_id')
            try:
                # Reuse the cached authentication settings.
                ig_api = Client(username, password, settings=cached_settings)
            except ClientCookieExpiredError:
                logger.warn('The current cookie file has expired, creating a new one.')
                ig_api = Client(username, password, device_id=device_id,
                                on_login=lambda x: onlogin_callback(x, cookie_file))
    except (ClientLoginError, ClientError) as e:
        logger.separator()
        logger.error('Could not login: {:s}'.format(
            json.loads(e.error_response).get("error_title", "Error title not available.")))
        logger.error('{:s}'.format(json.loads(e.error_response).get("message", "Not available")))
        logger.separator()
    except Exception as e:
        if str(e).startswith("unsupported pickle protocol"):
            logger.warn("This cookie file is not compatible with Python {}.".format(sys.version.split(' ')[0][0]))
            logger.warn("Please delete your cookie file '{}.json' and try again.".format(username))
        else:
            logger.separator()
            logger.error('Unexpected exception: {:s}'.format(str(e)))
        logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.warn("The user authentication has been aborted.")
        logger.separator()
    if ig_api:
        logger.info('Successfully logged into account: {:s}'.format(str(ig_api.authenticated_user_name)))
        if pil.show_cookie_expiry and not force_use_login_args:
            try:
                cookie_expiry = ig_api.cookie_jar.auth_expires
                logger.info('Cookie file expiry date: {:s}'.format(
                    datetime.datetime.fromtimestamp(cookie_expiry).strftime('%Y-%m-%d at %I:%M:%S %p')))
            except AttributeError as e:
                logger.warn('An error occurred while getting the cookie file expiry date: {:s}'.format(str(e)))
        logger.separator()
        return ig_api
    else:
        return None
def run():
    ptts.initialize()
    logging.disable(logging.CRITICAL)
    config = configparser.ConfigParser()
    parser = argparse.ArgumentParser(
        description="You are running PyTikTokScraper {:s} using Python {:s}".format(
            Constants.SCRIPT_VER, Constants.PYTHON_VER))
    parser.add_argument('-d', '--download', dest='download', type=str, required=False,
                        help="The username (or uid) of the user whose posts you want to save.")
    parser.add_argument('-ht', '--hashtag', dest='hashtag', type=str, required=False,
                        help="The hashtag whose posts in the feed you want to save.")
    parser.add_argument('-r', '--recent', dest='recent', action='store_true',
                        help="When used, only retrieves the first 10 videos in the user's feed.")
    parser.add_argument('-gf', '--get-following', dest='getfollowing', type=str, required=False,
                        help="When used, retrieves the list of people you're following.")
    parser.add_argument('-uid', '--is-uid', dest='isuid', action='store_true',
                        help="When used, treat the download argument as the user ID.")
    parser.add_argument('-s', '--single', dest='single', type=str, required=False,
                        help="Pass a single video Id to download.")
    parser.add_argument('-l', '--livestream', dest='livestream', type=str, required=False,
                        help="Pass a username to download a livestream, if available.")
    args = parser.parse_args()
    if validate_inputs(config, args):
        api.login()
        if (args.download or args.livestream) and not args.isuid:
            try:
                target_user_json = api.search_user(ptts.tt_target_user)
                for user in target_user_json.get('user_list'):
                    if user.get('user_info').get('unique_id') == ptts.tt_target_user:
                        ptts.tt_target_id = user.get('user_info').get('uid')
                        response_user = api.get_user_info(ptts.tt_target_id)
                        ptts.tt_target_user_liveroomid = response_user.get('user').get('room_id') \
                            if response_user.get('user').get('room_id') > 0 else None
                        video_count = user.get('user_info').get('aweme_count')
                        logger.info("Found matching user profile with {:d} videos.".format(video_count))
                        if args.download and video_count < 1:
                            logger.separator()
                            logger.binfo("This user has no available videos to download.")
                            logger.separator()
                            sys.exit(0)
                if not ptts.tt_target_id:
                    raise IndexError
            except (IndexError, TypeError):
                logger.error("No user found matching '{:s}', trying tiktokapi.ga search.".format(ptts.tt_target_user))
                logger.separator()
                try:
                    target_user_json = api.search_user_tta(ptts.tt_target_user)
                    if target_user_json:
                        for user in target_user_json.get('user_list'):
                            if user.get('user_info').get('unique_id') == ptts.tt_target_user:
                                with open("usersearch.json", "w") as f:
                                    f.write(json.dumps(user.get("user_info")))
                                ptts.tt_target_id = user.get('user_info').get('uid')
                                ptts.tt_target_user_liveroomid = user.get('user_info').get('room_id') \
                                    if user.get('user_info').get('room_id') > 0 else None
                                video_count = user.get('user_info').get('aweme_count')
                                logger.info("Found matching user profile with {:d} videos.".format(video_count))
                                if args.download and video_count < 1:
                                    logger.separator()
                                    logger.binfo("This user has no available videos to download.")
                                    logger.separator()
                                    sys.exit(0)
                        if not ptts.tt_target_id:
                            raise IndexError
                    else:
                        raise IndexError
                except (IndexError, TypeError):
                    logger.error("No results on tiktokapi.ga either, the script will now exit.")
                    logger.separator()
                    sys.exit(0)
        elif args.download and args.isuid:
            ptts.tt_target_id = args.download
            try:
                int(ptts.tt_target_id)
            except ValueError:
                logger.error("The user ID '{}' is not a valid value. Exiting.".format(ptts.tt_target_id))
                logger.separator()
                sys.exit(1)
        elif args.livestream and args.isuid:
            ptts.tt_target_id = args.livestream
            try:
                int(ptts.tt_target_id)
            except ValueError:
                logger.error("The user ID '{}' is not a valid value. Exiting.".format(ptts.tt_target_id))
                logger.separator()
                sys.exit(1)
            response_user = api.get_user_info(ptts.tt_target_id)
            ptts.tt_target_user_liveroomid = response_user.get('user').get('room_id') \
                if response_user.get('user').get('room_id') > 0 else None
        if ptts.tt_target_id:
            logger.info("Retrieved user ID: {:s}".format(ptts.tt_target_id))
            logger.separator()
        if args.getfollowing:
            logger.info("Retrieving list of following users...")
            logger.warn("Pagination does not work properly, use this at your own risk!")
            logger.separator()
            json_resp = api.get_following(ptts.tt_target_id)
            following_txt = os.path.join(os.getcwd(), "following_{:s}.txt".format(ptts.tt_target_user))
            if os.path.isfile(following_txt):
                os.remove(following_txt)
            for user in json_resp.get('followings'):
                user_text = user.get('unique_id') + " - " + user.get('uid')
                logger.plain(user_text)
                with open(following_txt, 'a') as f:
                    f.write(user_text + '\n')
            logger.separator()
            logger.info("Written {:d} users to {:s}".format(len(json_resp.get('followings')), following_txt))
            logger.separator()
        if ptts.args.download:
            logger.info("Starting download of all videos from profile.")
            downloader.download_all(ptts.tt_target_id)
        if ptts.args.hashtag:
            logger.info("Starting download of all posts from hashtag '{:s}'.".format(ptts.tt_target_hashtag))
            downloader.download_hashtag(ptts.tt_target_hashtag)
        if ptts.args.livestream:
            if ptts.tt_target_user_liveroomid:
                logger.info("Starting download for livestream.")
                downloader.download_live(ptts.tt_target_user_liveroomid)
            else:
                logger.warn("There is currently no ongoing livestream available.")
                logger.separator()
def authenticate(username, password, force_use_login_args=False):
    ig_api = None
    try:
        if force_use_login_args:
            pil.ig_user = username
            pil.ig_pass = password
            pil.config_login_overridden = True
            logger.binfo("Overriding configuration file login with -u and -p arguments.")
            logger.separator()
        cookie_file = os.path.join(os.path.dirname(pil.config_path), "{}.json".format(username))
        if not os.path.isfile(cookie_file):
            # The settings file does not exist, perform a new login.
            logger.warn('Unable to find cookie file: {0!s}'.format(os.path.basename(cookie_file)))
            logger.info('Creating a new one.')
            ig_api = Client(username, password,
                            on_login=lambda x: onlogin_callback(x, cookie_file), proxy=pil.proxy)
            login(ig_api)
        else:
            with open(cookie_file) as file_data:
                cached_settings = json.load(file_data, object_hook=from_json)
            logger.info('Using settings file: {0!s}'.format(cookie_file))
            try:
                # Reuse the cached authentication settings.
                ig_api = Client(username, password, settings=cached_settings, proxy=pil.proxy)
            except (ClientSentryBlockError, ClientChallengeRequiredError,
                    ClientCheckpointRequiredError, ClientCookieExpiredError,
                    ClientLoginError, ClientError) as e:
                logger.separator()
                logger.warn('Some sort of login exception!')
                if pil.verbose:
                    logger.plain(json.dumps(e.error_response))
                logger.error('Could not login: {:s}'.format(e.error_response))
                logger.error('{:s}'.format(json.loads(e.error_response).get("message", e.error_response)))
                logger.separator()
                ig_api = Client(username, password,
                                on_login=lambda x: onlogin_callback(x, cookie_file), proxy=pil.proxy)
                login(ig_api)
                logger.warn('Successfully resolved the login error and logged back in.')
    except (ClientLoginError, ClientError) as e:
        logger.separator()
        if pil.verbose:
            logger.plain(json.dumps(e.error_response))
        logger.error('Could not login: {:s}'.format(e.error_response))
        logger.error('{:s}'.format(json.loads(e.error_response).get("message", e.error_response)))
        logger.separator()
    except Exception as e:
        if pil.verbose:
            logger.plain(str(e))
        if str(e).startswith("unsupported pickle protocol"):
            logger.warn("This cookie file is not compatible with Python {}.".format(sys.version.split(' ')[0][0]))
            logger.warn("Please delete your cookie file '{}.json' and try again.".format(username))
        else:
            logger.separator()
            logger.error('Unexpected exception: {:s}'.format(str(e)))
        logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.warn("The user authentication has been aborted.")
        logger.separator()
    if ig_api:
        logger.info('Successfully logged into account: {:s}'.format(str(ig_api.authenticated_user_name)))
        if pil.show_cookie_expiry and not force_use_login_args:
            try:
                cookie_expiry = ig_api.cookie_jar.auth_expires
                logger.info('Cookie file expiry date: {:s}'.format(
                    datetime.datetime.fromtimestamp(cookie_expiry).strftime('%Y-%m-%d at %I:%M:%S %p')))
            except Exception as e:
                logger.warn('An error occurred while getting the cookie file expiry date: {:s}'.format(str(e)))
        logger.separator()
        return ig_api
    else:
        return None
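
# `onlogin_callback` and `from_json` are assumed to be the cookie-persistence
# helpers from the instagram_private_api examples: byte values are
# base64-encoded so the client settings survive a round trip through JSON.
# A sketch along those lines (verify against the actual module before relying
# on it):
import codecs

def to_json(python_object):
    if isinstance(python_object, bytes):
        return {'__class__': 'bytes',
                '__value__': codecs.encode(python_object, 'base64').decode()}
    raise TypeError(repr(python_object) + ' is not JSON serializable')

def from_json(json_object):
    if json_object.get('__class__') == 'bytes':
        return codecs.decode(json_object['__value__'].encode(), 'base64')
    return json_object

def onlogin_callback(api, settings_file):
    # Persist the freshly authenticated client settings to the cookie file.
    with open(settings_file, 'w') as outfile:
        json.dump(api.settings, outfile, default=to_json)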
def organize_files():
    try:
        files = [f for f in os.listdir(pil.dl_path) if os.path.isfile(os.path.join(pil.dl_path, f))]
        not_moved = 0
        has_moved = 0
        username_regex = r'(?<=\d{8}_)(.*?)(?=_\d)'
        date_regex = r'^\d{8}'
        timestamp_regex = r'_(\d{10})_'
        type_regex = r'(live|replay)'
        raw_file_dict = {}
        new_file_dict = {}
        for file in files:
            username = re.search(username_regex, file)[0]
            date_ts = datetime.strptime(re.search(date_regex, file)[0], '%Y%m%d').strftime('%d-%m-%Y')
            time_ts = time.strftime('%I-%M-%S-%p', time.localtime(int(re.search(timestamp_regex, file)[1])))
            file_ext = os.path.splitext(file)[1]
            file_type = re.search(type_regex, file)[0]
            new_file = "{:s} {:s} {:s} ({:s}){:s}".format(date_ts, time_ts, username, file_type, file_ext)
            raw_file_dict[file] = username
            new_file_dict[file] = new_file
        for filename, username in raw_file_dict.items():
            try:
                os.makedirs(os.path.join(pil.dl_path, username))
            except OSError:
                pass  # The user's folder already exists.
            source_path = os.path.join(pil.dl_path, filename)
            destination_path = os.path.join(pil.dl_path, username, new_file_dict.get(filename))
            if not os.path.isfile(destination_path):
                try:
                    shutil.move(source_path, destination_path)
                    logger.info("Moved and renamed '{:s}' successfully.".format(filename))
                    has_moved += 1
                except OSError as oe:
                    logger.warn("Could not move and rename {:s}: {:s}".format(filename, str(oe)))
                    not_moved += 1
            else:
                logger.binfo("Did not move and rename '{:s}' because it already exists.".format(filename))
                not_moved += 1
        logger.separator()
        logger.info("{} {} moved.".format(has_moved, "file was" if has_moved == 1 else "files were"))
        if not_moved:
            logger.binfo("{} {} not moved.".format(not_moved, "file was" if not_moved == 1 else "files were"))
        logger.separator()
    except Exception as e:
        logger.error("Could not organize files: {:s}".format(str(e)))
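
# The four regexes above assume downloaded filenames shaped like
# "<yyyymmdd>_<username>_<stream id>_<epoch>_<live|replay>.mp4". A quick
# self-check with a made-up filename (all values hypothetical):
def _organize_regex_demo():
    sample = "20190501_johndoe_17849164549199191_1556725800_live.mp4"
    assert re.search(r'(?<=\d{8}_)(.*?)(?=_\d)', sample)[0] == 'johndoe'
    assert re.search(r'^\d{8}', sample)[0] == '20190501'
    assert re.search(r'_(\d{10})_', sample)[1] == '1556725800'
    assert re.search(r'(live|replay)', sample)[0] == 'live'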
def download_livestream():
    try:
        def print_status(sep=True):
            if pil.do_heartbeat:
                heartbeat_info = pil.ig_api.broadcast_heartbeat_and_viewercount(pil.livestream_obj.get('id'))
            viewers = pil.livestream_obj.get('viewer_count', 0)
            if sep:
                logger.separator()
            else:
                logger.info('Username    : {:s}'.format(pil.dl_user))
            logger.info('Viewers     : {:s} watching'.format(str(int(viewers))))
            logger.info('Airing time : {:s}'.format(get_stream_duration(0)))
            if pil.do_heartbeat:
                logger.info('Status      : {:s}'.format(heartbeat_info.get('broadcast_status').title()))
                return heartbeat_info.get('broadcast_status') not in ['active', 'interrupted']
            else:
                return None

        mpd_url = (pil.livestream_obj.get('dash_manifest')
                   or pil.livestream_obj.get('dash_abr_playback_url')
                   or pil.livestream_obj.get('dash_playback_url'))
        pil.live_folder_path = '{}{}_{}_{}_{}_live_downloads'.format(
            pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), pil.epochtime)
        pil.broadcast_downloader = live.Downloader(
            mpd=mpd_url,
            output_dir=pil.live_folder_path,
            user_agent=pil.ig_api.user_agent,
            max_connection_error_retry=3,
            duplicate_etag_retry=30,
            callback_check=print_status,
            mpd_download_timeout=3,
            download_timeout=3,
            ffmpeg_binary=pil.ffmpeg_path)
    except Exception as e:
        logger.error('Could not start downloading livestream: {:s}'.format(str(e)))
        logger.separator()
        helpers.remove_lock()
    try:
        broadcast_owner = pil.livestream_obj.get('broadcast_owner', {}).get('username')
        try:
            broadcast_guest = pil.livestream_obj.get('cobroadcasters', {})[0].get('username')
        except Exception:
            broadcast_guest = None
        if broadcast_owner != pil.dl_user:
            logger.binfo('This livestream is a dual-live, the owner is "{}".'.format(broadcast_owner))
            broadcast_guest = None
        if broadcast_guest:
            logger.binfo('This livestream is a dual-live, the current guest is "{}".'.format(broadcast_guest))
            pil.has_guest = broadcast_guest
        logger.separator()
        print_status(False)
        logger.separator()
        helpers.create_lock_folder()
        pil.segments_json_thread_worker = threading.Thread(target=helpers.generate_json_segments)
        pil.segments_json_thread_worker.start()
        logger.info('Downloading livestream, press [CTRL+C] to abort.')
        if pil.run_at_start:
            try:
                thread = threading.Thread(target=helpers.run_command, args=(pil.run_at_start,))
                thread.daemon = True
                thread.start()
                logger.binfo("Launched start command: {:s}".format(pil.run_at_start))
            except Exception as e:
                logger.warn('Could not launch command: {:s}'.format(str(e)))
        if pil.dl_comments:
            try:
                comments_json_file = '{}{}_{}_{}_{}_live_comments.json'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), pil.epochtime)
                pil.comment_thread_worker = threading.Thread(target=get_live_comments, args=(comments_json_file,))
                pil.comment_thread_worker.start()
            except Exception as e:
                logger.error('An error occurred while downloading comments: {:s}'.format(str(e)))
        pil.broadcast_downloader.run()
        logger.separator()
        logger.info("The livestream has been ended by the user.")
        logger.separator()
        logger.info('Airtime duration  : {}'.format(get_stream_duration(0)))
        logger.info('Download duration : {}'.format(get_stream_duration(1)))
        logger.info('Missing (approx.) : {}'.format(get_stream_duration(2)))
        logger.separator()
        merge_segments()
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The download has been aborted.')
        logger.separator()
        logger.info('Airtime duration  : {}'.format(get_stream_duration(0)))
        logger.info('Download duration : {}'.format(get_stream_duration(1)))
        logger.info('Missing (approx.) : {}'.format(get_stream_duration(2)))
        logger.separator()
        if not pil.broadcast_downloader.is_aborted:
            pil.broadcast_downloader.stop()
            merge_segments()
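
# `get_stream_duration` is referenced throughout but not shown in this section.
# From the labels above, mode 0 is the broadcast's airtime, mode 1 the download
# time, and mode 2 the approximate part missed before the download began. A
# hypothetical sketch only (the 'published_time' field name is assumed):
import time
from datetime import timedelta

def get_stream_duration(duration_type):
    started_at = pil.livestream_obj.get('published_time', pil.epochtime)
    if duration_type == 0:    # airtime duration
        seconds = int(time.time()) - started_at
    elif duration_type == 1:  # download duration
        seconds = int(time.time()) - pil.epochtime
    else:                     # missing (approx.): gap before the download began
        seconds = pil.epochtime - started_at
    return str(timedelta(seconds=max(seconds, 0)))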
def organize_videos():
    try:
        # Make a variable equal to the names of the files in the current directory.
        download_path_files = os.listdir(pil.dl_path)
        # Count the amount of files moved and not moved because they already exist etc.
        not_moved = 0
        has_moved = 0
        # The downloaded livestream(s) are in MP4 format.
        video_format = ['mp4']
        # Find the MP4 files and save them in a variable called 'filenames'.
        filenames = [filename for filename in download_path_files if filename.split('.')[-1] in video_format]
        if len(filenames) == 0:
            logger.binfo("No files were found to organize.")
            logger.separator()
            return
        for filename in filenames:
            # Split the filename into parts.
            filename_parts = filename.split('_')
            # Get the date from the filename.
            date = datetime.strptime(filename_parts[0], '%Y%m%d').strftime('%d-%m-%Y')
            # Get the username from the filename.
            username = '_'.join(filename_parts[1:-3])
            # Get the time from the unix timestamp.
            time_from_unix_timestamp = time.strftime('%I.%M%p', time.localtime(int(filename_parts[-2])))
            # Remove the leading zero from single-digit hours.
            if float(time_from_unix_timestamp[0:2]) < 10:
                time_from_unix_timestamp = time_from_unix_timestamp[1:]
            # Get the last part of the filename ("live.mp4" or "replay.mp4").
            live_or_replay = filename_parts[-1]
            # The path of each original filename is as follows:
            old_filename_path = os.path.join(pil.dl_path, filename)
            # We want to change the format of each filename to:
            new_filename_format = date + " " + username + " [" + time_from_unix_timestamp + "] " + live_or_replay
            # The path of each new filename is as follows:
            new_filename_path = os.path.join(pil.dl_path, new_filename_format)
            # Change the filenames.
            os.rename(old_filename_path, new_filename_path)
        # Now that the files have been renamed, we need to rescan the files in the directory.
        download_path_files = os.listdir(pil.dl_path)
        new_filenames = [filename for filename in download_path_files if filename.split('.')[-1] in video_format]
        # We want a dictionary where the filenames are the keys and the usernames are the values.
        filenames_to_usernames = {}
        # Populate the dictionary with a loop.
        for filename in new_filenames:
            # Split the filenames into parts so we get just the usernames:
            filename_parts = filename.split()
            # This is how to get the usernames from the split filenames:
            username = filename_parts[1]
            # Filename = key and username = value:
            filenames_to_usernames[filename] = username
        # We only want one folder for each username, so convert the list into a set to remove duplicates.
        usernames = set(filenames_to_usernames.values())
        # Make a folder for each username.
        for username in usernames:
            username_path = os.path.join(pil.dl_path, username)
            if not os.path.isdir(username_path):
                os.mkdir(username_path)
        # Move the videos into the folders.
        for filename, username in filenames_to_usernames.items():
            filename_base = os.path.basename(filename)
            source_path = os.path.join(pil.dl_path, filename)
            destination_path = os.path.join(pil.dl_path, username, filename_base)
            if not os.path.isfile(destination_path):
                try:
                    shutil.move(source_path, destination_path)
                    logger.info("Moved '{:s}' successfully.".format(filename_base))
                    has_moved += 1
                except OSError as oe:
                    logger.warn("Could not move {:s}: {:s}".format(filename_base, str(oe)))
                    not_moved += 1
            else:
                logger.binfo("Did not move '{:s}' because it already exists.".format(filename_base))
                not_moved += 1
        logger.separator()
        logger.info("{} {} moved.".format(has_moved, "file was" if has_moved == 1 else "files were"))
        if not_moved:
            logger.binfo("{} {} not moved.".format(not_moved, "file was" if not_moved == 1 else "files were"))
        logger.separator()
    except Exception as e:
        logger.error("Could not organize files: {:s}".format(str(e)))
def download_replays():
    try:
        try:
            logger.info('Amount of replays : {:s}'.format(str(len(pil.replays_obj))))
            for replay_index, replay_obj in enumerate(pil.replays_obj):
                bc_dash_manifest = parseString(replay_obj.get('dash_manifest')).getElementsByTagName('Period')
                bc_duration_raw = bc_dash_manifest[0].getAttribute("duration")
                bc_minutes = (bc_duration_raw.split("H"))[1].split("M")[0]
                bc_seconds = ((bc_duration_raw.split("M"))[1].split("S")[0]).split('.')[0]
                logger.info('Replay {:s} duration : {:s} minutes and {:s} seconds'.format(
                    str(replay_index + 1), bc_minutes, bc_seconds))
        except Exception as e:
            logger.warn("An error occurred while getting replay duration information: {:s}".format(str(e)))
        logger.separator()
        logger.info("Downloading replays, press [CTRL+C] to abort.")
        logger.separator()
        for replay_index, replay_obj in enumerate(pil.replays_obj):
            exists = False
            pil.livestream_obj = replay_obj
            if Constants.PYTHON_VER[0][0] == '2':
                directories = (os.walk(pil.dl_path).next()[1])
            else:
                directories = (os.walk(pil.dl_path).__next__()[1])
            for directory in directories:
                if (str(replay_obj.get('id')) in directory) and ("_live_" not in directory):
                    logger.binfo("Already downloaded a replay with ID '{:s}'.".format(str(replay_obj.get('id'))))
                    exists = True
            if not exists:
                current = replay_index + 1
                logger.info("Downloading replay {:s} of {:s} with ID '{:s}'.".format(
                    str(current), str(len(pil.replays_obj)), str(replay_obj.get('id'))))
                pil.live_folder_path = '{}{}_{}_{}_{}_replay_downloads'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), pil.epochtime)
                broadcast_downloader = replay.Downloader(
                    mpd=replay_obj.get('dash_manifest'),
                    output_dir=pil.live_folder_path,
                    user_agent=pil.ig_api.user_agent,
                    ffmpeg_binary=pil.ffmpeg_path)
                if pil.use_locks:
                    helpers.create_lock_folder()
                replay_mp4_file = '{}{}_{}_{}_{}_replay.mp4'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), pil.epochtime)
                comments_json_file = '{}{}_{}_{}_{}_replay_comments.json'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user, pil.livestream_obj.get('id'), pil.epochtime)
                pil.comment_thread_worker = threading.Thread(target=get_replay_comments, args=(comments_json_file,))
                broadcast_downloader.download(replay_mp4_file, cleartempfiles=pil.clear_temp_files)
                if pil.clear_temp_files:
                    helpers.remove_temp_folder()
                if pil.dl_comments:
                    logger.info("Downloading replay comments.")
                    try:
                        get_replay_comments(comments_json_file)
                    except Exception as e:
                        logger.error('An error occurred while downloading comments: {:s}'.format(str(e)))
                logger.info("Finished downloading replay {:s} of {:s}.".format(
                    str(current), str(len(pil.replays_obj))))
                helpers.remove_lock()
                if current != len(pil.replays_obj):
                    logger.separator()
        logger.separator()
        logger.info("Finished downloading all available replays.")
        helpers.remove_lock()
    except Exception as e:
        logger.error('Could not save replay: {:s}'.format(str(e)))
        helpers.remove_lock()
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The download has been aborted by the user, exiting.')
        logger.separator()
        helpers.remove_temp_folder()
        helpers.remove_lock()
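
# The Period duration parsed above is an ISO 8601 duration string. A quick
# self-check of the split logic with a hypothetical value:
def _duration_split_demo():
    bc_duration_raw = "PT0H12M34.567S"
    bc_minutes = bc_duration_raw.split("H")[1].split("M")[0]
    bc_seconds = bc_duration_raw.split("M")[1].split("S")[0].split('.')[0]
    assert (bc_minutes, bc_seconds) == ('12', '34')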