def get_broadcasts_info():
    """Fetch the story feed of the target user and cache broadcast data.

    Stores the live broadcast object in pil.livestream_obj and any replay
    broadcasts in pil.replays_obj.

    Returns:
        True when the feed was fetched successfully, False otherwise
        (no user id, throttled, or any other failure).
    """
    try:
        user_id = get_user_id()
        if not user_id:
            return False
        broadcasts = pil.ig_api.user_story_feed(user_id)
        if pil.verbose:
            logger.plain(json.dumps(broadcasts))
        pil.livestream_obj = broadcasts.get('broadcast')
        pil.replays_obj = broadcasts.get('post_live_item', {}).get('broadcasts', [])
        return True
    except ClientThrottledError:
        logger.error(
            'Could not check because you are making too many requests at this time.')
        return False
    except Exception as check_error:
        error_text = str(check_error)
        logger.error('Could not finish checking: {:s}'.format(error_text))
        # Give the user a more actionable hint for the two most common causes.
        if "timed out" in error_text:
            logger.error('The connection timed out, check your internet connection.')
        if "login_required" in error_text:
            logger.error(
                'Login cookie was loaded but user is not actually logged in. Delete the cookie file and try again.')
        return False
    except KeyboardInterrupt:
        logger.binfo('Aborted checking for livestreams and replays, exiting.')
        return False
def get_replay(self):
    """Collect all comments of a finished (replay) broadcast.

    Pages through the replay comment endpoint until no more comments or no
    next offset is returned, then writes the broadcast metadata (including
    the collected comments) to self.destination_file as JSON and stores the
    comments on self.comments.
    """
    comments_collected = []
    starting_offset = 0
    encoding_tag = self.broadcast.get('encoding_tag')
    while True:
        try:
            comments_res = self.api.replay_broadcast_comments(
                self.broadcast.get('id'),
                starting_offset=starting_offset,
                encoding_tag=encoding_tag)
            if pil.verbose:
                logger.plain(json.dumps(comments_res))
            starting_offset = comments_res.get('ending_offset', 0)
            comments = comments_res.get('comments', [])
            comments_collected.extend(comments)
            # Stop when the server has no more comments or no next offset.
            if not comments_res.get('comments') or not starting_offset:
                break
            time.sleep(4)  # be gentle with the API between pages
        except Exception as e:
            # Fix: the original `except Exception: pass` retried immediately
            # and forever on a persistent error (tight infinite loop).
            # Log the problem and stop collecting; whatever was gathered so
            # far is still saved below.
            logger.warn('Error while collecting replay comments: {:s}'.format(str(e)))
            break
    if comments_collected:
        self.broadcast['comments'] = comments_collected
        # Replays start from the beginning, so no buffered duration applies.
        self.broadcast['initial_buffered_duration'] = 0
        with open(self.destination_file, 'w') as outfile:
            json.dump(self.broadcast, outfile, indent=2)
    self.comments = comments_collected
def show_info():
    """Print an environment overview: versions, FFmpeg availability,
    cookie files found in the working directory, configured commands
    and the configuration file contents."""
    cookie_files = []
    cookie_from_config = ''
    try:
        # A cookie file is any .json in the working directory that carries
        # a 'created_ts' key; remember the one matching the configured user.
        for candidate in os.listdir(os.getcwd()):
            if candidate.endswith(".json"):
                with open(candidate) as data_file:
                    try:
                        json_data = json.load(data_file)
                        if json_data.get('created_ts'):
                            cookie_files.append(candidate)
                    except Exception:
                        pass  # not a cookie file, skip silently
                if pil.ig_user == candidate.replace(".json", ''):
                    cookie_from_config = candidate
    except Exception as e:
        logger.warn("Could not check for cookie files: {:s}".format(str(e)))
    logger.whiteline()
    logger.info("To see all the available arguments, use the -h argument.")
    logger.whiteline()
    logger.info("PyInstaLive version: {:s}".format(Constants.SCRIPT_VER))
    logger.info("Python version: {:s}".format(Constants.PYTHON_VER))
    if command_exists("ffmpeg"):
        logger.info("FFmpeg framework: Available")
    else:
        logger.error("FFmpeg framework: Not found")
    if cookie_from_config:
        logger.info("Cookie files: {:s} ({:s} matches config user)".format(
            str(len(cookie_files)), cookie_from_config))
    elif cookie_files:
        logger.info("Cookie files: {:s}".format(str(len(cookie_files))))
    else:
        logger.warn("Cookie files: None found")
    logger.info("CLI supports color: {:s}".format(
        "Yes" if logger.supports_color() else "No"))
    logger.info("Command to run at start: {:s}".format(
        pil.run_at_start if pil.run_at_start else "None"))
    logger.info("Command to run at finish: {:s}".format(
        pil.run_at_finish if pil.run_at_finish else "None"))
    if os.path.exists(pil.config_path):
        logger.info("Config file contents:")
        logger.whiteline()
        with open(pil.config_path) as f:
            for line in f:
                logger.plain(" {:s}".format(line.rstrip()))
    else:
        logger.error("Config file: Not found")
    logger.whiteline()
    logger.info("End of PyInstaLive information screen.")
    logger.separator()
def get_user_id():
    """Resolve pil.dl_user to a numeric Instagram user id.

    If pil.dl_user already parses as an integer it is treated as a user id
    directly; otherwise the username is looked up through the API.

    Returns:
        The user id on success, or None when it could not be resolved.
    """
    is_user_id = False
    user_id = None
    try:
        user_id = int(pil.dl_user)
        is_user_id = True
    except ValueError:
        # Input is a username; resolve it via the API.
        try:
            user_res = pil.ig_api.username_info(pil.dl_user)
            if pil.verbose:
                logger.plain(json.dumps(user_res))
            user_id = user_res.get('user', {}).get('pk')
        except ClientConnectionError as conn_err:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(
                    pil.dl_user, conn_err.code, str(conn_err)))
            if "getaddrinfo failed" in str(conn_err):
                logger.error('Could not resolve host, check your internet connection.')
            if "timed out" in str(conn_err):
                logger.error('The connection timed out, check your internet connection.')
        except ClientThrottledError as throttle_err:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(
                    pil.dl_user, throttle_err.code, str(throttle_err)))
        except ClientError as client_err:
            logger.error(
                "Could not get user info for '{:s}': {:d} {:s}".format(
                    pil.dl_user, client_err.code, str(client_err)))
            if "Not Found" in str(client_err):
                logger.error('The specified user does not exist.')
        except Exception as unknown_err:
            logger.error("Could not get user info for '{:s}': {:s}".format(
                pil.dl_user, str(unknown_err)))
        except KeyboardInterrupt:
            logger.binfo("Aborted getting user info for '{:s}', exiting.".format(
                pil.dl_user))
    if not user_id:
        return None
    if is_user_id:
        logger.info(
            "Getting info for '{:s}' successful. Assuming input is an user Id."
            .format(pil.dl_user))
    else:
        logger.info("Getting info for '{:s}' successful.".format(pil.dl_user))
    logger.separator()
    return user_id
def download_following():
    """Check the users the logged-in account follows for livestreams and/or
    replays (depending on pil.dl_lives / pil.dl_replays) and hand every user
    with available content to iterate_users()."""
    try:
        if pil.dl_lives and pil.dl_replays:
            checking_what = 'livestreams or replays'
        elif pil.dl_lives:
            checking_what = 'livestreams'
        elif pil.dl_replays:
            checking_what = 'replays'
        else:
            checking_what = ''
        logger.info("Checking following users for any {:s}.".format(checking_what))
        tray = pil.ig_api.reels_tray()
        if pil.verbose:
            logger.plain(json.dumps(tray))
        live_usernames = []
        replay_usernames = []
        # Collect owners of currently running livestreams.
        if tray['broadcasts'] and pil.dl_lives:
            for live_bc in tray['broadcasts']:
                owner = live_bc['broadcast_owner']['username']
                if owner not in live_usernames:
                    live_usernames.append(owner)
        # Collect owners of finished broadcasts that still have a replay up.
        if tray.get('post_live', {}).get('post_live_items', []) and pil.dl_replays:
            for post_live_item in tray.get('post_live', {}).get('post_live_items', []):
                for replay_bc in post_live_item.get("broadcasts", []):
                    owner = replay_bc['broadcast_owner']['username']
                    if owner not in replay_usernames:
                        replay_usernames.append(owner)
        logger.separator()
        # Merge both lists, livestream owners first, without duplicates.
        combined = list(live_usernames)
        for owner in replay_usernames:
            if owner not in combined:
                combined.append(owner)
        if combined:
            logger.info("The following users have available {:s}.".format(checking_what))
            logger.info(', '.join(combined))
            logger.separator()
            iterate_users(combined)
        else:
            logger.info("There are currently no available {:s}.".format(checking_what))
            logger.separator()
    except Exception as e:
        logger.error("Could not finish checking following users: {:s}".format(str(e)))
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The checking process has been aborted by the user.')
        logger.separator()
def print_status(sep=True):
    """Log the current livestream status (viewers, airing time, state).

    Args:
        sep: when True print a separator line first; when False print the
            username line instead.

    Returns:
        True when the heartbeat reports the broadcast is no longer
        'active'/'interrupted', False when it still is, or None when
        heartbeat checking (pil.do_heartbeat) is disabled.
    """
    heartbeat_info = None
    if pil.do_heartbeat:
        heartbeat_info = pil.ig_api.broadcast_heartbeat_and_viewercount(
            pil.livestream_obj.get('id'))
        if pil.verbose:
            logger.plain(json.dumps(heartbeat_info))
    # +1 accounts for this downloader being connected to the stream as well.
    viewers = pil.livestream_obj.get('viewer_count', 0) + 1
    if sep:
        logger.separator()
    else:
        logger.info('Username : {:s}'.format(pil.dl_user))
    logger.info('Viewers : {:s} watching'.format(str(int(viewers))))
    logger.info('Airing time : {:s}'.format(get_stream_duration(0)))
    if not pil.do_heartbeat:
        return None
    logger.info('Status : {:s}'.format(
        heartbeat_info.get('broadcast_status').title()))
    return heartbeat_info.get('broadcast_status') not in ['active', 'interrupted']
def get_live(self, first_comment_created_at=0):
    """Fetch one page of live-broadcast comments and persist the total.

    Polls the broadcast comment endpoint for comments newer than
    first_comment_created_at, appends them to self.comments, and (only when
    new comments arrived) rewrites self.destination_file with the broadcast
    metadata plus all collected comments.

    Returns the timestamp to use as the cursor for the next poll.
    """
    comments_collected = self.comments
    before_count = len(comments_collected)
    try:
        comments_res = self.api.broadcast_comments(
            self.broadcast.get('id'), last_comment_ts=first_comment_created_at)
        if pil.verbose:
            logger.plain(json.dumps(comments_res))
        comments = comments_res.get('comments', [])
        # Advance the cursor to the newest comment; when no comments came
        # back, back the cursor up 5 seconds so nothing is missed next poll.
        first_comment_created_at = (
            comments[0]['created_at_utc'] if comments else int(time.time() - 5))
        comments_collected.extend(comments)
        after_count = len(comments_collected)
        if after_count > before_count:
            broadcast = self.broadcast.copy()
            broadcast.pop('segments', None)  # save space
            broadcast['comments'] = comments_collected
            with open(self.destination_file, 'w') as outfile:
                json.dump(broadcast, outfile, indent=2)
        self.comments = comments_collected
    except (SSLError, timeout, URLError, HTTPException, SocketError) as e:
        # Transient network problems: log and let the caller poll again.
        logger.warn('Comment downloading error: %s' % e)
    except ClientError as e:
        if e.code == 500:
            logger.warn('Comment downloading ClientError: %d %s' %
                        (e.code, e.error_response))
        elif e.code == 400 and not e.msg:
            # 400 with an empty message is treated as transient too.
            logger.warn('Comment downloading ClientError: %d %s' %
                        (e.code, e.error_response))
        else:
            raise e
    finally:
        # Pace the polling loop. NOTE(review): the `return` inside this
        # `finally` swallows any in-flight exception (including the re-raised
        # ClientError above) if the user presses Ctrl+C during the sleep.
        try:
            time.sleep(4)
        except KeyboardInterrupt:
            return first_comment_created_at
    return first_comment_created_at
def new_config():
    """Show the existing configuration file, or create a default
    'pyinstalive.ini' from Constants.CONFIG_TEMPLATE when none exists.
    On failure, print the template so the user can create it by hand."""
    try:
        if os.path.exists(pil.config_path):
            # A config is already present: show it and tell the user how to
            # regenerate a default one.
            logger.info("A configuration file is already present:")
            logger.whiteline()
            with open(pil.config_path) as f:
                for line in f:
                    logger.plain(" {:s}".format(line.rstrip()))
            logger.whiteline()
            logger.info(
                "To create a default config file, delete 'pyinstalive.ini' and run this script again.")
            logger.separator()
            return
        try:
            logger.warn("Could not find configuration file, creating a default one.")
            with open(pil.config_path, "w") as config_file:
                config_file.write(Constants.CONFIG_TEMPLATE.format(os.getcwd()).strip())
            logger.warn("Edit the created 'pyinstalive.ini' file and run this script again.")
            logger.separator()
        except Exception as e:
            logger.error("Could not create default config file: {:s}".format(str(e)))
            logger.warn("You must manually create and edit it with the following template: ")
            logger.whiteline()
            for template_line in Constants.CONFIG_TEMPLATE.strip().splitlines():
                logger.plain(" {:s}".format(template_line.rstrip()))
            logger.whiteline()
            logger.warn("Save it as 'pyinstalive.ini' and run this script again.")
            logger.separator()
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
        logger.warn(
            "If you don't have a configuration file, manually create and edit one with the following template:")
        logger.whiteline()
        logger.plain(Constants.CONFIG_TEMPLATE)
        logger.whiteline()
        logger.warn("Save it as 'pyinstalive.ini' and run this script again.")
        logger.separator()
def assemble(user_called=True, retry_with_zero_m4v=False):
    """Stitch downloaded .m4v/.m4a segment files into a single .mp4.

    Resolves the segment directory and companion JSON metadata file from
    pil.assemble_arg, concatenates the raw video and audio segments into
    temp files, then muxes them with FFmpeg into the final mp4.

    Args:
        user_called: when True, print a trailing separator for CLI output.
        retry_with_zero_m4v: internal flag — when the first pass fails and a
            '*-0.m4v' segment was skipped, the function recurses once with
            this set to True to retry including that segment.
    """
    try:
        # Derive the three paths from the single --assemble argument, which
        # may be either the json file or the segment directory.
        ass_json_file = pil.assemble_arg if pil.assemble_arg.endswith(
            ".json") else pil.assemble_arg + ".json"
        ass_mp4_file = os.path.join(
            pil.dl_path,
            os.path.basename(ass_json_file).replace("_downloads", "").replace(
                ".json", ".mp4"))
        ass_segment_dir = pil.assemble_arg if not pil.assemble_arg.endswith(
            ".json") else pil.assemble_arg.replace(".json", "")
        if pil.verbose:
            logger.plain("{}\n{}\n{}".format(ass_json_file, ass_mp4_file,
                                             ass_segment_dir))
        broadcast_info = {}
        if not os.path.isdir(ass_segment_dir) or not os.listdir(ass_segment_dir):
            logger.error(
                'The segment directory does not exist or does not contain any files: %s'
                % ass_segment_dir)
            logger.separator()
            return
        if not os.path.isfile(ass_json_file):
            # No metadata: fall back to a minimal broadcast dict with the
            # stream id parsed from the first segment file name.
            logger.warn(
                "No matching json file found for the segment directory, trying to continue without it."
            )
            ass_stream_id = os.listdir(ass_segment_dir)[0].split('-')[0]
            broadcast_info['id'] = ass_stream_id
            broadcast_info['broadcast_status'] = "active"
            broadcast_info['segments'] = {}
        else:
            with open(ass_json_file) as info_file:
                try:
                    broadcast_info = json.load(info_file)
                except Exception as e:
                    # Corrupt metadata: same fallback as above.
                    logger.warn(
                        "Could not decode json file, trying to continue without it."
                    )
                    ass_stream_id = os.listdir(ass_segment_dir)[0].split('-')[0]
                    broadcast_info['id'] = ass_stream_id
                    broadcast_info['broadcast_status'] = "active"
                    broadcast_info['segments'] = {}
        if broadcast_info.get('broadcast_status', '') == 'post_live':
            logger.error(
                'Video segment files from replay downloads cannot be assembled.')
            return
        stream_id = str(broadcast_info['id'])
        segment_meta = broadcast_info.get('segments', {})
        if segment_meta:
            # Segment list from metadata.
            all_segments = [
                os.path.join(ass_segment_dir, k)
                for k in broadcast_info['segments'].keys()
            ]
        else:
            # No metadata: glob the directory for this stream's .m4v files.
            all_segments = list(
                filter(
                    os.path.isfile,
                    glob.glob(
                        os.path.join(ass_segment_dir, '%s-*.m4v' % stream_id))))
        all_segments = sorted(all_segments, key=lambda x: _get_file_index(x))
        sources = []
        # NOTE(review): the audio tmp name ends in '_mp4.tmp' and the video
        # tmp name in '_m4a.tmp' — the suffixes look swapped. They are only
        # temp-file names (removed below), so behavior is unaffected;
        # confirm before renaming.
        audio_stream_format = 'assembled_source_{0}_{1}_mp4.tmp'
        video_stream_format = 'assembled_source_{0}_{1}_m4a.tmp'
        video_stream = ''
        audio_stream = ''
        has_skipped_zero_m4v = False
        if not all_segments:
            logger.error(
                "No video segment files have been found in the specified folder."
            )
            logger.separator()
            return
        else:
            logger.info(
                "Assembling video segment files from specified folder: {}".format(
                    ass_segment_dir))
        for segment in all_segments:
            # Skip video segments that have no matching audio segment.
            if not os.path.isfile(segment.replace('.m4v', '.m4a')):
                logger.warn('Audio segment not found: {0!s}'.format(
                    segment.replace('.m4v', '.m4a')))
                continue
            if segment.endswith('-init.m4v'):
                # Replace the stream's init segment with a known-good one
                # shipped alongside this module.
                logger.info('Replacing %s' % segment)
                segment = os.path.join(
                    os.path.dirname(os.path.realpath(__file__)), 'repair',
                    'init.m4v')
            if segment.endswith('-0.m4v') and not retry_with_zero_m4v:
                # The 0-segment is often broken; skip it on the first pass
                # and remember so we can retry with it if muxing fails.
                has_skipped_zero_m4v = True
                continue
            video_stream = os.path.join(
                ass_segment_dir, video_stream_format.format(stream_id, len(sources)))
            audio_stream = os.path.join(
                ass_segment_dir, audio_stream_format.format(stream_id, len(sources)))
            file_mode = 'ab'
            # Append raw segment bytes to the concatenated stream files.
            with open(video_stream, file_mode) as outfile, open(segment, 'rb') as readfile:
                shutil.copyfileobj(readfile, outfile)
            with open(audio_stream, file_mode) as outfile, open(
                    segment.replace('.m4v', '.m4a'), 'rb') as readfile:
                shutil.copyfileobj(readfile, outfile)
        if audio_stream and video_stream:
            sources.append({'video': video_stream, 'audio': audio_stream})
        for n, source in enumerate(sources):
            # Mux the concatenated audio/video into the final mp4 without
            # re-encoding (stream copy).
            ffmpeg_binary = os.getenv('FFMPEG_BINARY', 'ffmpeg')
            cmd = [
                ffmpeg_binary, '-loglevel', 'warning', '-y', '-i',
                source['audio'], '-i', source['video'], '-c:v', 'copy', '-c:a',
                'copy', ass_mp4_file
            ]
            #fnull = open(os.devnull, 'w')
            fnull = None
            exit_code = subprocess.call(cmd, stdout=fnull, stderr=subprocess.STDOUT)
            if exit_code != 0:
                logger.warn(
                    "FFmpeg exit code not '0' but '{:d}'.".format(exit_code))
                if has_skipped_zero_m4v and not retry_with_zero_m4v:
                    # Retry once, this time including the *-0.m4v segment.
                    logger.binfo(
                        "*-0.m4v segment was detected but skipped, retrying to assemble video without "
                        "skipping it.")
                    os.remove(source['audio'])
                    os.remove(source['video'])
                    logger.separator()
                    assemble(user_called, retry_with_zero_m4v=True)
                    return
            else:
                logger.info('The video file has been generated: %s' %
                            os.path.basename(ass_mp4_file))
                os.remove(source['audio'])
                os.remove(source['video'])
        if user_called:
            logger.separator()
    except Exception as e:
        logger.error("An error occurred: {:s}".format(str(e)))
def authenticate(username, password, force_use_login_args=False):
    """Log into Instagram, reusing a cached cookie file when possible.

    Looks for '<username>.json' next to the config file; when absent (or when
    the cached session is rejected) performs a fresh login and saves a new
    cookie file via onlogin_callback.

    Args:
        username: Instagram username.
        password: Instagram password.
        force_use_login_args: when True, override the configured credentials
            with the -u/-p command line arguments.

    Returns:
        An authenticated Client instance, or None when login failed/aborted.
    """
    ig_api = None
    try:
        if force_use_login_args:
            pil.ig_user = username
            pil.ig_pass = password
            pil.config_login_overridden = True
            logger.binfo("Overriding configuration file login with -u and -p arguments.")
            logger.separator()
        cookie_file = os.path.join(os.path.dirname(pil.config_path),
                                   "{}.json".format(username))
        if not os.path.isfile(cookie_file):
            # Cookie file does not exist: perform a fresh login and have the
            # callback persist the new session settings.
            logger.warn('Unable to find cookie file: {0!s}'.format(
                os.path.basename(cookie_file)))
            logger.info('Creating a new one.')
            ig_api = Client(
                username,
                password,
                on_login=lambda x: onlogin_callback(x, cookie_file),
                proxy=pil.proxy)
            login(ig_api)
        else:
            # Reuse the cached session settings from the cookie file.
            with open(cookie_file) as file_data:
                cached_settings = json.load(file_data, object_hook=from_json)
            logger.info('Using settings file: {0!s}'.format(cookie_file))
            try:
                ig_api = Client(username, password, settings=cached_settings,
                                proxy=pil.proxy)
            except (ClientSentryBlockError, ClientChallengeRequiredError,
                    ClientCheckpointRequiredError, ClientCookieExpiredError,
                    ClientLoginError, ClientError) as e:
                # The cached session was rejected: report the error, then
                # fall back to a fresh login that rewrites the cookie file.
                logger.separator()
                logger.warn('Some sort of login exception!')
                if pil.verbose:
                    logger.plain(json.dumps(e.error_response))
                logger.error('Could not login: {:s}'.format(e.error_response))
                logger.error('{:s}'.format(
                    json.loads(e.error_response).get("message", e.error_response)))
                logger.error('{:s}'.format(e.error_response))
                logger.separator()
                ig_api = Client(
                    username,
                    password,
                    on_login=lambda x: onlogin_callback(x, cookie_file),
                    proxy=pil.proxy)
                login(ig_api)
                logger.warn('successfully resolved error and logged back in!')
    except (ClientLoginError, ClientError) as e:
        logger.separator()
        if pil.verbose:
            logger.plain(json.dumps(e.error_response))
        logger.error('Could not login: {:s}'.format(e.error_response))
        logger.error('{:s}'.format(
            json.loads(e.error_response).get("message", e.error_response)))
        logger.error('{:s}'.format(e.error_response))
        logger.separator()
    except Exception as e:
        if pil.verbose:
            # Fix: json.dumps(e) raised TypeError (exceptions are not JSON
            # serializable), crashing the error handler itself.
            logger.plain(str(e))
        if str(e).startswith("unsupported pickle protocol"):
            logger.warn(
                "This cookie file is not compatible with Python {}.".format(
                    sys.version.split(' ')[0][0]))
            logger.warn(
                "Please delete your cookie file '{}.json' and try again.".format(
                    username))
        else:
            logger.separator()
            # Fix: '{:s}'.format(e) raised TypeError ('s' requires a str).
            logger.error('Unexpected exception: {:s}'.format(str(e)))
        logger.separator()
    except KeyboardInterrupt:
        logger.separator()
        logger.warn("The user authentication has been aborted.")
        logger.separator()
    if ig_api:
        logger.info('Successfully logged into account: {:s}'.format(
            str(ig_api.authenticated_user_name)))
        if pil.show_cookie_expiry and not force_use_login_args:
            try:
                cookie_expiry = ig_api.cookie_jar.auth_expires
                logger.info('Cookie file expiry date: {:s}'.format(
                    datetime.datetime.fromtimestamp(cookie_expiry).strftime(
                        '%Y-%m-%d at %I:%M:%S %p')))
            except Exception as e:
                logger.warn(
                    'An error occurred while getting the cookie file expiry date: {:s}'
                    .format(str(e)))
        logger.separator()
        return ig_api
    else:
        return None
def download_replays():
    """Download every replay broadcast stored in pil.replays_obj.

    First logs each replay's duration (parsed from its DASH manifest), then
    downloads each replay that is not already present in pil.dl_path,
    together with its comments when pil.dl_comments is enabled.
    """
    try:
        try:
            # Informational pass: report how many replays exist and how long
            # each one is, parsed from the DASH manifest's Period duration
            # (format like 'PT0H12M34.5S').
            logger.info('Amount of replays : {:s}'.format(
                str(len(pil.replays_obj))))
            for replay_index, replay_obj in enumerate(pil.replays_obj):
                bc_dash_manifest = parseString(replay_obj.get(
                    'dash_manifest')).getElementsByTagName('Period')
                bc_duration_raw = bc_dash_manifest[0].getAttribute("duration")
                bc_minutes = (bc_duration_raw.split("H"))[1].split("M")[0]
                bc_seconds = ((
                    bc_duration_raw.split("M"))[1].split("S")[0]).split('.')[0]
                logger.info(
                    'Replay {:s} duration : {:s} minutes and {:s} seconds'.
                    format(str(replay_index + 1), bc_minutes, bc_seconds))
        except Exception as e:
            # Duration info is cosmetic; keep going if the manifest parse fails.
            logger.warn(
                "An error occurred while getting replay duration information: {:s}"
                .format(str(e)))
        logger.separator()
        logger.info("Downloading replays, press [CTRL+C] to abort.")
        logger.separator()
        for replay_index, replay_obj in enumerate(pil.replays_obj):
            exists = False
            pil.livestream_obj = replay_obj
            dl_path_files = os.listdir(pil.dl_path)
            if pil.verbose:
                logger.separator()
                logger.plain("Listing contents of the folder '{}':".format(
                    pil.dl_path))
                for dl_path_file in dl_path_files:
                    logger.plain(dl_path_file)
                logger.separator()
                logger.separator()
            # Skip replays already downloaded (matched by id + '_replay' + .mp4).
            for dl_path_file in dl_path_files:
                if (str(replay_obj.get('id')) in dl_path_file) and (
                        "_replay" in dl_path_file) and (
                        dl_path_file.endswith(".mp4")):
                    logger.binfo(
                        "Already downloaded a replay with ID '{:s}'.".format(
                            str(replay_obj.get('id'))))
                    exists = True
            if not exists:
                current = replay_index + 1
                logger.info(
                    "Downloading replay {:s} of {:s} with ID '{:s}'.".format(
                        str(current), str(len(pil.replays_obj)),
                        str(replay_obj.get('id'))))
                # NOTE(review): paths are built by plain string concatenation,
                # so pil.dl_path presumably already ends with a separator —
                # confirm against config handling.
                pil.live_folder_path = '{}{}_{}_{}_{}_replay_downloads'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user,
                    pil.livestream_obj.get('id'), pil.epochtime)
                broadcast_downloader = replay.Downloader(
                    mpd=replay_obj.get('dash_manifest'),
                    output_dir=pil.live_folder_path,
                    user_agent=pil.ig_api.user_agent,
                    ffmpeg_binary=pil.ffmpeg_path)
                if pil.use_locks:
                    helpers.create_lock_folder()
                replay_mp4_file = '{}{}_{}_{}_{}_replay.mp4'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user,
                    pil.livestream_obj.get('id'), pil.epochtime)
                comments_json_file = '{}{}_{}_{}_{}_replay_comments.json'.format(
                    pil.dl_path, pil.datetime_compat, pil.dl_user,
                    pil.livestream_obj.get('id'), pil.epochtime)
                pil.comment_thread_worker = threading.Thread(
                    target=get_replay_comments, args=(comments_json_file, ))
                broadcast_downloader.download(
                    replay_mp4_file, cleartempfiles=pil.clear_temp_files)
                if pil.clear_temp_files:
                    helpers.remove_temp_folder()
                if pil.dl_comments:
                    logger.info("Downloading replay comments.")
                    try:
                        get_replay_comments(comments_json_file)
                    except Exception as e:
                        logger.error(
                            'An error occurred while downloading comments: {:s}'
                            .format(str(e)))
                logger.info("Finished downloading replay {:s} of {:s}.".format(
                    str(current), str(len(pil.replays_obj))))
                helpers.remove_lock()
                if current != len(pil.replays_obj):
                    logger.separator()
        logger.separator()
        logger.info("Finished downloading all available replays.")
        helpers.remove_lock()
    except Exception as e:
        logger.error('Could not save replay: {:s}'.format(str(e)))
        helpers.remove_lock()
    except KeyboardInterrupt:
        logger.separator()
        logger.binfo('The download has been aborted by the user, exiting.')
        helpers.remove_temp_folder()
        helpers.remove_lock()
def run():
    """CLI entry point for PyTikTokScraper.

    Parses command line arguments, resolves the target user to a numeric id
    (by search, or directly when -uid is given), then dispatches to the
    requested action: list followings, download a profile's videos, download
    a hashtag feed, or download a livestream.
    """
    ptts.initialize()
    logging.disable(logging.CRITICAL)
    config = configparser.ConfigParser()
    parser = argparse.ArgumentParser(
        description="You are running PyTikTokScraper {:s} using Python {:s}".
        format(Constants.SCRIPT_VER, Constants.PYTHON_VER))
    parser.add_argument(
        '-d', '--download', dest='download', type=str, required=False,
        help="The username (or uid) of the user whose posts you want to save.")
    parser.add_argument(
        '-ht', '--hashtag', dest='hashtag', type=str, required=False,
        help="The hashtag whose posts in the feed you want to save.")
    parser.add_argument(
        '-r', '--recent', dest='recent', action='store_true',
        help="When used, only retrieves the first 10 videos in the user's feed.")
    parser.add_argument(
        '-gf', '--get-following', dest='getfollowing', type=str, required=False,
        help="When used, retrieves the list of people you're following.")
    parser.add_argument(
        '-uid', '--is-uid', dest='isuid', action='store_true',
        help="When used, treat the download argument as the user ID.")
    parser.add_argument(
        '-s', '--single', dest='single', type=str, required=False,
        help="Pass a single video Id to download.")
    parser.add_argument(
        '-l', '--livestream', dest='livestream', type=str, required=False,
        help="Pass an username to download a livestream, if available.")
    args = parser.parse_args()
    if validate_inputs(config, args):
        api.login()
        # Fix: the original condition `args.download or args.livestream and
        # not args.isuid` parsed as `args.download or (args.livestream and
        # not args.isuid)`, so `-d` combined with `-uid` wrongly took the
        # username-search path and the `elif args.download and args.isuid`
        # branch below was unreachable.
        if (args.download or args.livestream) and not args.isuid:
            # Resolve the username to a uid via the primary search API.
            try:
                target_user_json = api.search_user(ptts.tt_target_user)
                for user in target_user_json.get('user_list'):
                    if user.get('user_info').get('unique_id') == ptts.tt_target_user:
                        ptts.tt_target_id = user.get('user_info').get('uid')
                        response_user = api.get_user_info(ptts.tt_target_id)
                        ptts.tt_target_user_liveroomid = response_user.get(
                            'user').get('room_id') if response_user.get(
                                'user').get('room_id') > 0 else None
                        video_count = user.get('user_info').get('aweme_count')
                        logger.info(
                            "Found matching user profile with {:d} videos.".
                            format(video_count))
                        if args.download and video_count < 1:
                            logger.separator()
                            logger.binfo(
                                "This user has no available videos to download.")
                            logger.separator()
                            sys.exit(0)
                if not ptts.tt_target_id:
                    raise IndexError
            except (IndexError, TypeError):
                # Primary search failed: fall back to the tiktokapi.ga search.
                logger.error(
                    "No user found matching '{:s}', trying tiktokapi.ga search."
                    .format(ptts.tt_target_user))
                logger.separator()
                try:
                    target_user_json = api.search_user_tta(ptts.tt_target_user)
                    if target_user_json:
                        for user in target_user_json.get('user_list'):
                            if user.get('user_info').get('unique_id') == ptts.tt_target_user:
                                # Fix: close the debug dump file (was an
                                # unclosed open(...).write(...)).
                                with open("usersearch.json", "w") as dump_file:
                                    dump_file.write(json.dumps(user.get("user_info")))
                                ptts.tt_target_id = user.get('user_info').get('uid')
                                ptts.tt_target_user_liveroomid = user.get(
                                    'user_info').get('room_id') if user.get(
                                        'user_info').get('room_id') > 0 else None
                                video_count = user.get('user_info').get('aweme_count')
                                logger.info(
                                    "Found matching user profile with {:d} videos."
                                    .format(video_count))
                                if args.download and video_count < 1:
                                    logger.separator()
                                    logger.binfo(
                                        "This user has no available videos to download.")
                                    logger.separator()
                                    sys.exit(0)
                        if not ptts.tt_target_id:
                            raise IndexError
                    else:
                        raise IndexError
                except (IndexError, TypeError):
                    # Cleanup: dropped a no-op .format() on this message
                    # (it had no placeholder).
                    logger.error(
                        "No results on tiktokapi.ga either, the script will now exit.")
                    logger.separator()
                    sys.exit(0)
        elif args.download and args.isuid:
            ptts.tt_target_id = args.download
            try:
                int(ptts.tt_target_id)
            except ValueError:
                logger.error(
                    "The user ID '{}' is not a valid value. Exiting.".format(
                        ptts.tt_target_id))
                logger.separator()
                sys.exit(1)
        elif args.livestream and args.isuid:
            ptts.tt_target_id = args.livestream
            try:
                int(ptts.tt_target_id)
            except ValueError:
                logger.error(
                    "The user ID '{}' is not a valid value. Exiting.".format(
                        ptts.tt_target_id))
                logger.separator()
                sys.exit(1)
            response_user = api.get_user_info(ptts.tt_target_id)
            ptts.tt_target_user_liveroomid = response_user.get('user').get(
                'room_id') if response_user.get('user').get('room_id') > 0 else None
        if ptts.tt_target_id:
            logger.info("Retrieved user ID: {:s}".format(ptts.tt_target_id))
            logger.separator()
        if args.getfollowing:
            logger.info("Retrieving list of following users...")
            logger.warn("Pagination does not work properly, use this at own risk!")
            logger.separator()
            json_resp = api.get_following(ptts.tt_target_id)
            following_txt = os.path.join(
                os.getcwd(), "following_{:s}.txt".format(ptts.tt_target_user))
            if os.path.isfile(following_txt):
                os.remove(following_txt)
            # Fix: write through a single context-managed handle instead of
            # one unclosed open(..., 'a').write() per user.
            with open(following_txt, 'a') as following_file:
                for user in json_resp.get('followings'):
                    user_text = user.get('unique_id') + " - " + user.get('uid')
                    logger.plain(user_text)
                    following_file.write(user_text + '\n')
            logger.separator()
            logger.info("Written {:d} users to {:s}".format(
                len(json_resp.get('followings')), following_txt))
            logger.separator()
        if ptts.args.download:
            logger.info("Starting download of all videos from profile.")
            downloader.download_all(ptts.tt_target_id)
        if ptts.args.hashtag:
            logger.info(
                "Starting download of all posts from hashtag '{:s}'.".format(
                    ptts.tt_target_hashtag))
            downloader.download_hashtag(ptts.tt_target_hashtag)
        if ptts.args.livestream:
            if ptts.tt_target_user_liveroomid:
                logger.info("Starting download for livestream.")
                downloader.download_live(ptts.tt_target_user_liveroomid)
            else:
                logger.warn("There currently no ongoing livestream available.")
                logger.separator()