def recommend_more():
    """AJAX endpoint: load one more page of recommended tracks, either for a
    specific playlist (?playlistId=) or for a topic search (?topicId=)."""
    try:
        page_token = request.args.get('pageToken', '')
        playlist_id = request.args.get('playlistId', '')
        topic_id = request.args.get('topicId', '')
        page = 'ajax/recommended/more'
        response = []
        if playlist_id.strip():
            # Continue paging through one specific playlist.
            playlist_tracks = YouTube.getPlaylistInfo(playlist_id, 8, page_token)
            if playlist_tracks['status'] == True and playlist_tracks.get('videos') is not None:
                response.append(playlist_tracks)
        elif topic_id.strip():
            # Continue paging through search results for one topic.
            options = {
                'part': 'snippet',
                'maxResults': 8,
                'key': 'AIzaSyDkNYRbreB8JHOggvjSznMIuw6TlvkHjGQ',
                'regionCode': Repo.getRegion()['countryCode'],
                'topicId': topic_id,
                'pageToken': page_token
            }
            playlist_topics = YouTube.search(options)
            if playlist_topics['status'] is True and len(playlist_topics.get('videos', [])) > 0:
                response.append(playlist_topics)
        return render('recommended', response=response, page=page)
    except Exception as e:
        return render('error', code=500, message=str(e))
def stream():
    """Render the stream page for a video (?v=) or a playlist (?list=).

    Renders a 404 error page when the YouTube lookup fails and a 900 error
    page when neither id is supplied.  The original also read an unused
    'playlist' query arg that was immediately shadowed; it has been removed,
    and the dense conditional-expression returns were unfolded into if/else.
    """
    try:
        v = request.args.get('v')
        plist = request.args.get('list')
        if v is not None:
            video = YouTube.getVideoInfo(v)
            if video.get('status') == True:
                return render('stream', type='video', core=video,
                              json_core=json.dumps(video),
                              relatedToVideoId=v)
            return render('error', code=404, message=video['message'])
        elif plist is not None:
            playlist = YouTube.getPlaylistInfo(plist)
            if playlist.get('status', False) == True:
                # Seed "related" suggestions from the first playlist entry.
                return render('stream', type='playlist', core=playlist,
                              json_core=json.dumps(playlist),
                              relatedToVideoId=playlist['videos'][0]['id'])
            return render('error', code=404, message=playlist['message'])
        else:
            return render(
                'error',
                code=900,
                message=
                'Please provide the Video Id for the song to stream, or search any other song.'
            )
    except Exception as e:
        return render('error', code=500, message=str(e))
def add_comments(user_id, query_id):
    """Fetch every commentThreads page for each requested video, persist the
    threads, then update the stored comment count on the owning query."""
    thread_store = Comment(mongo_curs, query_id)
    payload = request.get_json()
    api = YouTube(api_key=payload['api_key'])
    for video_id in payload['list_vid']:
        # First page, then follow pagination until exhausted.
        page = api.get_query('commentThreads',
                             videoId=video_id,
                             part='id, replies, snippet')
        thread_store.create_comment_for_each(page)
        while 'nextPageToken' in page:
            page = api.get_query('commentThreads',
                                 videoId=video_id,
                                 part='id, replies, snippet',
                                 pageToken=page['nextPageToken'])
            thread_store.create_comment_for_each(page)
    # Recompute and store the per-query comment total.
    count_comments = int(
        mongo_curs.db.comments.find({
            'query_id': query_id
        }).count())
    mongo_curs.db.queries.update_one(
        {'query_id': query_id},
        {'$set': {
            'count_comments': count_comments
        }})
    return 'POST REQUEST add_comments IS RECEIVED'
def video_info():
    """Explore-page handler: fetch raw API metadata for a single video id.

    POST uses the demo key (the 'try it' flow on /) when api_key_test is
    submitted, otherwise the key stored in the session; GET just renders
    the explore page.
    """
    if request.method == 'POST':
        # specific for 'try it' on / — falls back to the session key,
        # and to an explanatory message when no key is available at all.
        if request.form.get('api_key_test') is not None:
            api_key = app.config['api_key_test']
        elif 'api_key' in session:
            api_key = session['api_key']
        else:
            return render_template('explore.html', message="""
<h4><strong class="text-danger">You can try Pytheas but to go further you will need to get an api_key from Google services <br>Please go to <a href="./config" class="btn btn-primary btn-lg active" role="button" aria-pressed="true">Config</a> and follow guidelines</strong></h4>
""")
        video_id = request.form.get('unique_id_video')
        video_id = YouTube.cleaning_video(video_id)
        part = ', '.join(request.form.getlist('part'))
        api = YouTube(api_key=api_key)
        video_result = api.get_query('videos', id=video_id, part=part)
        video_result_string = json.dumps(video_result,
                                         sort_keys=True,
                                         indent=2,
                                         separators=(',', ': '))
        return render_template('methods/view_results.html',
                               result=video_result,
                               string=video_result_string)
    return render_template('explore.html')
def youtube():
    """Search YouTube for the posted query and return decoded stream data.

    Expects JSON with 'search', 'more' (1 to continue a previous result set,
    with 'prev' holding the earlier links) and 'items' (max result count).

    Fixes over the original: the outer error path returned the exception
    OBJECT ({"error": ex}), which is not JSON-serializable — it now returns
    str(ex); the bare `except:` was narrowed to `except Exception`.
    """
    try:
        req_data = request.get_json()
        search = req_data['search']
        continued = int(req_data['more'])
        prevList = []
        if continued == 1:
            prevList = list(req_data['prev'])
        yt = YouTube()
        yt.NewMain(search, prevList)
        yturl = list(yt.getLinks())
        try:
            maax = int(req_data['items'])
            if 0 < maax < len(yturl):
                yturl = yturl[:maax]
        except Exception:
            return {"error": "Invalid json data. items field compulsory"}
        # Decode each link into its stream data.
        lst = {}
        for url in yturl:
            dcdr = Decoder(url)
            lst.update({url: dcdr.getDat()})
        data = {"data": lst}
        data.update({"search": search})
        data.update({"items": len(lst)})
        return data
    except Exception as ex:
        # Return a serializable message, not the exception object itself.
        return {"error": str(ex)}
def add_related(user_id, query_id):
    """Fetch YouTube 'related videos' for every video in the request payload,
    store them in Mongo, then record count/part metrics on the owning query.

    Expects a JSON body with 'api_key' and 'list_vid' (list of video ids).
    """
    # NOTE(review): constructed but never used below — confirm the
    # constructor has no required side effect.
    current_relatedVideos = RelatedVideos(mongo_curs, query_id)
    param = request.get_json()
    api = YouTube(api_key=param['api_key'])
    list_vid = param['list_vid']
    for id_video in list_vid:
        # First page of related-video search results for this video.
        search_results = api.get_query(
            'search',
            part='id,snippet',
            maxResults=50,
            relatedToVideoId=id_video,
            type='video',
        )
        for each in search_results['items']:
            # Tag each stored item with the owning query and source video.
            each.update({'query_id': query_id})
            each.update({'videoId': id_video})
            mongo_curs.db.relatedVideos.insert_one(each)
        ## Loop and save: follow pagination until no nextPageToken remains.
        while 'nextPageToken' in search_results:
            search_results = api.get_query(
                'search',
                part='id,snippet',
                maxResults=50,
                relatedToVideoId=id_video,
                type='video',
                pageToken=search_results['nextPageToken'])
            if not search_results['items']:
                # NOTE(review): an empty page aborts the WHOLE request with a
                # redirect, skipping the metrics below — confirm intended.
                return redirect(url_for('manage'))
            else:
                for each in search_results['items']:
                    each.update({'query_id': query_id})
                    # NOTE(review): later pages tag with 'belongTo' while the
                    # first page used 'videoId' — looks inconsistent; verify.
                    each.update({'belongTo': id_video})
                    mongo_curs.db.relatedVideos.insert_one(each)
    # add metrics for query in json
    count_videos = int(
        mongo_curs.db.relatedVideos.find({
            'query_id': query_id
        }).count())
    mongo_curs.db.queries.update_one(
        {'query_id': query_id},
        {'$set': {
            'count_relatedVideos': count_videos
        }})
    # add part as indicator: append 'relatedVideos' to the stored part string
    part_value = mongo_curs.db.queries.find_one_or_404({'query_id': query_id})
    mongo_curs.db.queries.update_one(
        {'query_id': query_id},
        {'$set': {
            'part': part_value['part'] + ", relatedVideos",
        }},
        upsert=False)
    return 'POST REQUEST add_related IS RECEIVED'
def __init__(self, engine): super(YouTubeWatcher, self).__init__(engine) # db.upsert('youtube', [ # ('channel', 'varchar(64)'), # ('video_id', 'varchar(64)') # ]) self.yt = YouTube() self.start_later(1)
def recommended():
    """Render the recommended page: three seed playlists plus playlists found
    on a curated channel, followed by a few per-topic search sections.

    The original parsed an unused ?limit= arg and bound unused countryCode /
    nextPageToken locals; none of them was read, so they were removed.
    """
    try:
        response = []
        page = 'ajax/recommended/more'
        # Seed playlists that are always considered.
        playlists = [
            'PLMC9KNkIncKvYin_USF1qoJQnIyMAfRxl',
            'PLNCA1T91UH31_SnlMecke_9wsbc-5mamS',
            'PLYVjGTi85afoMw4yMGHLTB99T8ZTTP0ZP'
        ]
        options = {
            'part': 'id',
            'maxResults': 50,
            'order': 'relevance',
            'key': 'AIzaSyDkNYRbreB8JHOggvjSznMIuw6TlvkHjGQ',
            'channelId': 'UCk8vhgJslhfcLcwS5Q2KADw',
            'type': 'playlist'
        }
        # Add every playlist found on the curated channel.
        channel_playlists = YouTube.search(options)
        if channel_playlists.get('items') is not None:
            for item in channel_playlists.get('items'):
                playlistId = item['id'].get('playlistId')
                if playlistId is not None:
                    playlists.append(playlistId)
        # Keep only playlists that resolved successfully and have videos.
        for playlist in playlists:
            playlist_tracks = YouTube.getPlaylistInfo(playlist, 8)
            if playlist_tracks['status'] == True:
                video = playlist_tracks.get('videos')
                if video is not None:
                    response.append(playlist_tracks)
        # Fixed topic sections appended after the playlists.
        topics = [{
            'topicId': '/m/04rlf',
            'title': 'Music'
        }, {
            'topicId': '/m/02lkt',
            'title': 'EDM'
        }, {
            'topicId': '/m/06by7',
            'title': 'Rock'
        }]
        for topic in topics:
            options_topic = {
                'part': 'snippet',
                'maxResults': 8,
                'key': 'AIzaSyDkNYRbreB8JHOggvjSznMIuw6TlvkHjGQ',
                'regionCode': Repo.getRegion()['countryCode'],
                'topicId': topic['topicId']
            }
            playlist_topics = YouTube.search(options_topic)
            if playlist_topics['status'] == True:
                playlist_topics.update({'title': topic['title']})
                response.append(playlist_topics)
        return render('recommended', response=response, page=page)
    except Exception as e:
        return render('error', code=500, message=str(e))
def youtube(request):
    """get the youtube page with popular games"""
    user = request.user
    # Django auth: True only for a logged-in user.
    logged_in = bool(user.is_authenticated())
    popular_games = YouTube().get_popular_games()
    context = {
        'popular_games': popular_games,
        'logged_in': logged_in,
    }
    return render(request, 'youtube/index.html', context)
def __init__(self, username):
    """Read ./config.properties and build one API wrapper per platform."""
    cfg = ConfigParser.ConfigParser()
    cfg.read('./config.properties')
    # One client per supported network, all sharing the same config.
    self.pinterest = Pinterest(username, cfg)
    self.googleplus = GooglePlus(username, cfg)
    self.youtube = YouTube(username, cfg)
    self.twitch = Twitch(username, cfg)
    self.vimeo = Vimeo(username, cfg)
    self.behance = Behance(username, cfg)
    self.instagram = Instagram(username, cfg)
    self.twitter = Twitter(username, cfg)
    self.github = Github(username, cfg)
    # Aggregated results are filled in later.
    self.dict = dict()
def save_threads(youtube: YouTube, da: DataAccess, from_vid: str, dry_run: bool = True):
    """Download and store comment threads for every video from `from_vid`
    onward, skipping videos whose comments are already on disk."""
    for video in da.gen_all_videos_in_order(from_vid):
        video_id = video["id"]
        title = video["snippet"]["title"]
        print()
        print(f"Processing {title}...")
        if da.have_comments_for_video(video_id):
            print(f'We\'ve already got comments for "{title}".')
            print("Skipping...")
            continue
        if dry_run:
            print("\t(Dry run)")
        else:
            threads = youtube.get_comment_threads_for_video(video_id)
            out_path = os.path.join(ROOT_DIR, "db", "commentThreads",
                                    f"{video_id}.json")
            with open(out_path, mode="w") as f:
                f.write(json.dumps(threads))
        print(f'Threads for "{title}" saved.')
        print()
        print("------------------------------------------------------------")
        # Give a little delay between batches — DOS paranoia.
        sleep(1)
def channel():
    """Launch a channel download query: register the query with the REST
    backend synchronously, then fire the long-running add_video call in a
    background thread.

    Fixes: removed the unused `list_channel` concatenation and the unused
    `r_query` binding; `not 'x' in` replaced by the idiomatic `not in`.
    """
    if request.method == 'POST':
        if 'api_key' not in session:
            return render_template('explore.html', message='api key not set')
        user_id = session['profil']['id']
        query_id = str(uuid4())
        query_name = str(request.form.get('query_name'))
        part = ', '.join(request.form.getlist('part'))
        list_channel_username = [
            YouTube.cleaning_channel(username_or_id)
            for username_or_id in request.form.getlist('list_username')
        ]
        list_channel_id = [
            YouTube.cleaning_channel(username_or_id)
            for username_or_id in request.form.getlist('list_id')
        ]
        # The first cleaned id field may hold several ids, one per line.
        list_channel_id = list_channel_id[0].splitlines()
        param = {
            'author_id': session['profil']['id'],
            'api_key': session['api_key'],
            'query_id': query_id,
            'query': query_name,
            'channel_id': list_channel_id,
            'channel_username': list_channel_username,
            'part': part,
            'maxResults': maxResults,
            'kind': 'channelItems',
        }
        # Register the query first so the worker can find it.
        requests.post("http://restapp:" + app.config['REST_PORT'] + "/" +
                      user_id + "/add_query/" + query_id,
                      json=param)

        def send_request():
            # Long-running download kicked off in the background.
            requests.post("http://restapp:" + app.config['REST_PORT'] + "/" +
                          user_id + "/query/" + query_id +
                          "/add_video/channel",
                          json=param)

        Thread(target=send_request).start()
        return render_template('methods/download_process.html')
def channel_info():
    """Explore-page handler: look up channel metadata by URL, id or username."""
    if request.method == 'POST':
        if 'api_key' not in session:
            return render_template('explore.html', message='api key not set')
        id_channel = request.form.get('unique_id_channel')
        id_username = request.form.get('unique_user_channel')
        part = ', '.join(request.form.getlist('part'))
        api = YouTube(api_key=session['api_key'])
        if 'youtube.com/channel/' in id_channel:
            # A full channel URL was pasted: strip it down to the raw id.
            id_channel = YouTube.cleaning_channel(id_channel)
            channel_result = api.get_query('channels', id=id_channel, part=part)
        elif id_username != '':
            id_username = YouTube.cleaning_channel(id_username)
            channel_result = api.get_query('channels',
                                           forUsername=id_username,
                                           part=part)
        else:
            channel_result = api.get_query('channels', id=id_channel, part=part)
        channel_result_string = json.dumps(channel_result,
                                           sort_keys=True,
                                           indent=2,
                                           separators=(',', ': '))
        return render_template('methods/view_results.html',
                               result=channel_result,
                               string=channel_result_string)
    return render_template('explore.html')
class Funlink():
    """IRC module: shortens a URL posted via the .addurl command, tags it
    ([YouTube], [PNG], ...) and tweets it (max 140 chars)."""

    # Matches ":<source> PRIVMSG <target> :.addurl <text>"
    regexpattern = r':(.+) (?:PRIVMSG) ([\S]+) :.addurl(?: (.+))'

    def __init__(self):
        # NOTE(review): Twitter is built with empty credentials — confirm
        # they are injected elsewhere.
        self.Twitter = Twitter("","")
        self.YouTube = YouTube()

    def handleInput(self,Matchlist):
        """Handle one .addurl command: shorten the URL, build a tagged
        description, and tweet it if it fits in 140 characters."""
        Source = Matchlist[0]
        Target = Matchlist[1]
        Text = Matchlist[2].split()
        try:
            URL = tinyurl.create_one(Text[0])
        except Exception:
            # NOTE(review): str(Exception) stringifies the CLASS, not the
            # caught instance — probably meant `except Exception as e`.
            PrivMsg(Target,"4Error in 'TINYURL.Modul' >> '" + str(Exception) + "'")
            return
        Nick = re.match("(.+?)!", Source).group(1)
        if (len(Text) >= 2) or (re.search("(?:.+)youtube.com/(?:.+)v=(\w+)",Text[0]) and len(Text) == 1):
            # A description was provided (or it is a lone YouTube link).
            x = "[" + Nick + "] "
            # Determine extra tag info, e.g. [YouTube] [PNG] [TIF]
            if (re.search("(?:.+)youtube.com/(?:.+)v=(\w+)",Text[0])):
                x += "[YouTube] "
            elif (re.search("(\w+).rofl.to",Text[0])):
                r = re.search("(\w+).rofl.to",Text[0]).group(1)
                x += "[rofl.to] (" + str(r) +") "
            elif (re.search("collegehumor.com/(\w+)",Text[0])):
                r = re.search("collegehumor.com/(\w+)",Text[0]).group(1)
                x += "[CollegeHumor] (" + str(r) + ")"
            elif (re.search("newgrounds.com/",Text[0])):
                x += "[Newsground] "
            else:
                # Fall back to a file-extension tag when one is recognised.
                try:
                    Tag = re.search("\.(bmp|jpg|gif|img|jp2|jpeg|png|psd|tga|tif|txt)$",Text[0]).group(1)
                    x += "[" + Tag.upper() + "] "
                except:
                    pass
            if (len(Text) > 1):
                x += URL + " " + " ".join(Text[1:])
            else:
                # Lone YouTube link: append the resolved video title.
                r = re.search("(?:.+)youtube.com/(?:.+)v=([-_\w]+)",Text[0]).group(1)
                t = self.YouTube.getInfo(r)
                x += URL + " " + t
            # Twitter tweets must not be longer than 140 characters.
            if (len(x) <= 140):
                self.Twitter.sendTweet(x)
                PrivMsg(Target,"hinzugefügt! - http://twitter.com/fptlnk","15Funlink:07 ")
            else:
                PrivMsg(Target,"Beschreibung zu lang. Max 140 Zeichen. Dein Add war " \
                    + str(len(x)) + " Zeichen lang.","15Funlink:07 ")
        else:
            # No description was given.
            PrivMsg(Target,"Die Beschreibung fehlt!","15Funlink:07 ")
def download(youtube: Union[YouTube, None], playlist: Union[Playlist, None],
             stream: str, path: str) -> None:
    """
    Handle the download of a single video or of a whole playlist.

    Parameters
    ----------
    stream : `str`
        Selected value of the window stream
    path: `str`
        Path where the video will be downloaded
    """
    if not playlist and not youtube:
        window.s_append('You must search for a video before download!')
        return
    if playlist:
        # One failing video must not stop the rest of the playlist,
        # so each download is wrapped individually.
        for video_url in playlist.video_urls:
            try:
                youtube = YouTube(path=path, url=video_url, window=window)
                if 'Audio' in stream:
                    youtube.download_audio(title=True)
                else:
                    youtube.download_video()
            except Exception as e:
                window.s_append('An unexpected pytube library error occured,'
                                ' could not download.')
                print(f'Exception {e}')
        window.s_append('All downloads finished!')
    if youtube:
        if youtube.path != path:
            youtube.path = path
        # Stream labels look like '1080p (60 FPS)' or '128kbps'.
        quality = stream[:5].strip()
        fps = int(stream[-7:-5])
        if 'kb' in quality:
            youtube.download_audio(bitrate=stream)
        else:
            youtube.download_video(res=quality, fps=fps)
        window.s_append(f'{youtube.title} download finished!')
def add_captions(user_id, query_id):
    """Create caption documents for each requested video id, then refresh the
    stored caption count for the query."""
    captions = Caption(mongo_curs, query_id)
    payload = request.get_json()
    # Kept from the original even though unused below — the constructor may
    # validate the key.
    api = YouTube(api_key=payload['api_key'])
    for video_id in payload['list_vid']:
        worker.logger.debug(video_id)
        captions.create_if_not_exist(video_id)
    # counting captions
    captions.count_captions()
    return 'POST REQUEST add_captions IS RECEIVED'
def video(payload=None):
    """Launch a videos-list download query.

    `payload` is set when the request comes from the CSV-import path and
    carries 'query', 'part' and 'videos'; otherwise the video ids come from
    the submitted form.  The duplicated cleaning_video comprehension was
    hoisted out of both branches, and the unused `r_query` binding removed.
    """
    if request.method == 'POST':
        if 'api_key' not in session:
            return render_template('config.html', message='api key not set')
        query_id = str(uuid4())
        user_id = session['profil']['id']
        query_name = str(request.form.get('name_query'))
        part = ', '.join(request.form.getlist('part'))
        # come from input_csv
        if payload:
            query_name = payload['query']
            part = payload['part']
            list_videos = payload['videos']
        else:
            list_videos = request.form.get('list_videos').splitlines()
        # Clean ids once for both sources.
        list_videos = [YouTube.cleaning_video(x) for x in list_videos]
        param = {
            'query_id': query_id,
            'query': query_name,
            'part': part,
            'api_key': session['api_key'],
            'kind': 'videosList',
            'videos': list_videos,
        }
        # Register the query first so the background worker can find it.
        requests.post("http://restapp:" + app.config['REST_PORT'] + "/" +
                      user_id + "/add_query/" + query_id,
                      json=param)

        def send_request():
            requests.post("http://restapp:" + app.config['REST_PORT'] + "/" +
                          user_id + "/query/" + query_id +
                          "/add_video/videos",
                          json=param)

        Thread(target=send_request).start()
        return render_template('methods/download_process.html')
def playlist_info():
    """Explore-page handler: fetch raw API metadata for a single playlist."""
    if request.method == 'POST':
        if 'api_key' not in session:
            return render_template('explore.html', message='api key not set')
        id_playlist = request.form.get('unique_id_playlist')
        # remember to make same as for cleanning_ytb
        if 'youtube.com/watch?v=' in id_playlist:
            # A watch URL was pasted: take the playlist id from its
            # 'list' query argument.
            id_playlist = furl(id_playlist).args['list']
        part = ', '.join(request.form.getlist('part'))
        api = YouTube(api_key=session['api_key'])
        playlist_info = api.get_query('playlists', id=id_playlist, part=part)
        playlist_info_string = json.dumps(playlist_info,
                                          sort_keys=True,
                                          indent=2,
                                          separators=(',', ': '))
        return render_template('methods/view_results.html',
                               result=playlist_info,
                               string=playlist_info_string)
    return render_template('explore.html')
def channel(channelId):
    """Render a channel page: channel metadata plus its videos, sorted by
    ?order= (defaults to 'date')."""
    try:
        order = request.args.get('order', 'date')
        options = {
            'part': 'snippet',
            'order': order,
            'key': 'AIzaSyDkNYRbreB8JHOggvjSznMIuw6TlvkHjGQ',
            'regionCode': Repo.getRegion()['countryCode'],
            'channelId': channelId,
            'type': 'video',
            'maxResults': 50
        }
        channel = YouTube.getChannel(channelId)
        if channel['status'] != True:
            return render('error', code=400, message=channel['message'])
        search = YouTube.search(options)
        if search['status'] == True:
            return render('channel', search=search, channel=channel, order=order)
        return render('error', code=404, message=search['message'])
    except Exception as e:
        return render('error', code=500, message=str(e))
class YouTubeHandler:
    """HTTP command handler bridging the web UI to YouTube search and XBMC
    playback ('search', 'play' and 'playlist' sub-commands)."""

    def __init__(self, app):
        self.app = app
        # Shared YouTube API client.
        self.yt = YouTube()

    def __call__(self, params, **kwargs):
        """Dispatch on params[0]; returns a JSON string for 'search' and an
        ok / 'not ok' text for the playback commands."""
        command = params[0]
        print(kwargs["handler"].request.body)
        if command == "search":
            # params[1] is a URL-encoded "<query>?token=...&type=..." value.
            val = urllib.parse.unquote(params[1])
            url = urllib.parse.urlparse(val)
            queryParams = urllib.parse.parse_qs(url.query)
            try:
                js = self.yt.search(url.path,
                                    token = queryParams.get("token",[None])[0],
                                    type = queryParams.get("type", ["video"])[0])
            except Exception as e:
                return "not ok: " + str(e)
            # Keep only video/playlist hits, normalized to a flat dict the
            # client UI understands.
            items = [{"id": x["id"]["playlistId"] if "playlistId" in x["id"] else x["id"]["videoId"],
                      "title": x["snippet"]["title"],
                      "thumbnail": x["snippet"]["thumbnails"]["medium"]["url"],
                      "published": x["snippet"]["publishedAt"]}
                     for x in js["items"]
                     if x["id"]["kind"] == "youtube#video" or x["id"]["kind"] == "youtube#playlist"]
            res = {"items": items}
            # Forward pagination tokens when present.
            if "nextPageToken" in js:
                res["nextToken"] = js["nextPageToken"]
            if "prevPageToken" in js:
                res["prevToken"] = js["prevPageToken"]
            result = json.dumps(res, indent=1)
            return result
        elif command == "play":
            try:
                self.app.xbmc.openYoutubeVideo(params[1])
                return "ok"
            except IndexError:
                return "not ok"
        elif command == "playlist":
            try:
                self.app.xbmc.openYoutubeList(params[1])
                self.app.xbmc.StartPlaylist(1)
                return "ok"
            except IndexError:
                return "not ok"
        return "Not found"
def search(
    url: str,
    path: str,
) -> Tuple[Union[YouTube, None], Union[None, Playlist]]:
    """
    Handle the search for a video or a playlist.

    Parameters
    ----------
    url : `str`
        Searched video/playlist url.
    path: `str`
        Path where the video will be downloaded

    Returns
    -------
    A (youtube, playlist) pair where at most one element is set; an
    exception leaves both as None.
    """
    window.s_append(f'Searching for {url}...')
    youtube = playlist = None
    if 'playlist?' in url:
        playlist = Playlist(url=url, window=window)
        window.s_append('Playlist found! Select a download mode on '
                        'the streams below!')
        window['stream'].update(
            values=['Video (Max quality)', 'Audio (Max quality)'])
    else:
        youtube = YouTube(path=path, url=url, window=window)
        # Offer every video resolution/FPS combo plus every audio bitrate.
        video_labels = [f'{v.resolution} ({v.fps} FPS)' for v in youtube['videos']]
        audio_labels = [a.abr for a in youtube['audios']]
        window['stream'].update(values=video_labels + audio_labels)
        window.s_append(f'{len(youtube)} downloadable streams found!'
                       ' Select one below!')
    return youtube, playlist
def trending():
    """Render the trending-music page for the user's region (paged)."""
    try:
        page_token = request.args.get('pageToken', '')
        youtube = YouTube.getTrending({
            'regionCode': Repo.getRegion()['countryCode'],
            'videoCategoryId': '10',
            'pageToken': page_token
        })
        if youtube['status'] == True:
            return render('trending', youtube=youtube)
        return render('error', code=404, message=youtube['message'])
    except Exception as e:
        return render('error', code=500, message=str(e))
def save_all_playlist_items(youtube: YouTube, playlist_ids: List[str],
                            dry_run: bool = True):
    """Fetch the items of each playlist and write one JSON file per playlist
    under db/playlist_items."""
    for pid in playlist_ids:
        print(f"Fetching {pid}")
        if dry_run:
            print("\t(Dry run)")
        else:
            items = youtube.get_pitems_for_pid(pid)
            target = os.path.join(ROOT_DIR, "db", "playlist_items",
                                  f"{pid}.json")
            with open(target, mode="w") as f:
                f.write(json.dumps(items))
        print("Done.")
        # Small pause between playlists to go easy on the API.
        sleep(0.5)
def streamer():
    """Proxy-stream the audio for video id ?v= as audio/mpeg; any failure
    yields an empty audio/mpeg response."""
    try:
        video_id = request.args.get('v')
        if video_id is not None:
            music_url = YouTube.musicURL(video_id)
            if music_url is not False:
                def generate():
                    # Relay the remote stream in 1 KiB chunks.
                    remote = urllib2.urlopen(music_url)
                    chunk = remote.read(1024)
                    while chunk:
                        yield chunk
                        chunk = remote.read(1024)
                return Response(generate(), mimetype="audio/mpeg")
        return Response(mimetype="audio/mpeg")
    except Exception:
        return Response(mimetype="audio/mpeg")
def download(url, tempdir):
    """Download the best-resolution stream for `url` into `tempdir` and
    return its filename; raises NotFound when no stream exists."""
    yt = YouTube()
    yt.url = url
    if len(yt.videos) == 0:
        raise NotFound(url)
    # Prefer webm, then mp4, then whatever is available; the last entry of
    # each filtered list is the best resolution.
    streams = yt.filter('webm')
    if not streams:
        streams = yt.filter('mp4')
    video = streams[-1] if streams else yt.videos[-1]
    video.download(tempdir)
    return video.filename
def playlists():
    """Render the curated channel's playlists page (paged)."""
    try:
        page_token = request.args.get('pageToken', '')
        options = {
            'part': 'snippet',
            'maxResults': 50,
            'order': 'relevance',
            'key': 'AIzaSyDkNYRbreB8JHOggvjSznMIuw6TlvkHjGQ',
            'regionCode': Repo.getRegion()['countryCode'],
            'channelId': 'UCk8vhgJslhfcLcwS5Q2KADw',
            'pageToken': page_token,
            'type': 'playlist'
        }
        playlists = YouTube.search(options)
        if playlists['status'] is True:
            return render('playlists', playlists=playlists)
        return render('error', code=404, message=playlists['message'])
    except Exception as e:
        return render('error', code=500, message=str(e))
def main():
    """Entry point: build a YouTube client from the CLI-supplied API key and
    run the bulk video-save step.

    Fix: the original read sys.argv[1] unconditionally, so a missing
    argument raised IndexError before the intended 'No API key provided.'
    exception could fire; the length is now checked first.
    """
    # Disable OAuthlib's HTTPS verification when running locally.
    # *DO NOT* leave this option enabled in production.
    os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"

    # Get the API key as a CLI arg (guarded against a missing argument).
    api_key = sys.argv[1] if len(sys.argv) > 1 else None
    if not api_key:
        raise Exception("No API key provided.")

    # Get credentials and create an API client
    youtube = YouTube(api_key)

    # Do stuff.
    da = DataAccess()
    pitems_dict = da.get_pitems_dict(OTHER_PLAYLIST_IDS)
    current_vid = "lM28rfsHge0"
    # save_threads(youtube, da, from_vid=current_vid, dry_run=False)
    # save_all_playlist_items(youtube, OTHER_PLAYLIST_IDS, dry_run=False)
    save_all_videos(youtube, pitems_dict, dry_run=False)
def save_all_videos(
    youtube: YouTube,
    playlist_item_dict: Dict[str, List],
    dry_run: bool = True,
):
    """Fetch full video resources for every playlist's items and write one
    JSON file per video under db/videos."""
    for pid, pitems in playlist_item_dict.items():
        print(f"Fetching videos for {pid}")
        if dry_run:
            print("\t(Dry run)")
        else:
            for video in youtube.get_videos_for_pitems(pitems):
                target = os.path.join(ROOT_DIR, "db", "videos",
                                      f"{video['id']}.json")
                with open(target, mode="w") as f:
                    f.write(json.dumps(video))
        print("Done.")
        # Small pause between playlists to go easy on the API.
        sleep(0.5)
class SocialMediaApi(object):
    """Aggregates profile/follower counts for one username across several
    social platforms."""

    def __init__(self, username):
        """Read ./config.properties and build one API wrapper per platform."""
        config = ConfigParser.ConfigParser()
        config.read('./config.properties')
        self.pinterest = Pinterest(username, config)
        self.googleplus = GooglePlus(username, config)
        self.youtube = YouTube(username, config)
        self.twitch = Twitch(username, config)
        self.vimeo = Vimeo(username, config)
        self.behance = Behance(username, config)
        self.instagram = Instagram(username, config)
        self.twitter = Twitter(username, config)
        self.github = Github(username, config)
        # Filled by getAllInfo(): platform name -> count.
        self.dict = dict()

    def getAllInfo(self):
        """Query every platform once and cache the results in self.dict."""
        # Query order is kept identical to the original implementation.
        counts = {}
        counts['PINTEREST'] = self.pinterest.getPinterestInfoForUser()
        counts['GOOGLEPLUS'] = self.googleplus.getGooglePlusInfoForUser()
        counts['YOUTUBE'] = self.youtube.getYouTubeInfoForUser()
        counts['TWITCH'] = self.twitch.getTwitchInfoForUser()
        counts['VIMEO'] = self.vimeo.getVimeoInfoForUser()
        counts['BEHANCE'] = self.behance.getBehanceInfoForUser()
        counts['INSTAGRAM'] = self.instagram.getInstagramInfoForUser()
        counts['TWITTER'] = self.twitter.getTwitterInfoForUser()
        counts['GITHUB'] = self.github.getGitHubInfoForUser()
        # Store in the same key order the original used.
        for key in ('GITHUB', 'TWITTER', 'INSTAGRAM', 'BEHANCE', 'PINTEREST',
                    'GOOGLEPLUS', 'YOUTUBE', 'TWITCH', 'VIMEO'):
            self.dict[key] = counts[key]

    def printDict(self):
        """Pretty-print the aggregated counts."""
        print("\n ********Output********* \n")
        pprint.pprint(self.dict)
class YouTubeWatcher(Task):
    """Task that polls a fixed set of YouTube channels and pushes a Telegram
    photo message whenever a channel publishes a new video.  (Python 2.)"""

    def __init__(self, engine):
        super(YouTubeWatcher, self).__init__(engine)
        # db.upsert('youtube', [
        #     ('channel', 'varchar(64)'),
        #     ('video_id', 'varchar(64)')
        # ])
        self.yt = YouTube()
        # Kick off the first run() one second from now.
        self.start_later(1)

    def run(self):
        # Channels to watch; last-seen video ids live in the var store.
        channels = ('DotaCinema', 'WronchiAnimation')
        for channel in channels:
            try:
                videos = self.yt.get_latest(channel)
                video = videos[0]
                last_video_id = get_var('youtube.{}.last_id'.format(channel), '')
                if video.id != last_video_id:
                    # Remember the newest id, then announce it on Telegram.
                    set_var('youtube.{}.last_id'.format(channel), video.id)
                    print 'New video: {}'.format(str(video))
                    self.engine.telegram.sendPhoto(
                        chat_id=configurator.get('CHAT_ID'),
                        photo=video.img,
                        caption=u'{}: {}'.format(video.name, video.url))
                    # match = db.select('SELECT * FROM youtube WHERE id="{}"'.format(video.id))
            except:
                # NOTE(review): bare except — any failure is only logged.
                traceback.print_exc()
        # Poll again in a minute.
        self.start_later(60)
class YouTubeWatcher(Task):
    """Task that polls a fixed set of YouTube channels and pushes a Telegram
    photo message whenever a channel publishes a new video.  (Python 2.)"""

    def __init__(self, engine):
        super(YouTubeWatcher, self).__init__(engine)
        # db.upsert('youtube', [
        #     ('channel', 'varchar(64)'),
        #     ('video_id', 'varchar(64)')
        # ])
        self.yt = YouTube()
        # Kick off the first run() one second from now.
        self.start_later(1)

    def run(self):
        # Channels to watch; last-seen video ids live in the var store.
        channels = ('DotaCinema', 'WronchiAnimation')
        for channel in channels:
            try:
                videos = self.yt.get_latest(channel)
                video = videos[0]
                last_video_id = get_var('youtube.{}.last_id'.format(channel), '')
                if video.id != last_video_id:
                    # Remember the newest id, then announce it on Telegram.
                    set_var('youtube.{}.last_id'.format(channel), video.id)
                    print 'New video: {}'.format(str(video))
                    self.engine.telegram.sendPhoto(
                        chat_id=configurator.get('CHAT_ID'),
                        photo=video.img,
                        caption=u'{}: {}'.format(video.name, video.url)
                    )
                    # match = db.select('SELECT * FROM youtube WHERE id="{}"'.format(video.id))
            except:
                # NOTE(review): bare except — any failure is only logged.
                traceback.print_exc()
        # Poll again in a minute.
        self.start_later(60)
# TODO: improve banner
usage = f"""usage: python3 {argv[0]} PLAYLIST [pattern]

Download albums from youtube/soundcloud and set metadata in mp3 files

positional arguments:
    playlist: youtube/soundcloud playlist url
    pattern: (optional) pattern to be removed from the track title

eg.: python3 {argv[0]} https://www.youtube.com/playlist?list=PLGeJR8ZOrTZdMuBWM9IYta6IoHKku0nH4 'bladee - '
"""

if "-h" in argv or "--help" in argv:
    exit(usage)
elif len(argv) < 2:
    exit("error: invalid argvs\n" + usage)

album = argv[1]
remove_from_title = ""
if len(argv) == 3:
    remove_from_title = argv[2]

# Dispatch on the URL shape; `is not None` is the correct identity test for
# a re.search result (the original compared with `!= None`).
if re.search(r"^https?:\/\/(www\.)?youtube\.com/playlist\?list=.*", album) is not None:
    YouTube.get_playlist_info(album, remove_from_title).download()
elif re.search(
        r"^https?:\/\/(www\.)?soundcloud\.com/[a-z 0-9 _ \-]*/sets/[a-z 0-9 _ \-]*",
        album) is not None:
    SoundCloud.get_set_info(album, remove_from_title).download()
else:
    print("error: invalid url")
def resolve(self, link):
    """Return a YouTube handler when `link` is hosted on a known YouTube
    domain; otherwise return None."""
    host = urlparse(link).netloc
    if host not in self.youtube_domains:
        return None  # implicit in the original; made explicit
    return YouTube()
def __init__(self):
    """Create the module's Twitter and YouTube clients."""
    # NOTE(review): Twitter is built with empty credentials — confirm they
    # are injected elsewhere.
    self.Twitter = Twitter("","")
    self.YouTube = YouTube()
def main():
    """Join the Noon Pacific track index with YouTube metadata (video id,
    title, publish date, view count) and write np-200-tracks.json."""
    # The main Noon Pacific index
    npfile = 'np-200.json'
    # List of ids of tracks for which no corresponding video exists
    no_video_file = 'no-youtube-video.json'
    # Dictionary mapping ids to correct video results
    wrong_video_file = 'wrong-youtube-video.json'
    with open(npfile, encoding='utf8') as f:
        npdata = json.load(f)  # list of dicts containing lists of dicts
    with open(no_video_file) as f:
        no_video = set(json.load(f))
    with open(wrong_video_file) as f:
        corrected_video_id = json.load(f)
    # Aggregate all the tracks into one list, tagging each with its release
    tracks = []
    for mixtape in npdata:
        tape_tracks = mixtape['tracks']
        for track in tape_tracks:
            track['np_release'] = mixtape['release']
        tracks.extend(tape_tracks)
    # Add youtube plays to each track's data
    yt = YouTube()
    for i, track in enumerate(tracks):
        try:
            if track['id'] in no_video:
                # There is no youtube video for this track.
                continue
            if track['id'] in corrected_video_id:
                # Use the manually-corrected video id instead of searching.
                video_id = corrected_video_id[track['id']]
                snippet = yt.snippet(video_id)
                s = snippet['items'][0]['snippet']
            else:
                # Search by "artist title" and take the first hit.
                q = track['artist'] + ' ' + track['title']
                result = yt.search_first(q)
                if not result:
                    # No video found
                    continue
                s = result['snippet']
                video_id = result['id']['videoId']
            video_title = s['title']
            video_date = s['publishedAt']
            video_view_count = yt.view_count(video_id)
            # Add the new data to the track
            tracks[i].update({'video_id': video_id,
                              'video_title': video_title,
                              'video_date': video_date,
                              'listens': video_view_count})
            print(video_title)  # Just for progress monitoring
        except requests.exceptions.HTTPError as e:
            # A server-side error from the API.
            # Just eat these errors if they occur. There are too many
            # songs to analyze to worry about one missing track.
            print(str(e))
            continue
    # Write out the new extended track data
    with open('np-200-tracks.json', 'w', encoding='utf8') as f:
        json.dump(tracks, f, indent=4)
key = os.getenv('KEY') idVideo = os.getenv('ID_VIDEO') idChannel = os.getenv('ID_CHANNEL') client = os.getenv('PAST_CLIENT') category = os.getenv('CATEGORY') refresh = os.getenv('REFRESH') access = os.getenv('ACCESS') scopes = [ "https://www.googleapis.com/auth/youtubepartner", "https://www.googleapis.com/auth/youtube", "https://www.googleapis.com/auth/youtube.force-ssl" ] # YouTube object that will hold details of the video video = YouTube(idVideo) thumbnail_path = "thumbnail_base.png" # Function: main() # The function that will create an API client and use the logic to update the # title and description. def main(): api_service = 'youtube' api_version = 'v3' client_secrets_file = client # Getting credentials and create API client flow = google_auth_oauthlib.flow.InstalledAppFlow.from_client_secrets_file(
def __init__(self, app):
    """Keep a reference to the owning app and create a YouTube API client."""
    self.app = app
    self.yt = YouTube()
def test_get_request_url_popular_games(self):
    """The popular-games request URL should be set on a fresh client."""
    client = YouTube()
    url = client.get_request_url_popular_games()
    self.assertIsNotNone(url)
def test_get_popular_games(self):
    """A fresh client should return a non-None popular-games result."""
    client = YouTube()
    games = client.get_popular_games()
    self.assertIsNotNone(games)