async def run(self, url=None, website_pid=None):
    """Asynchronously yield watch URLs for every video in a YouTube playlist.

    Pages through the Data API v3 ``playlistItems`` endpoint (50 items per
    page), rotating through the configured API keys until one responds 200.

    :param url: playlist URL; used to derive ``website_pid`` when the pid is
                not given directly.
    :param website_pid: the YouTube playlist id.
    :yields: ``https://www.youtube.com/watch?v=<video_id>`` strings.
    :raises Exception: when every API key fails for a page.
    """
    if url and not website_pid:
        website_pid = self.get_pid(url)
    nextPageToken = ''
    async with aiohttp.ClientSession() as session:
        while True:
            # Try each key in turn for the current page; apirespond stays
            # None until one of them succeeds.
            apirespond = None
            for key in Config.YOUTUBE_API_KEYS.split(","):
                if nextPageToken:
                    api_url = f"https://www.googleapis.com/youtube/v3/playlistItems?pageToken={nextPageToken}&part=snippet&maxResults=50&playlistId={website_pid}&key={key}"
                else:
                    api_url = f"https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&maxResults=50&playlistId={website_pid}&key={key}"
                async with session.get(api_url) as resp:
                    # Read the body unconditionally so the failure log can
                    # include it (the old code logged `apirespond.content`,
                    # which was unbound on the first failure and a plain str
                    # otherwise — either way it crashed before logging).
                    body = await resp.text()
                if resp.status == 200:
                    apirespond = body
                    break
                log_ne(op='youtube_playlist_run_async', level='WARN', obj={
                    'msg': 'FETCH_FAILED',
                    'key': key,
                    'resp': body,
                    'url': api_url
                })
            if apirespond is None:
                # Every key failed; previously execution fell through to
                # loads() and crashed with an unrelated error.
                raise Exception('failed to fetch playlist')
            ret = loads(apirespond)
            for item in ret['items']:
                video_id = item['snippet']['resourceId']['videoId']
                yield f"https://www.youtube.com/watch?v={video_id}"
            if 'nextPageToken' in ret:
                nextPageToken = ret['nextPageToken']
            else:
                break
async def get_metadata(self, url=None):
    """Fetch title/description metadata for a bilibili favlist (playlist).

    Extracts the uid/fid from ``url`` via ``self.URL_MATCH`` and queries the
    medialist ``spaceDetail`` endpoint.

    :param url: the playlist page URL.
    :returns: dict with ``desc`` (generated description) and ``title``
              (favlist title from the API).
    :raises Exception: when the API request does not return HTTP 200.
    """
    ret = self.URL_MATCH.search(url)
    uid, fid = ret.groups()
    page = 1
    api_url = f"https://api.bilibili.com/medialist/gateway/base/spaceDetail?media_id={fid}&pn={page}&ps=1&keyword=&order=mtime&type=0&tid=0&jsonp=jsonp"
    async with aiohttp.ClientSession() as session:
        async with session.get(api_url) as resp:
            # Read the body either way: the old failure path logged
            # `apirespond.content` while apirespond was still None, so the
            # intended log+raise was masked by an AttributeError.
            apirespond = await resp.text()
            if resp.status != 200:
                log_ne(op='bilibili_playlist_run_async', level='WARN', obj={
                    'msg': 'FETCH_FAILED',
                    'fid': fid,
                    'uid': uid,
                    'playlist_url': url,
                    'resp': apirespond,
                    'url': api_url
                })
                raise Exception('failed to fetch playlist')
    resp_obj = loads(apirespond)
    return {
        "desc": "Playlist created from " + url + "\nCreated at " + str(datetime.now()),
        "title": resp_obj['data']['info']['title']
    }
def run(self, content, xpath, link):
    """Fetch metadata for a single YouTube video (synchronous variant).

    Extracts the video id from ``link``, then queries the Data API v3
    ``videos`` endpoint, rotating through configured API keys until one
    responds 200.

    :param content: unused here (kept for the shared scraper interface).
    :param xpath: unused here (kept for the shared scraper interface).
    :param link: a youtube.com or youtu.be video URL.
    :returns: ``makeResponseSuccess`` payload with thumbnail, title, desc,
              upload date (UTC), unique id, channel URL and tags.
    :raises Exception: when the link is unrecognized or every key fails.
    """
    if 'youtube.com' in link:
        vidid = link[link.rfind('=') + 1:]
    elif 'youtu.be' in link:
        if 'watch?v=' in link:
            vidid = link[link.rfind('=') + 1:]
        else:
            vidid = link[link.rfind('/') + 1:]
    else:
        # Previously vidid stayed unbound here and the code crashed with a
        # NameError further down.
        raise Exception('unrecognized youtube link: %s' % link)
    apirespond = None
    for key in Config.YOUTUBE_API_KEYS.split(","):
        api_url = "https://www.googleapis.com/youtube/v3/videos?id=" + vidid + "&key=" + key + "&part=snippet,contentDetails,statistics,status"
        resp = requests.get(api_url)  # API response
        if resp.status_code == 200:
            apirespond = resp
            break
        log_ne(op='youtube_run', level='WARN', obj={
            'msg': 'FETCH_FAILED',
            'key': key,
            'resp': resp.content,
            'url': api_url
        })
    if apirespond is None:
        # Previously a failed response was parsed anyway and the code
        # crashed on the missing 'items' key.
        raise Exception('failed to fetch video metadata')
    player_response = apirespond.json()
    player_response = player_response['items'][0]
    player_response = player_response['snippet']
    publishedAt_time = player_response['publishedAt']
    # Upload time, e.g. 2019-04-27 04:58:45+00:00 after normalizing to UTC.
    uploadDate = parse(publishedAt_time).astimezone(timezone.utc)
    title = player_response['title']  # video title
    desc = player_response['description']  # video description
    thumbnailsurl0 = player_response['thumbnails']
    thumbnailsurl1 = thumbnailsurl0['medium']
    thumbnailURL = thumbnailsurl1['url']  # thumbnail URL, size 320x180
    utags = player_response['tags'] if 'tags' in player_response else []
    return makeResponseSuccess({
        'thumbnailURL': thumbnailURL,
        'title': title,
        'desc': desc,
        'site': 'youtube',
        'uploadDate': uploadDate,
        "unique_id": "youtube:%s" % vidid,
        "user_space_urls": [
            f"https://www.youtube.com/channel/{player_response['channelId']}"
        ],
        "utags": utags
    })
async def run_async(self, content, xpath, link, update_video_detail):
    """Fetch metadata for a single YouTube video (async variant).

    Picks API keys at random, removing keys that fail, until a 200 response
    is received or the key pool is exhausted.

    :param content: unused here (kept for the shared scraper interface).
    :param xpath: unused here (kept for the shared scraper interface).
    :param link: a youtube.com or youtu.be video URL.
    :param update_video_detail: unused here (kept for the shared interface).
    :returns: ``makeResponseSuccess`` payload with thumbnail, title, desc,
              upload date (UTC), unique id and tags.
    :raises Exception: when the link is unrecognized or every key fails.
    """
    if 'youtube.com' in link:
        vidid = link[link.rfind('=') + 1:]
    elif 'youtu.be' in link:
        if 'watch?v=' in link:
            vidid = link[link.rfind('=') + 1:]
        else:
            vidid = link[link.rfind('/') + 1:]
    else:
        # Previously vidid stayed unbound here (NameError downstream).
        raise Exception('unrecognized youtube link: %s' % link)
    keys = Config.YOUTUBE_API_KEYS.split(",")
    apirespond = None
    # One session for all retries (the old code opened a fresh session per
    # attempt, which is wasteful but behaviorally identical).
    async with aiohttp.ClientSession() as session:
        while keys:
            key = random.choice(keys)
            api_url = "https://www.googleapis.com/youtube/v3/videos?id=" + vidid + "&key=" + key + "&part=snippet,contentDetails,statistics,status"
            async with session.get(api_url, headers=self.HEADERS_NO_UTF8) as resp:
                body = await resp.text()
                if resp.status == 200:
                    apirespond = body
                    break
            log_ne(op='youtube_run_async', level='WARN', obj={
                'msg': 'FETCH_FAILED',
                'key': key,
                'resp': body,
                'url': api_url
            })
            keys.remove(key)
    if apirespond is None:
        # Previously, when every key had been removed, the last FAILED body
        # (or an unbound name) was fed to loads() and crashed obscurely.
        raise Exception('failed to fetch video metadata')
    player_response = loads(apirespond)
    player_response = player_response['items'][0]
    player_response = player_response['snippet']
    publishedAt_time = player_response['publishedAt']
    uploadDate = parse(publishedAt_time).astimezone(timezone.utc)
    title = player_response['title']
    desc = player_response['description']
    thumbnailsurl0 = player_response['thumbnails']
    thumbnailsurl1 = thumbnailsurl0['medium']
    thumbnailURL = thumbnailsurl1['url']
    utags = player_response['tags'] if 'tags' in player_response else []
    return makeResponseSuccess({
        'thumbnailURL': thumbnailURL,
        'title': title,
        'desc': desc,
        'site': 'youtube',
        'uploadDate': uploadDate,
        "unique_id": "youtube:%s" % vidid,
        "utags": utags
    })
async def run(self, url = None, website_pid = None) :
    """Asynchronously yield video ids from a nicovideo mylist HTML page.

    Scrapes the embedded JSON blob via ``self.DATA_MATCH_CONTENT``.

    :param url: the mylist page URL; built from ``website_pid`` if absent.
    :param website_pid: the mylist id.
    :yields: nicovideo video id strings.
    :raises Exception: when the page fetch does not return HTTP 200.
    """
    if website_pid and not url :
        url = f"https://www.nicovideo.jp/mylist/{website_pid}"
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            # Always read the body: the old failure path logged
            # `web_content.content` while web_content was still unbound,
            # raising a NameError before the intended log+raise.
            web_content = await resp.text()
            if resp.status != 200 :
                log_ne(op = 'nicovideo_mylist_run', level = 'WARN', obj = {'msg': 'FETCH_FAILED', 'url': url, 'resp': web_content})
                raise Exception('failed to fetch playlist')
    ret = self.DATA_MATCH_CONTENT.search(web_content)
    data_json_str, = ret.groups()
    data_json = loads(data_json_str)
    for item in data_json :
        yield item['item_data']['video_id']
async def get_metadata(self, url = None) :
    """Fetch title/description metadata from a nicovideo mylist HTML page.

    Scrapes the embedded metadata blob via ``self.DATA_MATCH_META`` and
    parses it with YAML (tabs replaced, since YAML forbids tab indentation).

    :param url: the mylist page URL.
    :returns: dict with ``desc`` and ``title``.
    :raises Exception: when the page fetch does not return HTTP 200.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            # Always read the body: the old failure path referenced
            # `web_content.content` while web_content was unbound
            # (NameError masked the intended log+raise).
            web_content = await resp.text()
            if resp.status != 200 :
                log_ne(op = 'nicovideo_mylist_get_metadata', level = 'WARN', obj = {'msg': 'FETCH_FAILED', 'url': url, 'resp': web_content})
                raise Exception('failed to fetch playlist')
    ret = self.DATA_MATCH_META.search(web_content)
    meta_json_str, = ret.groups()
    meta_json_str = meta_json_str.replace('\t', ' ')
    meta_json = yaml.safe_load(meta_json_str)
    if not meta_json["description"] :
        # Fixed the doubled "from from" typo in the generated description.
        meta_json["description"] = f'Playlist from {url}\nat {str(datetime.now())}'
    return {
        "desc": meta_json["description"],
        "title": meta_json["name"]
    }
async def run(self, url=None, website_pid=None):
    """Asynchronously yield watch ids from a nicovideo mylist via nvapi v2.

    :param url: the nvapi mylist URL; built from ``website_pid`` if absent.
    :param website_pid: the mylist id.
    :yields: nicovideo watch id strings.
    :raises Exception: when the API request does not return HTTP 200.
    """
    if website_pid and not url:
        url = f"https://nvapi.nicovideo.jp/v2/mylists/{website_pid}?pageSize=1000&page=1"
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers=self.headers) as resp:
            web_content = await resp.text()
            if resp.status != 200:
                # The old code logged `web_content.content`, but web_content
                # is a str — building the log dict raised AttributeError
                # before log_ne/raise could run.
                log_ne(op='nicovideo_mylist_run', level='WARN', obj={
                    'msg': 'FETCH_FAILED',
                    'url': url,
                    'resp': web_content
                })
                raise Exception('failed to fetch playlist')
    data_json = loads(web_content)
    for item in data_json['data']['mylist']['items']:
        yield item['watchId']
async def get_metadata(self, url=None):
    """Fetch title/description metadata for a nicovideo mylist via nvapi v2.

    :param url: the mylist URL; when absent, placeholder metadata is
                returned instead of raising.
    :returns: dict with ``desc`` and ``title``.
    :raises Exception: when the API request does not return HTTP 200.
    """
    if not url:
        return {"desc": "not available", "title": "not available"}
    # Call get_pid as a bound method — the old code passed `self` twice
    # (`self.get_pid(self, url)`), unlike the sibling scrapers.
    website_pid = self.get_pid(url)
    api_url = f"https://nvapi.nicovideo.jp/v2/mylists/{website_pid}?pageSize=1&page=1"
    async with aiohttp.ClientSession() as session:
        async with session.get(api_url, headers=self.headers) as resp:
            web_content = await resp.text()
            if resp.status != 200:
                # Log the text itself: `web_content.content` on a str raised
                # AttributeError before the intended log+raise.
                log_ne(op='nicovideo_mylist_get_metadata', level='WARN', obj={
                    'msg': 'FETCH_FAILED',
                    'url': url,
                    'resp': web_content
                })
                raise Exception('failed to fetch playlist')
    meta_json = loads(web_content)['data']['mylist']
    if not meta_json["description"]:
        # Fixed the doubled "from from" typo in the generated description.
        meta_json["description"] = f'Playlist from {url}\nat {str(datetime.now())}'
    return {"desc": meta_json["description"], "title": meta_json["name"]}