def episodes_listing(self, url):
    """Scrape a show page and append its episode entries to self.list."""
    html = client.request(url)
    container = client.parseDOM(
        html, 'div', attrs={'class': 'row listrow list2 ?'})[0]
    cells = client.parseDOM(
        container, 'div', attrs={'class': '.+?list-item color.+?'})
    for cell in cells:
        name = client.parseDOM(cell, 'h3')[0]
        # Line breaks inside the heading become plain spaces.
        name = name.replace('<br/>', ' ').replace('<br>', ' ')
        thumb = client.parseDOM(cell, 'img', ret='src')[0]
        href = client.parseDOM(cell, 'a', ret='href')[0]
        self.list.append(
            {'title': name, 'url': ''.join([self.base_link, href]),
             'image': thumb})
    return self.list
def episodes_listing(self, url):
    """Scrape a show page (itertags variant) into self.list.

    NOTE(review): this is a second definition of episodes_listing — if
    both live in the same class, this later one wins; confirm intent.
    """
    html = client.request(url)
    container = client.parseDOM(
        html, 'div', attrs={'class': 'row listrow list2 ?'})[0]
    for fragment in (tag.text for tag in itertags(container, 'div')):
        try:
            name = client.parseDOM(fragment, 'h3')[0].replace(
                '<br/>', ' ').replace('<br>', ' ')
        except Exception:
            # A div without a heading is not an episode cell; skip it.
            continue
        thumb = client.parseDOM(fragment, 'img', ret='src')[0]
        href = client.parseDOM(fragment, 'a', ret='href')[0]
        self.list.append(
            {'title': name, 'url': ''.join([self.base_link, href]),
             'image': thumb})
    return self.list
def ert(url):
    """Resolve an ERT page to either a direct HLS stream or a YouTube URL.

    Greek visitors with an embedded HLSLink get the HLS stream scraped
    from the page's inline script; everyone else gets a YouTube watch URL
    built from the iframe embed id.
    """
    from resources.lib.modules.helpers import geo_loc
    from resources.lib.modules.constants import yt_url
    html = client.request(url)
    iframes = client.parseDOM(html, 'iframe', ret='src')
    try:
        # Deliberate control-flow trick: raising IndexError funnels the
        # Greek + HLSLink case into the script-scraping branch below.
        if geo_loc() == 'Greece' and 'HLSLink' in html:
            raise IndexError
        elif geo_loc() != 'Greece':
            result = iframes[0]
        else:
            result = iframes[-1]
        if not result:
            raise IndexError
    except IndexError:
        result = client.parseDOM(
            html, 'script', attrs={'type': 'text/javascript'})[0]
        result = re.search(r'HLSLink = \'(.+?)\'', result).group(1)
        # Direct HLS stream; no YouTube round-trip needed.
        return result
    # result is a YouTube embed URL; the last path segment starts with
    # the 11-character video id.
    vid = result.rpartition('/')[2][:11]
    video = yt_url + vid
    return video
def event_list(self, url):
    """Scrape the events page into self.list.

    Each item carries title, per-event link, plot and thumbnail, with
    relative URLs resolved against GM_BASE.
    """
    html = client.request(url)
    items = client.parseDOM(html, 'div',
                            attrs={'style': 'margin-bottom: 10px'})
    for item in items:
        title = client.parseDOM(
            item, 'a', attrs={'class': 'btn btn-default'})[0]
        # BUGFIX: parse the thumbnail from the current item, not from the
        # whole page — the original passed `html`, so every event showed
        # the first event's image.
        image = client.parseDOM(
            item, 'img',
            attrs={'class': 'thumbnail img-responsive pull-right'},
            ret='src')[0]
        image = urljoin(GM_BASE, image)
        link = client.parseDOM(
            item, 'a', attrs={'class': 'btn btn-default'}, ret='href')[0]
        link = urljoin(GM_BASE, link)
        plot = client.parseDOM(item, 'span', attrs={'class': 'pull-right'})[0]
        self.list.append({
            'title': title,
            'url': link,
            'plot': plot,
            'image': image
        })
    return self.list
def news_episodes_listing(self, query):
    """Fetch 100 ajax pages of a news category concurrently and collect
    their items into self.list.

    NOTE(review): self.thread presumably stores each page's html into
    self.data at the index passed to it — confirm; the '' placeholder
    appended per thread keeps that index valid before the thread runs.
    """
    threads = []
    for i in list(range(1, 101)):
        threads.append(
            workers.Thread(
                self.thread, i,
                self.newsgr_link_ajax.format(page=str(i), category=query)))
        self.data.append('')
    [i.start() for i in threads]
    [i.join() for i in threads]
    html = '\n'.join(self.data)
    items = client.parseDOM(html, 'div', attrs={'class': 'newsItem'})
    for item in items:
        # Index [1]: the second anchor carries the headline/link —
        # presumably the first is the image anchor; confirm.
        label = client.replaceHTMLCodes(client.parseDOM(item, 'a')[1])
        title = u'[CR]'.join([label, client.parseDOM(item, 'time')[0]])
        image = client.parseDOM(item, 'img', ret='src')[0]
        url = client.parseDOM(item, 'a', ret='href')[1]
        self.list.append({'title': title, 'image': image, 'url': url})
    return self.list
def gm_sports(self):
    """Build the sports directory from the (cached) sports root page."""
    html = root(SPORTS) if CACHE_DEBUG else cache.get(root, 48, SPORTS)
    options = re.compile('(<option value.+?</option>)', re.U).findall(html)
    icons = [
        'https://www.shareicon.net/data/256x256/2015/11/08/157712_sport_512x512.png',
        'https://www.shareicon.net/data/256x256/2015/12/07/196797_ball_256x256.png'
    ]
    # zip truncates to the two available icons.
    for option, icon in zip(options, icons):
        name = client.parseDOM(option, 'option')[0]
        href = client.parseDOM(option, 'option', ret='value')[0]
        href = client.replaceHTMLCodes(href)
        self.list.append({
            'title': name,
            'action': 'listing',
            'url': urljoin(GM_BASE, href),
            'image': icon
        })
    directory.add(self.list)
def pod_listing(self, url):
    """List podcast shows from a paginated radio page into self.list."""
    html = client.request(url)
    listing = client.parseDOM(
        html, 'div', attrs={'class': 'row border-bottom pt-4 m-0 show-item'})
    # NOTE(review): this bumps the last digit of EVERY digit run in the
    # url, not just a trailing page number — fine while the page number
    # is the only number in the url; confirm.
    nexturl = re.sub(r'\d(?!\d)', lambda x: str(int(x.group(0)) + 1), url)
    for item in listing:
        # NOTE(review): the first replace() argument looks like a decoded
        # HTML entity (&#039;) — confirm against the repository original.
        title = client.parseDOM(item, 'h3')[0].replace(''', '\'')
        image = ''.join(
            [self.radio_base, client.parseDOM(item, 'img', ret='src')[0]])
        url = ''.join(
            [self.radio_base, client.parseDOM(item, 'a', ret='href')[0]])
        self.list.append({
            'title': title,
            'image': image,
            'url': url,
            'nextaction': 'podcasts',
            'next': nexturl,
            'nextlabel': 32500
        })
    return self.list
def pod_episodes(self, url):
    """List a podcast's episodes from the page's <option> date picker."""
    html = client.request(url)
    select = client.parseDOM(
        html, 'div', attrs={'class': 'col-8 col-sm-4 p-0'})[0]
    image = re.search(r'background-image: url\("(.+?)"\)', html).group(1)
    # The show name is the same for every episode; parse it once.
    show_name = client.parseDOM(html, 'h2', attrs={'class': 'mb-3.+?'})[0]
    for option in re.findall(r'(<option.+?option>)', select, re.S):
        date = re.search(r'(\d{2}/\d{2}/\d{4})', option).group(1)
        path = re.search(r'data-url = "([\w\-/]+)"', option).group(1)
        self.list.append({
            'title': ' - '.join([show_name, date]),
            'image': image,
            'url': ''.join([self.radio_base, path])
        })
    return self.list
def resolve(self, url):
    """Resolve a YouTube URL or bare id to the addon's playable path.

    Returns None when the video appears unavailable (a notification area
    is present, or the "unavailable" submessage contains any text).
    """
    try:
        # Accept full watch URLs, short links or bare ids.
        # Renamed from `id` to avoid shadowing the builtin.
        video_id = url.split('?v=')[-1].split('/')[-1].split(
            '?')[0].split('&')[0]
        result = client.request(
            'http://www.youtube.com/watch?v=%s' % video_id)
        message = client.parseDOM(
            result, 'div', attrs={'id': 'unavailable-submessage'})
        message = ''.join(message)
        alert = client.parseDOM(
            result, 'div', attrs={'id': 'watch7-notification-area'})
        if alert:  # idiomatic truthiness instead of len(...) > 0
            raise Exception
        # Any alphabetic text in the submessage means "unavailable".
        if re.search('[a-zA-Z]', message):
            raise Exception
        return self.play_link.format(video_id)
    except Exception:
        return
def _cartoon_various(self, url):
    """Scrape a gamato genre page into self.list with paging metadata."""
    if url is None:
        url = '{0}/genre/gamato/'.format(GK_BASE)
    html = client.request(url)
    next_link = client.parseDOM(
        html, 'a', attrs={'class': 'arrow_pag'}, ret='href')[-1]
    grid = client.parseDOM(html, 'div', attrs={'class': 'items'})[0]
    for article in client.parseDOM(
            grid, 'article', attrs={'id': r'post-\d+'}):
        heading = client.parseDOM(article, 'h3')[0]
        name = client.replaceHTMLCodes(client.parseDOM(heading, 'a')[0])
        url = client.parseDOM(heading, 'a', ret='href')[0]
        meta = client.parseDOM(article, 'div', attrs={'class': 'metadata'})[0]
        try:
            spans = client.parseDOM(meta, 'span')
            # A 4-character span is the release year.
            etos = [s for s in spans if len(s) == 4][0]
            plot = client.parseDOM(
                article, 'div', attrs={'class': 'texto'})[0]
            runtime = [s for s in spans if s.endswith('min')][0]
            duration = int(re.search(r'(\d+)', runtime).group(1)) * 60
        except IndexError:
            # Dubbed entries often miss year/duration; fall back.
            plot = u'Μεταγλωτισμένο'
            etos = '2022'
            duration = 3600
        image = client.parseDOM(article, 'img', ret='data-lazy-src')[0]
        self.list.append({
            'title': '{0} ({1})'.format(name, etos),
            'url': url,
            'image': image,
            'nextlabel': 30334,
            'next': next_link,
            'plot': plot,
            'duration': duration,
            'year': int(etos),
            'nexticon': iconname('next')
        })
    return self.list
def items_list(self, url):
    """Collect <article> items from a (possibly paginated) page and append
    title/url/image dicts to self.list.

    NOTE(review): legacy Python 2 code — the .encode('utf-8') calls
    produce bytes on Python 3.  Also, if the first try fails before
    `items` is bound, the loop below raises NameError.
    """
    page = url
    result = client.request(page)
    try:
        if "contentContainer_totalpages" in result:
            # Paginated: fan out one worker per page and merge the html.
            totalPages = int(
                re.search(r'contentContainer_totalpages = (\d+);',
                          result).group(1))
            seriesId = re.search(
                r'/templates/data/morevideos\?aid=(\d+)', result).group(1)
            items = []
            threads = []
            for i in list(range(1, totalPages + 1)):
                threads.append(
                    workers.Thread(
                        self.thread,
                        self.more_videos + seriesId + "&p=" + str(i),
                        i - 1))
                # Placeholder keeps self.data index i-1 valid for the thread.
                self.data.append('')
            [i.start() for i in threads]
            [i.join() for i in threads]
            for i in self.data:
                items.extend(client.parseDOM(i, "article"))
        else:
            items = client.parseDOM(result, "article")
    except:
        pass
    for item in items:
        try:
            title = client.parseDOM(item, "h2")[0]
            title = client.replaceHTMLCodes(title)
            title = title.encode('utf-8')
            link = client.parseDOM(item, "a", ret="href")[0]
            # Links shaped /.../<id>/... are single episodes resolved via
            # the episodes JSON endpoint; others link to a videos index.
            if re.match(r'/.+/(\d+)/.+', link) is not None:
                episodeId = re.search(r'/.+/(\d+)/.+', link).group(1)
                episodeJSON = client.request(self.episodes_link + episodeId)
                episodeJSON = json.loads(episodeJSON)
                url = episodeJSON['url']
                url = client.replaceHTMLCodes(url)
                url = url.encode('utf-8')
            else:
                url = self.base_link + link + '/videos'
            image = client.parseDOM(item, "img", ret="src")[0]
            image = client.replaceHTMLCodes(image)
            image = image.encode('utf-8')
            self.list.append({'title': title, 'url': url, 'image': image})
        except:
            # Best effort: skip malformed items.
            pass
    return self.list
def generic_listing(self, url):
    """List a section's fresh videos, plus archived ones when present.

    The row/cell CSS classes differ per section, so they are selected
    based on which section link was requested.
    """
    html = client.request(url)
    if url == self.news_link:
        fresh_row = 'row m-0 listrow new-videos'
        fresh_cell = 'col-12 pl-0 pr-0 list1 list-item color_enimerosi'
        old_row = 'row m-0 listrow s234 '
        old_cell = 'col-12 pl-0 pr-0 list1 list-item color_enimerosi'
    elif url == self.entertainment_link:
        fresh_row = 'row listrow list2 '
        fresh_cell = 'd-none d-md-block col-md-4 listimg color_psuchagogia'
        old_row = 'row listrow list2 s234 '
        old_cell = 'd-none d-md-block col-md-3 listimg color_psuchagogia'
    else:
        fresh_row = 'row listrow list2 '
        fresh_cell = 'd-none d-md-block col-md-4 listimg color_seires'
        old_row = 'row listrow list2 s234 '
        old_cell = 'd-none d-md-block col-md-3 listimg color_seires'

    def _cells(row_class, cell_class):
        # First matching row div, then its item cells.
        row = client.parseDOM(html, 'div', attrs={'class': row_class})[0]
        return client.parseDOM(row, 'div', attrs={'class': cell_class})

    for cell in _cells(fresh_row, fresh_cell):
        title = client.parseDOM(cell, 'h3')[0]
        image = client.parseDOM(cell, 'img', ret='src')[0]
        link = ''.join(
            [self.base_link, client.parseDOM(cell, 'a', ret='href')[0]])
        self.list.append({'title': title, 'url': link, 'image': image})

    if 's234' in html:
        for cell in _cells(old_row, old_cell):
            title = ' - '.join(
                [client.parseDOM(cell, 'h3')[0], control.lang(32013)])
            image = client.parseDOM(cell, 'img', ret='src')[0]
            link = ''.join(
                [self.base_link, client.parseDOM(cell, 'a', ret='href')[0]])
            self.list.append({'title': title, 'url': link, 'image': image})

    return self.list
def music_list(self, url):
    """List songs, albums or artists from a music page into self.list."""
    html = client.request(url)
    try:
        # bytes on py2 / mis-typed responses; already-str falls through.
        html = html.decode('utf-8')
    except Exception:
        pass
    if 'albumlist' in html:
        # Artist name from the first h4, minus any trailing anchor.
        # NOTE(review): kept as a one-element list here...
        artist = [client.parseDOM(html, 'h4')[0].partition(' <a')[0]]
    else:
        artist = None
    # ...and flattened to a plain string only in audio-only music-window
    # mode — downstream consumers apparently accept either; confirm.
    if control.setting('audio_only') == 'true' and control.condVisibility('Window.IsVisible(music)') and artist is not None:
        artist = ''.join(artist)
    if 'songlist' in html:
        songlist = client.parseDOM(html, 'div', attrs={'class': 'songlist'})[0]
        items = client.parseDOM(songlist, 'li')
    elif 'albumlist' in html:
        albumlist = client.parseDOM(html, 'div', attrs={'class': 'albumlist'})[0]
        items = client.parseDOM(albumlist, 'li')
    else:
        artistlist = client.parseDOM(html, 'div', attrs={'class': 'artistlist'})[0]
        items = client.parseDOM(artistlist, 'li')
    if 'icon/music' in html:
        icon = client.parseDOM(html, 'img', attrs={'class': 'img-responsive'}, ret='src')[-1]
        icon = urljoin(gm.GM_BASE, icon)
    else:
        icon = iconname('music')
    for item in items:
        title = client.parseDOM(item, 'a')[0]
        link = client.parseDOM(item, 'a', ret='href')[0]
        link = urljoin(gm.GM_BASE, link)
        # Pages with the gapi player need resolving to a direct link.
        if 'gapi.client.setApiKey' in html:
            if CACHE_DEBUG:
                link = gm.source_maker(url)['links'][0]
            else:
                link = cache.get(gm.source_maker, 48, url)['links'][0]
        data = {'title': title, 'url': link, 'image': icon}
        if artist:
            data.update({'artist': artist})
        self.list.append(data)
    return self.list
def main_menu():
    """Build the addon's top-level directory of live streams and sub-menus."""
    xml = client.request('http://s135598769.onlinehome.us/mgtv.xml')
    titles = client.parseDOM(xml, 'title')
    mgtv, mgr = titles[0], titles[1]
    livetv_url = 'http://94.130.180.175:8081/live/greektv/playlist.m3u8'
    radio_url = 'http://94.130.180.175:8000/live'
    center_ville_url = 'http://mediacast.b2b2c.ca:8010/'
    menu = []
    menu.append({
        'title': mgtv.replace('Live', control.lang(30004)),
        'action': 'play',
        'url': livetv_url,
        'icon': 'livetv.png',
        'isFolder': 'False'
    })
    menu.append({
        'title': mgr.replace('Live', control.lang(30004)),
        'action': 'play',
        'url': radio_url,
        'icon': 'radio.png',
        'isFolder': 'False'
    })
    menu.append({
        'title': u'Montreal Greek TV - {0}'.format(control.lang(30001)),
        'action': 'youtube',
        'icon': 'youtube.png'
    })
    menu.append({
        'title': u'Radio Centre-Ville - Live',
        'action': 'play',
        'url': center_ville_url,
        'icon': 'Radio_Centre-ville-live.png',
        'isFolder': 'False'
    })
    menu.append({
        'title': control.lang(30007),
        'action': 'play',
        'url': 'broadcasts',
        'icon': 'center_ville.png',
        'isFolder': 'False'
    })
    menu.append({
        'title': u'Radio Centre-Ville - {0}'.format(control.lang(30005)),
        'action': 'audio_addon',
        'icon': 'pod_icon.png',
        'fanart': 'pod_fanart.jpg'
    })
    menu.append({
        'title': control.lang(30002),
        'action': 'news_addon',
        'icon': 'newspaper_icon.png',
        'fanart': 'xronika_fanart.png'
    })
    # Attach a fresh "clear cache" context-menu entry to every item.
    for entry in menu:
        entry.update(
            {'cm': [{'title': 30006, 'query': {'action': 'cache_clear'}}]})
    directory.add(menu)
def remote(url):
    """Fetch a remotely hosted YouTube API key set from a paste service.

    Supports pastebin/hastebin/osmc/debian/ubuntu paste links, plain
    3-4 line text, an XML layout, or Google's JSON client file.  Returns
    a [client_id, api_key, client_secret] list (or [id, api_key, secret]
    for the XML layout), or None when nothing usable was retrieved.
    Idiom fix: `not X in y` rewritten as `X not in y`.
    """
    # Rewrite paste-service links to their raw/plain views.
    if ('pastebin' in url or 'hastebin' in url
            or 'osmc.tv' in url) and 'raw' not in url:
        address = re.sub(r'(^.+?\.(?:com|tv)/)(\w+)', r'\1raw/\2', url)
    elif 'debian' in url and 'plain' not in url:
        address = re.sub(r'(^.+?\.net/)(\w+)', r'\1plain/\2', url)
    else:
        address = url
    if 'ubuntu' in url and 'plain' not in url:
        # No raw view: scrape the second <pre> block of the html page.
        html = client.request(address)
        text = client.parseDOM(html, 'pre')[1]
        text = client.replaceHTMLCodes(text)
    else:
        text = client.request(address)
    if not text:
        return
    text = text.strip('\r\n')
    if len(text.splitlines()) in (3, 4):
        # Plain text: one key per line.
        keys = text.splitlines()
    elif text.startswith('<?xml'):
        keys = [
            client.parseDOM(text, 'id')[0],
            client.parseDOM(text, 'api_key')[0],
            client.parseDOM(text, 'secret')[0]
        ]
    elif address.endswith('.json') or 'installed' in text:
        payload = json.loads(text)
        if 'installed' in payload:
            payload = payload['installed']
            if 'api_key' not in payload:
                # Google's "installed app" JSON ships no API key;
                # prompt the user for one.
                control.okDialog(heading='Youtube Setup',
                                 line1=control.lang(30023))
                api_key = control.inputDialog()
                if not api_key:
                    return
            else:
                api_key = payload['api_key']
            keys = [payload['client_id'], api_key, payload['client_secret']]
        else:
            keys = None
    else:
        keys = None
    return keys
def video_listing(self, url):
    """List a page of videos from data-video-* attributes into self.list."""
    html = client.request(url)
    try:
        # NOTE(review): .replace('&', '&') is a no-op as written — it
        # looks like a decoded '&amp;' entity replacement lost in
        # transit; confirm against the repository original.
        nexturl = ''.join([
            self.old_base, '/videos',
            client.parseDOM(html, 'a', attrs={'rel': 'next'},
                            ret='href')[0].replace('&', '&')
        ])
    except IndexError:
        # No rel="next" anchor: this is the last page.
        nexturl = None
    video_list = client.parseDOM(html, 'div',
                                 attrs={'class': 'videoItem cell'},
                                 ret='data-video-url')
    thumbnails = client.parseDOM(html, 'div',
                                 attrs={'class': 'videoItem cell'},
                                 ret='data-video-poster')
    titles = client.parseDOM(html, 'div',
                             attrs={'class': 'videoItem cell'},
                             ret='data-video-name')
    dates = client.parseDOM(html, 'div',
                            attrs={'class': 'videoItem cell'},
                            ret='data-video-date')
    listing = list(zip(titles, dates, thumbnails, video_list))
    for title, date, image, video in listing:
        title = client.replaceHTMLCodes(title)
        label = ''.join([title, ' ', '(', date, ')'])
        self.list.append({
            'title': label,
            'image': image,
            'url': video,
            'next': nexturl,
            'nextlabel': 32500,
            'nextaction': 'videos'
        })
    return self.list
def _paper_index(link):
    """Build a per-page menu for an issuu-hosted paper edition."""
    base_img_url = 'https://image.isu.pub/'
    html = client.request(link)
    # Second-to-last application/javascript script holds the JSON payload
    # assigned after ' = '.
    script = client.parseDOM(
        html, 'script', attrs={'type': 'application/javascript'})[-2]
    document = json.loads(
        script.partition(' = ')[2].rstrip(';'))['document']
    menu = []
    prefix = base_img_url + document['id'] + '/jpg/'
    for num in range(1, int(document['pageCount']) + 1):
        label = document['title'] + ' - ' + control.lang(30003) + ' ' + str(num)
        menu.append({
            'title': label,
            'image': prefix + 'page_{0}_thumb_large.jpg'.format(str(num)),
            'url': prefix + 'page_{0}.jpg'.format(str(num))
        })
    return menu
def broadcasts():
    """Return the enclosure URL of the first item in the PSA feed."""
    feed = client.request('http://greektimes.ca/feed/psa/')
    return client.parseDOM(feed, 'enclosure', ret='url')[0]
def cookie(self):
    """Log in and return the session cookie, or None on any failure.

    NOTE(review): legacy Python 2 variant — urllib.urlencode exists only
    on Python 2; a urlencode-based variant of this routine also exists in
    this file.  The login URL is redacted ('******') in this source.
    """
    try:
        login = '******'
        # Fetch the login form to obtain the CSRF token.
        token = client.request(login)
        token = client.parseDOM(token, 'input', ret='value',
                                attrs={'name': 'csrfmiddlewaretoken'})[0]
        headers = {'Cookie': 'csrftoken=%s' % token}
        post = {
            'username': self.user,
            'password': self.password,
            'csrfmiddlewaretoken': token,
            'next': ''
        }
        post = urllib.urlencode(post)
        c = client.request(login, post=post, headers=headers,
                           output='cookie')
        return c
    except:
        pass
def cookie(self):
    """Log in to xsubs and return the session cookie; None on failure."""
    try:
        login = '******'
        page = client.request(login)
        # CSRF token from the login form, echoed both as a cookie and a
        # form field.
        token = client.parseDOM(page, 'input', ret='value',
                                attrs={'name': 'csrfmiddlewaretoken'})[0]
        form = urlencode({
            'username': self.user,
            'password': self.password,
            'csrfmiddlewaretoken': token,
            'next': ''
        })
        return client.request(
            login, post=form,
            headers={'Cookie': 'csrftoken={0}'.format(token)},
            output='cookie')
    except Exception as e:
        log.log('Xsubs.tv failed at cookie function, reason: ' + str(e))
        return
def cache(self, url):
    """Fetch the series index and return (srsid, cleaned-title) pairs.

    Returns None when the request or parsing fails — callers treat a
    missing cache as empty (best effort).
    """
    try:
        result = client.request(url)
        # Strip non-ASCII bytes so the regex-based DOM parser stays sane.
        result = re.sub(r'[^\x00-\x7F]+', ' ', result)
        pairs = zip(client.parseDOM(result, 'series', ret='srsid'),
                    client.parseDOM(result, 'series'))
        return [(sid, cleantitle.get(name)) for sid, name in pairs]
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        pass
def radio_list():
    """Scrape the live-radio page into a list of station dicts."""
    html = client.request(LIVE_RADIO)
    # r'\d': parseDOM treats attrs values as regex patterns; the raw
    # string fixes the invalid '\d' escape sequence, which raises a
    # SyntaxWarning on modern Python.
    items = client.parseDOM(html, 'div', attrs={'data-index': r'\d'})
    stations = []
    for item in items:
        title = client.parseDOM(item, 'span')[0]
        image = client.parseDOM(item, 'img',
                                attrs={'class': 'sc-3izq3s-0 gRHEnj'},
                                ret='src')[0]
        url = client.parseDOM(item, 'a', ret='href')[0]
        stations.append({'title': title, 'url': url, 'image': image})
    return stations
def _webtv(self):
    """Append the web-tv navigation entries to self.list."""
    html = client.request(self.webtv_link)
    nav = client.parseDOM(
        html, 'ul', attrs={'class': 'menu menu--web-tv nav'})[0]
    # [1:] skips the first <li> — presumably a header entry; confirm.
    for entry in client.parseDOM(nav, 'li')[1:]:
        label = client.parseDOM(entry, 'a')[0]
        href = client.parseDOM(entry, 'a', ret='href')[0]
        self.list.append(
            {'title': label, 'url': ''.join([self.base_link, href])})
    return self.list
def gm_debris(link):
    """Return the primary button's href from a GM page."""
    page = client.request(urljoin(base_link, link))
    return client.parseDOM(page, 'a', ret='href',
                           attrs={"class": "btn btn-primary"})[0]
def gm_music(self):
    """Build the music-artist directory from the cached music page."""
    html = cache.get(gm.root, 96, gm.music_link)
    pattern = re.compile('(<option value=.+?</option>)', re.U)
    for option in pattern.findall(html):
        name = client.parseDOM(option, 'option')[0]
        href = urljoin(gm.base_link,
                       client.parseDOM(option, 'option', ret='value')[0])
        self.list.append({
            'title': name,
            'url': href,
            'image': iconname('music'),
            'action': 'artist_index'
        })
    directory.add(self.list, argv=self.argv)
def resolve(self, url):
    """Resolve a page URL to a playable stream URL.

    Direct streams and plugin paths pass through untouched; otherwise
    the stream is pulled from one of several page layouts and the
    user-agent spoof suffix is appended.
    """
    referer = url
    # Already playable: nothing to resolve.
    if '.m3u8' in url or '.mp4' in url or url.startswith('plugin'):
        return url
    html = client.request(url)
    if url == self.live_link_gr:
        url = client.parseDOM(html, 'div', attrs={'class': 'livePlayer'},
                              ret='data-liveurl')[0]
    elif url == self.live_link_cy:
        url = re.search(r'hls: [\'"](.+?)[\'"]', html).group(1)
    elif 'cloudskep' in html:
        url = client.parseDOM(html, 'a',
                              {'class': 'player-play-inline hidden'},
                              ret='href')[0]
        # Optional auth signature appended as a query parameter.
        signature = client.parseDOM(html, 'footer', {'class': 'footer'},
                                    ret='player-signature')
        if signature:
            url = '?wmsAuthSign='.join([url, signature[0]])
    else:
        if 'data-plugin-player' not in html:
            # Show page without an embedded player: query the player
            # endpoint with vid/year/showId from the url's query string.
            qs = parse_qs(urlparse(url).query)
            video_id = qs['vid'][0]
            year = qs['year'][0]
            show_id = qs['showId'][0]
            html = client.request(
                self.player_query.format(video_id=video_id,
                                         show_id=show_id, year=year))
        try:
            object_ = client.parseDOM(html, 'div', attrs={'id': 'Video-1'},
                                      ret='data-plugin-player')[0]
        except Exception:
            # Alternate container id used on some pages.
            object_ = client.parseDOM(html, 'div',
                                      attrs={'id': 'currentvideourl'},
                                      ret='data-plugin-player')[0]
        url = json.loads(client.replaceHTMLCodes(object_))['Url']
    # An 11-character value is a YouTube video id.
    if len(url) == 11:
        return self.yt_session(url)
    return url + user_agents.spoofer(referer=True, ref_str=referer)
def news_index(self, url):
    """Append the news categories (title + catid query) to self.list."""
    html = client.request(url)
    for item in client.parseDOM(html, 'li', attrs={'class': 'dropable.*?'}):
        title = client.parseDOM(item, 'a')[0].strip().capitalize()
        # capitalize() lowercases the tail, which can leave a Greek
        # medial sigma at the end of the word; swap in the final form.
        if title.endswith(u'σ'):
            title = title[:-1] + u'ς'
        category = client.parseDOM(item, 'a', ret='data-catid')[0]
        self.list.append({'title': title, 'query': category})
    return self.list
def index_cy(self, url):
    """List Cyprus shows from 'box' divs into self.list, with paging."""
    html = client.request(url)
    # Keep only boxes whose markup references the current path (filters
    # out unrelated boxes elsewhere on the page).
    items = [i for i in client.parseDOM(html, 'div', attrs={'class': 'box'}) if urlparse(url).path in i]
    try:
        next_link = client.parseDOM(html, 'a', attrs={'class': 'pager__link pager__link--next'}, ret='href')[0]
        next_link = urljoin(url.partition('?')[0], next_link)
    except Exception:
        # No pager: last page.
        next_link = None
    for item in items:
        try:
            title_field = client.parseDOM(item, 'div', {'class': 'box__overlay-title'})[0]
        except IndexError:
            # Box without an overlay title is not a show entry; skip it.
            continue
        # The degree sign is spelled out in Greek for readability.
        title = client.replaceHTMLCodes(client.parseDOM(title_field, 'a')[0]).replace(u'ᵒ', u' μοίρες').strip()
        subtitle = client.replaceHTMLCodes(client.parseDOM(item, 'div', {'class': 'box__overlay-subtitle'})[0])
        label = ' | '.join([title, subtitle])
        url = client.parseDOM(title_field, 'a', ret='href')[0]
        url = urljoin(self.basecy_link, url + '/webtv')
        image = client.parseDOM(item, 'img', ret='src')[0]
        data = {'title': label, 'image': image, 'url': url, 'name': title}
        if next_link:
            data.update({'next': next_link})
        self.list.append(data)
    return self.list
def event(self, url):
    """List an event's videos into self.list and render the directory."""
    html = client.request(url)
    # The page embeds the ids needed by the JSON videos endpoint.
    event_id = client.parseDOM(html, 'div', attrs={'id': 'event_id'})[0]
    teama_id = client.parseDOM(html, 'div', attrs={'id': 'teama_id'})[0]
    teamb_id = client.parseDOM(html, 'div', attrs={'id': 'teamb_id'})[0]
    items = client.request(self.event_link.format(event=event_id, team_a=teama_id, team_b=teamb_id), output='json')
    # Two payload schemas are seen: Has_Videos flag or MediaType marker.
    videos = [i for i in items if ('Has_Videos' in i and i['Has_Videos']) or ('MediaType' in i and i['MediaType'] == 'video')]
    for video in videos:
        title = client.replaceHTMLCodes(video['Title'])
        try:
            # Preferred schema: low-quality thumb + full image as fanart.
            image = video['ImageLowQuality']
            if image:
                image = ''.join([self.base_link, image])
            else:
                image = control.icon()
            fanart = video['Image']
            if fanart:
                fanart = ''.join([self.base_link, fanart])
            else:
                fanart = None
        except KeyError:
            # Fallback schema: only 'Image' exists; use it as the thumb.
            image = video['Image']
            if image:
                image = ''.join([self.base_link, image])
            else:
                image = control.icon()
            fanart = None
        url = ''.join([self.base_link, video['Link']])
        data = {'title': title, 'image': image, 'url': url, 'action': 'play', 'isFolder': 'False'}
        if fanart:
            data.update({'fanart': fanart})
        self.list.append(data)
    directory.add(self.list)
def stargr(url):
    """Alternative method: extract the m3u8 URL from the sixth script tag."""
    page = client.request(url)
    scripts = client.parseDOM(page, 'script')
    return re.search(r"'(?P<url>.+?\.m3u8)'", scripts[5]).group('url')