def cookie(self):
    """Log in to the site and return the session cookie string, or None on failure.

    Fetches the login page, extracts the CSRF token, then POSTs the
    credentials with that token both in the form body and the Cookie header.
    """
    try:
        login = '******'
        token = client.request(login)
        token = client.parseDOM(token, 'input', ret='value', attrs={'name': 'csrfmiddlewaretoken'})[0]
        headers = {'Cookie': 'csrftoken=%s' % token}
        post = {
            'username': self.user,
            'password': self.password,
            'csrfmiddlewaretoken': token,
            'next': ''
        }
        post = urllib.urlencode(post)
        c = client.request(login, post=post, headers=headers, output='cookie')
        return c
    # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
    except Exception:
        return
def resolve_live(self, lang):
    """Resolve the live stream for *lang*.

    Returns the direct stream url (quality setting '0'/'2'), the result of
    the m3u8 picker otherwise, or None on any error.
    """
    try:
        result = client.request(self.live_link.format(lang))
        result = json.loads(result)['url']
        if result.startswith('//'):
            result = 'http:' + result
        result = client.request(result)
        streams = json.loads(result)  # parse once; previous code parsed the payload twice
        if control.setting('backup_live') == 'false':
            stream = streams['primary']
        else:
            stream = streams['backup']
        if stream.startswith('//'):
            stream = ''.join(['http:', stream])
        if control.setting('quality_live') in ['0', '2']:
            return stream
        else:
            from resources.lib.loader import m3u8_picker
            return m3u8_picker(stream)
    # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
    except Exception:
        return
def district_list(self):
    """Build the list of district radio stations; returns None if the index page fails."""
    try:
        # py2 returns bytes (needs decoding); py3 text raises AttributeError
        try:
            markup = client.request(self.district_link).decode('windows-1253')
        except AttributeError:
            markup = client.request(self.district_link)
        stations = [cell for cell in parseDOM(markup, 'td') if cell]
    except Exception:
        return
    for station in stations:
        name = parseDOM(station, 'a')[0]
        page_url = parseDOM(station, 'a', ret='href')[0]
        page = client.request(page_url)
        iframe = parseDOM(page, 'iframe', ret='src')[0]
        player = client.request(iframe)
        stream = re.search(r'mp3: [\'"](.+?)[\'"]', player).group(1).replace('https', 'http')
        thumb = parseDOM(page, 'img', ret='src')[0]
        self.list.append({'title': name, 'image': thumb, 'url': stream})
    return self.list
def resolve_live(self, lang):
    """Resolve the live stream for *lang*, surfacing server-side 'ko' errors to the user."""
    payload = client.request(self.live_link.format(lang), error=True)
    meta_url = json.loads(payload)['url']
    if meta_url.startswith('//'):
        meta_url = 'http:' + meta_url
    payload = client.request(meta_url, error=True)
    _json = json.loads(payload)
    # a 'ko' status carries a user-facing message; show it and abort
    try:
        if _json.get('status') == 'ko':
            message = _json.get('msg').capitalize()
            control.infoDialog(message, time=5000)
            log_debug(message)
            return
    except Exception:
        return
    key = 'primary' if control.setting('backup_live') == 'false' else 'backup'
    stream = _json[key]
    if stream.startswith('//'):
        stream = ''.join(['http:', stream])
    if control.setting('quality_live') in ['0', '2']:
        return stream
    from resources.lib.loader import m3u8_picker
    return m3u8_picker(stream)
def gk_source_maker(link):
    """Scrape a gk item page into {'links', 'title', 'year', 'image'}.

    'links' is a list of (host_label, resolved_url) tuples; each raw link is
    followed once to discover its final location.
    """
    html = client.request(link)
    rows = client.parseDOM(html, 'tr', attrs={'id': 'link-\d+'})
    item_data = client.parseDOM(html, 'div', attrs={'class': 'data'})[0]
    title = client.parseDOM(item_data, 'h1')[0]
    year = client.parseDOM(item_data, 'span', attrs={'itemprop': 'dateCreated'})[0]
    year = re.search(r'(\d{4})', year).group(1)
    image = client.parseDOM(html, 'img', attrs={'itemprop': 'image'}, ret='src')[0]
    # was wrapped in a redundant identity comprehension `[u for u in ...]`
    urls = [client.request(u, output='geturl') for u in client.parseDOM(rows, 'a', ret='href')]
    hosts = [
        ''.join([control.lang(30015), urlsplit(url).netloc]) for url in urls
    ]
    data = {
        'links': list(zip(hosts, urls)),
        'title': title,
        'year': int(year),
        'image': image
    }
    return data
def cookie(self):
    """Authenticate against xsubs.tv and return the session cookie, or None on failure."""
    try:
        login = '******'
        page = client.request(login)
        csrf = client.parseDOM(page, 'input', ret='value', attrs={'name': 'csrfmiddlewaretoken'})[0]
        form = urlencode({
            'username': self.user,
            'password': self.password,
            'csrfmiddlewaretoken': csrf,
            'next': ''
        })
        session_headers = {'Cookie': 'csrftoken={0}'.format(csrf)}
        return client.request(login, post=form, headers=session_headers, output='cookie')
    except Exception as e:
        log.log('Xsubs.tv failed at cookie function, reason: ' + str(e))
        return
def download(self, path, url):
    """Download a subtitle file into *path*; returns the saved file path or None.

    Shows a dialog and aborts when rate-limited (HTTP 429) without credentials;
    logged-in users get a cached session cookie.
    """
    try:
        cookie = None
        anonymous = (self.user == '' or self.password == '')
        code, result = client.request(url, output='response', error=True)
        if code == '429' and anonymous is True:
            # rate-limited without an account: surface the server message and bail
            control.dialog.ok(str('xsubs.tv'), str(result), str(''))
            return
        elif anonymous is False:
            cookie = cache.get(self.cookie, 6)
        result, headers, content, cookie = client.request(
            url, cookie=cookie, output='extended')
        subtitle = content['Content-Disposition']
        subtitle = re.findall('"(.+?)"', subtitle)[0]
        # py2 delivers bytes needing decode; py3 str has no .decode —
        # previously this AttributeError was swallowed by the bare except
        try:
            subtitle = subtitle.decode('utf-8')
        except AttributeError:
            pass
        subtitle = os.path.join(path, subtitle)
        if not subtitle.endswith('.srt'):
            raise Exception()
        with open(subtitle, 'wb') as subFile:
            subFile.write(result)
        return subtitle
    except Exception:
        return
def items_list(self, url):
    """Scrape the article items of a series page into self.list.

    Multi-page series are fetched concurrently via worker threads; each
    article yields {'title', 'url', 'image'}.
    """
    page = url
    result = client.request(page)
    # previously only assigned inside the try; on failure the for-loop below
    # raised NameError ('items' unbound)
    items = []
    try:
        if "contentContainer_totalpages" in result:
            totalPages = int(
                re.search(r'contentContainer_totalpages = (\d+);',
                          result).group(1))
            seriesId = re.search(r'/templates/data/morevideos\?aid=(\d+)',
                                 result).group(1)
            threads = []
            for i in list(range(1, totalPages + 1)):
                threads.append(
                    workers.Thread(
                        self.thread,
                        self.more_videos + seriesId + "&p=" + str(i), i - 1))
                self.data.append('')
            [i.start() for i in threads]
            [i.join() for i in threads]
            for i in self.data:
                items.extend(client.parseDOM(i, "article"))
        else:
            items = client.parseDOM(result, "article")
    except Exception:
        pass
    for item in items:
        try:
            title = client.parseDOM(item, "h2")[0]
            title = client.replaceHTMLCodes(title)
            title = title.encode('utf-8')
            link = client.parseDOM(item, "a", ret="href")[0]
            if re.match(r'/.+/(\d+)/.+', link) is not None:
                # numeric episode link: resolve the real url through the episodes api
                episodeId = re.search(r'/.+/(\d+)/.+', link).group(1)
                episodeJSON = client.request(self.episodes_link + episodeId)
                episodeJSON = json.loads(episodeJSON)
                url = episodeJSON['url']
                url = client.replaceHTMLCodes(url)
                url = url.encode('utf-8')
            else:
                url = self.base_link + link + '/videos'
            image = client.parseDOM(item, "img", ret="src")[0]
            image = client.replaceHTMLCodes(image)
            image = image.encode('utf-8')
            self.list.append({'title': title, 'url': url, 'image': image})
        except Exception:
            pass
    return self.list
def download(self, path, url):
    """Fetch a subtitle from xsubs.tv, save it as 'subtitles.<ext>' and return that path."""
    try:
        session_cookie = None
        is_anonymous = (self.user == '' or self.password == '')
        status, body = client.request(url, output='response', error=True)
        if status == '429' and is_anonymous is True:
            # rate-limited without an account: show the server message and stop
            control.dialog.ok(str('xsubs.tv'), str(body), str(''))
            return
        elif is_anonymous is False:
            session_cookie = self.cookie()
        body, headers, content, session_cookie = client.request(
            url, cookie=session_cookie, output='extended')
        filename = re.findall('"(.+?)"', content['Content-Disposition'])[0]
        try:
            filename = filename.decode('utf-8')  # py2 bytes path; no-op failure on py3
        except Exception:
            pass
        target = control.join(path, filename)
        if not target.endswith('.srt'):
            raise Exception()
        with open(target, 'wb') as out:
            out.write(body)
        # normalize the on-disk name to subtitles.<original extension>
        parts = os_split(target)[1].split('.')
        final = control.join(os_split(target)[0], 'subtitles.' + parts[len(parts) - 1])
        rename(target, final)
        return final
    except Exception as e:
        _, __, tb = sys.exc_info()
        print(traceback.print_tb(tb))
        log_debug(
            'Xsubstv subtitle download failed for the following reason: ' + str(e))
        return
def category_list(url):
    # Build the directory of section lists for a category, handling both the
    # JSON api endpoint and the HTML page (INITIAL_STATE blob) forms of *url*.
    if BASE_API_LINK in url:
        # api endpoint: sections and pagination come straight from the payload
        _json = client.request(url, output='json')
        list_of_lists = _json['sectionContents']
        codename = parse_qs(split(url)[1])['pageCodename'][0]
        page = _json['pagination']['page']
        total_pages = _json['pagination']['totalPages']
    else:
        # html page: extract the INITIAL_STATE json from an inline <script>
        html = client.request(url)
        script = [
            i for i in client.parseDOM(html, 'script') if 'INITIAL_STATE' in i
        ][0]
        # strip the "var _STATE_ = " prefix and everything after the closing tag
        script = re.sub(r'var _*?\w+_*? = ', '',
                        script).partition(';</script>')[0]
        if script.endswith(';'):
            script = script[:-1]
        _json = json.loads(script)
        pages = _json['pages']
        # drop advertisement ('adman') sections
        list_of_lists = [
            i for i in list(pages['sectionsByCodename'].values())
            if 'adman' not in i['sectionContentCodename']
        ]
        codename = list(pages.keys())[-1]
        page = 1
        total_pages = pages[codename]['totalPages']
    next_url = GET_PAGE_CONTENT.format(page + 1, codename)
    self_list = []
    for list_ in list_of_lists:
        title = list_['portalName']
        section_codename = list_['sectionContentCodename']
        # skip sections with no tiles to show
        if not list_['tilesIds']:
            continue
        url = LIST_OF_LISTS_LINK.format(
            title=quote(section_codename),
            pagecodename=codename,
            backurl=codename,
            sectioncodename=list_['sectionContentCodename'])
        data = {'title': title, 'url': url}
        if page < total_pages:
            # attach pagination metadata so the UI can render a "next" entry
            data.update({
                'nextaction': 'categories',
                'nextlabel': 30500,
                'nexticon': control.addonmedia('next.jpg'),
                'next': next_url
            })
        self_list.append(data)
    return self_list
def _top50(self, url):
    """Populate self.list with the top-50 playlist items and return it."""
    if control.setting('debug') == 'false':
        # normal operation: fetch the obfuscated remote playlist
        playlists = client.request(thgiliwt(url), headers={
            'User-Agent': 'AliveGR, version: ' + control.version()
        })
    elif control.setting('local_remote') == '0':
        # debug: read a local xml copy, preferring utf-8
        local = control.setting('top50_local')
        try:
            with open(local, encoding='utf-8') as xml:
                playlists = xml.read()
        except Exception:
            with open(local) as xml:
                playlists = xml.read()
    elif control.setting('local_remote') == '1':
        playlists = client.request(control.setting('top50_remote'))
    else:
        playlists = client.request(url)
    self.data = client.parseDOM(playlists, 'item')
    for entry in self.data:
        label = client.parseDOM(entry, 'title')[0]
        genre = client.parseDOM(entry, 'genre')[0]
        video_url = client.parseDOM(entry, 'url')[0]
        thumb = thumb_maker(video_url.rpartition('=')[2])
        plot = client.parseDOM(entry, 'description')[0]
        parts = client.parseDOM(entry, 'duration')[0].split(':')
        seconds = (int(parts[0]) * 60) + int(parts[1])
        self.list.append({
            'label': label,
            'title': label.partition(' - ')[2],
            'image': thumb,
            'url': video_url,
            'plot': plot,
            'comment': plot,
            'duration': seconds,
            'genre': genre
        })
    return self.list
def ant1cy(link):
    """Alternative method"""
    api_url = 'https://www.ant1.com.cy/ajax.aspx?m=Atcom.Sites.Ant1iwo.Modules.TokenGenerator&videoURL={0}'
    html = client.request(link)
    # client.request returns the page text itself (cf. omegacy, which passes
    # the result straight to itertags); the previous `html.text` attribute
    # lookup raised AttributeError on a plain string
    m3u8 = re.findall("'(.+?)'", list(itertags(html, 'script'))[-2].text)[1]
    stream = client.request(api_url.format(m3u8))
    return stream + spoofer()
def geo_loc():
    """Return the visitor's country name, falling back through three geo-ip services."""
    lookup = client.request('https://extreme-ip-lookup.com/json/', output='json')
    if not lookup or 'error' in lookup:
        lookup = client.request('https://ip-api.com/json/', output='json')
    if not lookup or 'error' in lookup:
        lookup = client.request('https://geoip.siliconweb.com/geo.json', output='json')
    return lookup.get('country', 'Worldwide')
def omegacy(link):
    """ALternative method"""
    # grab the session cookie first, then re-request the page with it
    session = client.request(link, close=False, output='cookie')
    page = client.request(link, cookie=session)
    scripts = list(itertags(page, 'script'))
    player = [s for s in scripts if s.text.startswith(u'var playerInstance')][0].text
    stream = re.findall('"(.+?)"', player)[1]
    return spoofer(url=stream, referer=True, ref_str=link)
def cached_resolve(url):
    """Resolve a radio page or a watch page to a direct stream url."""
    if RADIO_BASE in url:
        page = client.request(url)
        return re.search(r'"stationAACStream":"(.+?)\?', page).group(1)
    video_id = re.search(r'watch/(\d+)/', url).group(1)
    payload = client.request(PLAYER_LINK.format(video_id), output='json')
    return payload['url']
def _playlist(self, url, limit):
    """Fetch up to *limit* pages of a YouTube playlist into self.list.

    Each item becomes {'title', 'url', 'image'}; items whose thumbnail is the
    generic '/default.jpg' placeholder are skipped. Returns self.list, or
    None when the first request fails (bad key / quota exhausted).
    """
    try:
        result = client.request(url)
        result = json.loads(result)
        items = result['items']
    except Exception:
        log_debug(
            'Youtube: Could not fetch items from the cdn, invalid key or no quota left'
        )
        return
    for _ in list(range(1, limit)):
        try:
            # idiom fix: was `not 'nextPageToken' in result`
            if 'nextPageToken' not in result:
                raise Exception
            # renamed from `next`, which shadowed the builtin
            next_url = url + '&pageToken=' + result['nextPageToken']
            result = json.loads(client.request(next_url))
            items += result['items']
        except Exception:
            pass
    for item in items:
        try:
            title = item['snippet']['title']
            try:
                title = py2_enc(title)
            except AttributeError:
                pass
            # renamed from `url`, which clobbered the parameter mid-function
            video_id = item['id']
            try:
                video_id = py2_enc(video_id)
            except AttributeError:
                pass
            image = item['snippet']['thumbnails']['high']['url']
            if '/default.jpg' in image:
                raise Exception
            try:
                image = py2_enc(image)
            except AttributeError:
                pass
            self.list.append({'title': title, 'url': video_id, 'image': image})
        except Exception:
            pass
    return self.list
def geo_loc():
    """Return a country name honoring the geoloc_override setting.

    Override '0' uses a geo-ip lookup, '1' forces 'Greece', anything else
    yields 'Worldwide'. A failed or malformed lookup degrades to 'Worldwide'
    instead of raising (previously json.loads crashed on a None/error body).
    """
    json_obj = client.request('http://extreme-ip-lookup.com/json/')
    if not json_obj or 'error' in json_obj:
        json_obj = client.request('http://ip-api.com/json/')
    if control.setting('geoloc_override') == '0':
        try:
            return json.loads(json_obj)['country']
        except Exception:
            # both lookup services failed or returned garbage
            return 'Worldwide'
    elif control.setting('geoloc_override') == '1':
        return 'Greece'
    else:
        return 'Worldwide'
def remote(url):
    """Fetch Youtube API credentials from a paste service.

    Returns [client_id, api_key, client_secret] (or the raw 3/4 key lines),
    or None when the payload is empty or unrecognized. Supports pastebin/
    hastebin/osmc/debian/ubuntu paste urls, bare key lists, an xml form, and
    google-style oauth client json.
    """
    # rewrite pretty paste urls to their raw/plain views
    # (idiom fix: was `not 'raw' in url` / `not 'plain' in url`)
    if ('pastebin' in url or 'hastebin' in url or 'osmc.tv' in url) and 'raw' not in url:
        address = re.sub(r'(^.+?\.(?:com|tv)/)(\w+)', r'\1raw/\2', url)
    elif 'debian' in url and 'plain' not in url:
        address = re.sub(r'(^.+?\.net/)(\w+)', r'\1plain/\2', url)
    else:
        address = url
    if 'ubuntu' in url and 'plain' not in url:
        # ubuntu pastes keep the payload in the second <pre> block
        html = client.request(address)
        text = client.parseDOM(html, 'pre')[1]
        text = client.replaceHTMLCodes(text)
    else:
        text = client.request(address)
    if not text:
        return
    text = text.strip('\r\n')
    if len(text.splitlines()) in (3, 4):
        # bare newline-separated key list
        keys = text.splitlines()
    elif text.startswith('<?xml'):
        keys = [
            client.parseDOM(text, 'id')[0],
            client.parseDOM(text, 'api_key')[0],
            client.parseDOM(text, 'secret')[0]
        ]
    elif address.endswith('.json') or 'installed' in text:
        # google-style oauth client json; may lack an api_key, so prompt
        payload = json.loads(text)
        if 'installed' in payload:
            payload = payload['installed']
        if 'api_key' not in payload:
            control.okDialog(heading='Youtube Setup', line1=control.lang(30023))
            api_key = control.inputDialog()
            if not api_key:
                return
        else:
            api_key = payload['api_key']
        keys = [payload['client_id'], api_key, payload['client_secret']]
    else:
        # removed a duplicated, unreachable `else: keys = None` branch
        keys = None
    return keys
def _radio_loop(station):
    """Scrape one district radio station cell into a {'title','image','url'} dict."""
    name = parseDOM(station, 'a')[0]
    page_url = parseDOM(station, 'a', ret='href')[0]
    # the station page is windows-1253 encoded; fetch raw bytes and decode
    page = client.request(page_url, as_bytes=True)
    page = page.decode('windows-1253')
    iframe = parseDOM(page, 'iframe', ret='src')[0]
    player = client.request(iframe)
    stream = re.search(r'mp3: [\'"](.+?)[\'"]', player).group(1).replace('https', 'http')
    thumb = parseDOM(page, 'img', ret='src')[0]
    return {'title': name, 'image': thumb, 'url': stream}
def district_list():
    """Fetch all district radio stations concurrently and return the item list."""
    markup = client.request(DISTRICT_LINK, as_bytes=True)
    markup = markup.decode('windows-1253')
    cells = [td for td in parseDOM(markup, 'td') if td]
    results = []
    with concurrent_futures.ThreadPoolExecutor(5) as pool:
        jobs = [pool.submit(_radio_loop, cell) for cell in cells]
        for job in concurrent_futures.as_completed(jobs):
            entry = job.result()
            if entry:
                results.append(entry)
    return results
def reddit_subs(action, sr_name):
    """Subscribe ('sub') or unsubscribe ('unsub') subreddits via the reddit api.

    With action=None, defaults to 'sub' and pauses briefly before refreshing.
    With sr_name=None, operates on all bookmarked subreddits.
    """
    sleep = action is None
    if action is None:
        action = 'sub'
    if sr_name is None:
        from tulip.bookmarks import get
        bookmarks = get(file_=saved_subrs)
        if not bookmarks:
            return
        sr_name = ','.join([b['sr_name'] for b in bookmarks])
    response = client.request(
        base_link() + '/api/subscribe',
        post={'action': action, 'sr_name': sr_name},
        headers=request_headers(),
        output='response'
    )
    if control.setting('debugging.toggle') == 'true':
        log_debug(response)
    if action == 'unsub' or sleep:
        if sleep:
            control.sleep(200)
        control.refresh()
def dev():
    """Toggle developer/debug mode, gated behind a remotely-stored password."""
    if control.setting('toggler') == 'true':
        control.setSetting('toggler', 'false')
        return
    if control.setting('toggler') == 'false':
        dwp = control.dialog.input('I hope you know what you\'re doing!',
                                   type=control.password_input,
                                   option=control.verify)
        # the expected password lives at an obfuscated remote location
        text = client.request(thgiliwt('=' + leved))
        if text == dwp:
            control.setSetting('toggler', 'true')
            cache.clear(withyes=False)
        else:
            import sys
            control.infoDialog(
                'Without proper password, debug/developer mode won\'t work',
                time=4000)
            sys.exit()
def pod_episodes(self, url):
    """List the episodes of a single podcast page into self.list."""
    html = client.request(url)
    dropdown = client.parseDOM(html, 'div', attrs={'class': 'col-8 col-sm-4 p-0'})[0]
    image = re.search(r'background-image: url\("(.+?)"\)', html).group(1)
    for option in re.findall(r'(<option.+?option>)', dropdown, re.S):
        date = re.search(r'(\d{2}/\d{2}/\d{4})', option).group(1)
        heading = client.parseDOM(html, 'h2', attrs={'class': 'mb-3.+?'})[0]
        title = ' - '.join([heading, date])
        stream = ''.join([
            self.radio_base,
            re.search(r'data-url = "([\w\-/]+)"', option).group(1)
        ])
        self.list.append({'title': title, 'image': image, 'url': stream})
    return self.list
def event_list(self, url):
    # Build the list of events on a gm events page; each entry carries the
    # event title, absolute link, plot text and a thumbnail.
    html = client.request(url)
    items = client.parseDOM(html, 'div',
                            attrs={'style': 'margin-bottom: 10px'})
    for item in items:
        title = client.parseDOM(item, 'a',
                                attrs={'class': 'btn btn-default'})[0]
        # NOTE(review): the image is parsed from the whole page (html), not
        # from `item`, so every entry gets the first matching thumbnail —
        # confirm this is intentional and not a typo for `item`
        image = client.parseDOM(
            html, 'img',
            attrs={'class': 'thumbnail img-responsive pull-right'},
            ret='src')[0]
        image = urljoin(GM_BASE, image)
        link = client.parseDOM(item, 'a',
                               attrs={'class': 'btn btn-default'},
                               ret='href')[0]
        link = urljoin(GM_BASE, link)
        plot = client.parseDOM(item, 'span', attrs={'class': 'pull-right'})[0]
        self.list.append({
            'title': title,
            'url': link,
            'plot': plot,
            'image': image
        })
    return self.list
def blacklister():
    """Return full urls for the kids-content pages blacklisted on pastebin."""
    raw = client.request('https://pastebin.com/raw/eh5pPA6K')
    return [''.join([GM_BASE, path]) for path in evaluate(raw)]
def thread(self, url, i):
    """Worker: fetch *url* and store the response body at slot *i* of self.data."""
    try:
        self.data[i] = client.request(url)
    except Exception:
        return
def pod_listing(self, url): html = client.request(url) listing = client.parseDOM( html, 'div', attrs={'class': 'row border-bottom pt-4 m-0 show-item'}) nexturl = re.sub(r'\d(?!\d)', lambda x: str(int(x.group(0)) + 1), url) for item in listing: title = client.parseDOM(item, 'h3')[0].replace(''', '\'') image = ''.join( [self.radio_base, client.parseDOM(item, 'img', ret='src')[0]]) url = ''.join( [self.radio_base, client.parseDOM(item, 'a', ret='href')[0]]) self.list.append({ 'title': title, 'image': image, 'url': url, 'nextaction': 'podcasts', 'next': nexturl, 'nextlabel': 32500 }) return self.list
def obtain_authorization(_cookie, _uh):
    # Complete the reddit oauth "authorize" step: POST the Allow form using
    # the session cookie and modhash (_uh), read the redirect Location from
    # the raw response headers, and exchange the ?code= value for tokens.
    # `state`, `redirect_uri`, `client_id` and `scope` are module globals.
    data = {
        'authorize': 'Allow',
        'state': state,
        'redirect_uri': redirect_uri,
        'response_type': 'code',
        'client_id': client_id,
        'duration': 'permanent',
        'scope': ' '.join(scope),
        'uh': _uh
    }
    # redirect=False so the 3xx Location header survives for inspection
    headers = client.request(api_link('authorize'),
                             cookie=_cookie,
                             post=data,
                             redirect=False,
                             output='headers')
    # headers arrive as one text blob; split "name: value" lines into a dict
    geturl = dict([
        line.partition(': ')[::2] for line in str(headers).splitlines()
    ]).get('location')
    token = dict(parse_qsl(urlparse(geturl).query)).get('code')
    if not token:
        return
    get_tokens(code=token)
def episodes_listing(self, url):
    """Scrape an episode grid page into self.list."""
    html = client.request(url)
    container = client.parseDOM(html, 'div', attrs={'class': 'row listrow list2 ?'})[0]
    entries = client.parseDOM(container, 'div', attrs={'class': '.+?list-item color.+?'})
    for entry in entries:
        label = client.parseDOM(entry, 'h3')[0].replace('<br/>', ' ').replace('<br>', ' ')
        thumb = client.parseDOM(entry, 'img', ret='src')[0]
        link = ''.join([self.base_link, client.parseDOM(entry, 'a', ret='href')[0]])
        self.list.append({'title': label, 'url': link, 'image': thumb})
    return self.list
def index_cy(self, url):
    """List cyprus index boxes, appending a /webtv url and optional pagination."""
    html = client.request(url)
    # keep only the boxes that link back into the current path
    boxes = [b for b in client.parseDOM(html, 'div', attrs={'class': 'box'})
             if urlparse(url).path in b]
    try:
        next_link = client.parseDOM(
            html, 'a', attrs={'class': 'pager__link pager__link--next'}, ret='href')[0]
        next_link = urljoin(url.partition('?')[0], next_link)
    except Exception:
        next_link = None
    for box in boxes:
        try:
            overlay = client.parseDOM(box, 'div', {'class': 'box__overlay-title'})[0]
        except IndexError:
            continue
        name = client.replaceHTMLCodes(
            client.parseDOM(overlay, 'a')[0]).replace(u'ᵒ', u' μοίρες').strip()
        subtitle = client.replaceHTMLCodes(
            client.parseDOM(box, 'div', {'class': 'box__overlay-subtitle'})[0])
        label = ' | '.join([name, subtitle])
        href = client.parseDOM(overlay, 'a', ret='href')[0]
        link = urljoin(self.basecy_link, href + '/webtv')
        thumb = client.parseDOM(box, 'img', ret='src')[0]
        entry = {'title': label, 'image': thumb, 'url': link, 'name': name}
        if next_link:
            entry.update({'next': next_link})
        self.list.append(entry)
    return self.list