def spoofer(headers=None, _agent=True, age_str=None, referer=False, ref_str='', url=None):
    """Build a Kodi-style '|header=value' suffix from the given headers.

    Returns the suffix alone, the suffix appended to *url* when one is
    supplied, or '' when no headers end up being set.  Mutates a caller
    supplied ``headers`` dict in place (only the Referer can be added to it).
    """
    if age_str is None:
        # Lazily pull a cached random user agent when none was supplied.
        from tulip import cache
        age_str = cache.get(randomagent, 12)
    if not headers:
        headers = {}
    # The user agent is only injected when the caller passed no headers at all.
    if _agent and age_str and not headers:
        headers['User-Agent'] = age_str
    if referer and ref_str:
        headers['Referer'] = ref_str
    if not headers:
        return ''
    suffix = '|' + urlencode(headers)
    return url + suffix if url else suffix
def cookie(self):
    # Log into the (redacted) xsubs login endpoint and return the session
    # cookie string, or None when any step of the handshake fails.
    try:
        login = '******'
        page = client.request(login)
        # Django login forms require the CSRF token both in the form body
        # and in the csrftoken cookie.
        csrf = client.parseDOM(page, 'input', ret='value', attrs={'name': 'csrfmiddlewaretoken'})[0]
        form = urlencode({
            'username': self.user,
            'password': self.password,
            'csrfmiddlewaretoken': csrf,
            'next': ''
        })
        return client.request(
            login, post=form, headers={'Cookie': 'csrftoken={0}'.format(csrf)}, output='cookie'
        )
    except Exception as e:
        log.log('Xsubs.tv failed at cookie function, reason: ' + str(e))
        return
def videos_list(self, url, lang):
    """Fetch a video listing from the API, enrich each entry concurrently
    via ``self.list_worker`` threads, and return the checked entries.

    Appends {'title', 'url', 'image'} dicts to ``self.list``; entries whose
    worker did not produce a non-empty 'check' value are dropped.
    Returns ``self.list``, or None when the API request/parsing fails.
    """
    try:
        request = urlencode({'request': self.post_link % (url, lang)})
        result = client.request(self.api_link, post=request)
        result = json.loads(result)
        items = []
        # The API wraps the payload in one of several alternative list keys.
        if 'themedetailslist' in result:
            items = result['themedetailslist']
        elif 'programDetailsList' in result:
            items = result['programDetailsList']
        elif 'homelist' in result:
            items = result['homelist']
    except Exception:
        # Network or JSON failure: signal "no listing" to the caller.
        return
    for item in items:
        try:
            title = client.replaceHTMLCodes(item['title'])
            title = title.encode('utf-8')  # legacy py2-style byte strings — kept as-is
            url = str(item['id'])
            url = url.encode('utf-8')
            image = self.img2_link % (url, url)
            image = image.encode('utf-8')
            self.list.append({'title': title, 'url': url, 'image': image})
        except Exception:
            # Skip malformed items instead of aborting the whole listing.
            pass
    # Enrich every collected item concurrently.
    threads = [workers.Thread(self.list_worker, i, lang) for i in range(len(self.list))]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    # Keep only items whose worker produced a non-empty 'check' value
    # (missing key -> get() returns None -> filtered out, as before).
    self.list = [i for i in self.list if i.get('check') not in ('', None)]
    return self.list
def resolve(self, url, lang):
    # Ask the API for the article behind *url* and return its stream URI.
    article = self.resolve_link % url
    payload = urlencode({'request': self.post_link % (article, lang)})
    response = client.request(self.api_link, post=payload)
    return json.loads(response)['articlelist']['videoUri']
def programs_list(self, url, lang):
    """Fetch the programs listing from the API and append one
    {'title', 'url', 'image'} dict per program to ``self.list``.

    Returns ``self.list``, or None when the API request/parsing fails.
    """
    try:
        request = urlencode({'request': self.post_link % (url, lang)})
        result = client.request(self.api_link, post=request)
        result = json.loads(result)
        items = result['programs']
    except Exception:
        # Network or JSON failure: signal "no listing" to the caller.
        return
    for item in items:
        try:
            title = client.replaceHTMLCodes(item['title'])
            title = title.encode('utf-8')  # legacy py2-style byte strings — kept as-is
            url = self.program_link % str(item['pId'])
            url = url.encode('utf-8')
            image = self.img1_link % item['img']
            image = image.encode('utf-8')
            self.list.append({'title': title, 'url': url, 'image': image})
        except Exception:
            # Skip malformed items instead of aborting the whole listing.
            pass
    return self.list
def cookie(self):
    # Sign into the xforum account and return the session cookie string,
    # or None on failure (with the traceback printed for debugging).
    try:
        login = ''.join([self.base_link, '/xforum/account/signin/'])
        page = client.request(login)
        # Django login forms require the CSRF token both in the form body
        # and in the csrftoken cookie.
        csrf = client.parseDOM(page, 'input', ret='value', attrs={'name': 'csrfmiddlewaretoken'})[0]
        form = urlencode({
            'username': self.user,
            'password': self.password,
            'csrfmiddlewaretoken': csrf,
            'next': ''
        })
        return client.request(
            login, post=form, headers={'Cookie': 'csrftoken={0}'.format(csrf)}, output='cookie'
        )
    except Exception as e:
        _, __, tb = sys.exc_info()
        print(traceback.print_tb(tb))
        log_debug('Xsubs.tv failed at cookie function, reason: ' + str(e))
        return
def run(self, query=None):
    """Search all subtitle providers (subtitlesgr, xsubstv, podnapisi,
    vipsubs) for the current video (or an explicit manual *query*), then
    sort, tag and publish the results as Kodi list items.

    Queries are derived from player info-labels when *query* is None:
    episode (tvshowtitle set), movie (year set) or raw filename otherwise.
    """
    # Bail out early if Greek is not among the requested subtitle languages.
    if 'Greek' not in str(self.langs).split(','):
        control.directory(self.syshandle)
        control.infoDialog(control.lang(30002))
        return
    dup_removal = False
    if not query:
        title = match_title = control.infoLabel(
            '{0}.Title'.format(infolabel_prefix))
        with concurrent_futures.ThreadPoolExecutor(5) as executor:
            # Non-ASCII title: fall back to the original title.
            if re.search(r'[^\x00-\x7F]+', title) is not None:
                title = control.infoLabel(
                    '{0}.OriginalTitle'.format(infolabel_prefix))
            # Strip accents/diacritics so provider queries are plain ASCII.
            title = unicodedata.normalize('NFKD', title).encode('ascii', 'ignore')
            title = py3_dec(title)
            year = control.infoLabel('{0}.Year'.format(infolabel_prefix))
            tvshowtitle = control.infoLabel(
                '{0}.TVshowtitle'.format(infolabel_prefix))
            season = control.infoLabel(
                '{0}.Season'.format(infolabel_prefix))
            # Zero-pad single-digit season/episode numbers.
            if len(season) == 1:
                season = '0' + season
            episode = control.infoLabel(
                '{0}.Episode'.format(infolabel_prefix))
            if len(episode) == 1:
                episode = '0' + episode
            # Specials like "S3" are mapped to season 0.
            if 's' in episode.lower():
                season, episode = '0', episode[-1:]
            if tvshowtitle != '':  # episode
                title_query = '{0} {1}'.format(tvshowtitle, title)
                season_episode_query = '{0} S{1} E{2}'.format(
                    tvshowtitle, season, episode)
                season_episode_query_nospace = '{0} S{1}E{2}'.format(
                    tvshowtitle, season, episode)
                threads = [
                    executor.submit(self.subtitlesgr, season_episode_query_nospace),
                    executor.submit(self.xsubstv, season_episode_query),
                    executor.submit(self.podnapisi, season_episode_query),
                    executor.submit(self.vipsubs, season_episode_query)
                ]
                # Multiple query variants can yield duplicates; dedupe later.
                dup_removal = True
                log_debug('Dual query used for subtitles search: ' + title_query + ' / ' + season_episode_query)
                if control.setting('queries') == 'true':
                    threads.extend([
                        executor.submit(self.subtitlesgr, title_query),
                        executor.submit(self.vipsubs, title_query),
                        executor.submit(self.podnapisi, title_query),
                        executor.submit(self.subtitlesgr, season_episode_query)
                    ])
            elif year != '':  # movie
                query = '{0} ({1})'.format(title, year)
                threads = [
                    executor.submit(self.subtitlesgr, query),
                    executor.submit(self.xsubstv, query),
                    executor.submit(self.vipsubs, query),
                    executor.submit(self.podnapisi, query)
                ]
            else:  # file
                # No show/year metadata: derive a query from the filename.
                query, year = control.cleanmovietitle(title)
                if year != '':
                    query = '{0} ({1})'.format(query, year)
                threads = [
                    executor.submit(self.subtitlesgr, query),
                    executor.submit(self.xsubstv, query),
                    executor.submit(self.vipsubs, query),
                    executor.submit(self.podnapisi, query)
                ]
            # Collect results as each provider finishes.
            for future in concurrent_futures.as_completed(threads):
                item = future.result()
                if not item:
                    continue
                self.list.extend(item)
            if not dup_removal:
                log_debug('Query used for subtitles search: ' + query)
            # NOTE(review): in the episode path `query` is still None here
            # — confirm self.query is not consumed in that case.
            self.query = query
            self.query = py3_dec(self.query)
    else:  # Manual query
        with concurrent_futures.ThreadPoolExecutor(5) as executor:
            query = match_title = py3_dec(query)
            threads = [
                executor.submit(self.subtitlesgr, query),
                executor.submit(self.xsubstv, query),
                executor.submit(self.vipsubs, query),
                executor.submit(self.podnapisi, query)
            ]
            for future in concurrent_futures.as_completed(threads):
                item = future.result()
                if not item:
                    continue
                self.list.extend(item)
    if len(self.list) == 0:
        control.directory(self.syshandle)
        return
    # Group results by provider, in a fixed provider order.
    f = []
    # noinspection PyUnresolvedReferences
    f += [i for i in self.list if i['source'] == 'xsubstv']
    f += [i for i in self.list if i['source'] == 'subtitlesgr']
    f += [i for i in self.list if i['source'] == 'podnapisi']
    f += [i for i in self.list if i['source'] == 'vipsubs']
    self.list = f
    if dup_removal:
        # Deduplicate identical result dicts (hashable-tuple round trip).
        self.list = [
            dict(t) for t in {tuple(d.items()) for d in self.list}
        ]
    # Prefix names with their provider tag.
    for i in self.list:
        try:
            if i['source'] == 'xsubstv':
                i['name'] = u'[xsubstv] {0}'.format(i['name'])
            elif i['source'] == 'podnapisi':
                i['name'] = u'[podnapisi] {0}'.format(i['name'])
            elif i['source'] == 'vipsubs':
                i['name'] = u'[vipsubs] {0}'.format(i['name'])
        except Exception:
            pass
    # Sort key per user setting; settings '1'-'3' sort descending.
    if control.setting('sorting') == '1':
        key = 'source'
    elif control.setting('sorting') == '2':
        key = 'downloads'
    elif control.setting('sorting') == '3':
        key = 'rating'
    else:
        key = 'title'
    self.list = sorted(self.list, key=lambda k: k[key].lower(),
                       reverse=control.setting('sorting') in ['1', '2', '3'])
    # Publish one Kodi list item per subtitle result.
    for i in self.list:
        u = {'action': 'download', 'url': i['url'], 'source': i['source']}
        u = '{0}?{1}'.format(self.sysaddon, urlencode(u))
        item = control.item(label='Greek', label2=i['name'])
        item.setArt({'icon': str(i['rating'])[:1], 'thumb': 'el'})
        # Mark the entry as "synced" when the subtitle filename closely
        # matches the playing title (per the sync_probability setting).
        if ratio(
                splitext(i['title'].lower())[0],
                splitext(match_title)[0]) >= int(
                    control.setting('sync_probability')):
            item.setProperty('sync', 'true')
        else:
            item.setProperty('sync', 'false')
        item.setProperty('hearing_imp', 'false')
        control.addItem(handle=self.syshandle, url=u, listitem=item, isFolder=False)
    control.directory(self.syshandle)
def items_list(self, link):
    """Fetch a reddit JSON listing for *link* and convert it into a list of
    plain dicts (one per thing: t1 comment, t3 link, t5 subreddit,
    LabeledMulti, 'more' stub), appended to ``self.data`` via the nested
    ``processor`` helper.  Returns the resulting list (``self.list``).
    """
    if not link.startswith('http'):
        link = base_link() + link
    link = client.quote_paths(link)
    # Route through oauth.reddit.com when authenticated, www otherwise.
    link = link.replace('old.', 'oauth.' if access_boolean() else 'www.')
    link = link.replace('www.', 'oauth.' if access_boolean() else 'www.')

    #### Start of nested helper functions ####

    # Pulls images and thumbnails
    def image_generator(children_data):
        print(children_data)
        image = control.addonInfo('icon')
        fanart = control.fanart()
        try:
            # Each candidate is optional; chained .get() raises
            # AttributeError on a missing parent, which selects None.
            try:
                m_thumb = children_data.get('media').get('oembed').get(
                    'thumbnail_url')
            except AttributeError:
                m_thumb = None
            try:
                s_thumb = children_data.get('secure_media').get(
                    'oembed').get('thumbnail_url')
            except AttributeError:
                s_thumb = None
            try:
                p_thumb = children_data.get('preview').get('oembed').get(
                    'thumbnail_url')
            except AttributeError:
                p_thumb = None
            try:
                u_thumb = children_data.get('preview').get(
                    'images')[0].get('source').get('url')
            except AttributeError:
                u_thumb = None
            images = [
                children_data.get('community_icon'),
                children_data.get('icon_img'),
                children_data.get('header_img'),
                children_data.get('thumbnail'),
                children_data.get('icon_img'),
                children_data.get('header_img'),
                children_data.get('banner_img'),
                children_data.get('url')
            ]
            # oembed/preview thumbnails are preferred over the raw url,
            # hence inserted before the last two candidates.
            if m_thumb:
                images.insert(-2, m_thumb)
            if s_thumb:
                images.insert(-2, s_thumb)
            if p_thumb:
                images.insert(-2, p_thumb)
            if u_thumb:
                images.insert(-2, u_thumb)
            # First candidate that is a real jpg/png wins.
            for i in images:
                if i in ['default', 'spoiler', 'image', 'self'] or not i:
                    continue
                elif '.jpg' in i or '.png' in i:
                    image = i
                    break
            if '?' in image:
                image = image.partition('?')[0]
        except (KeyError, IndexError, TypeError):
            pass
        # embed.ly proxies carry the real image in their query string.
        if 'embed.ly' in image:
            image = dict(parse_qsl(urlparse(image).query))['url']
        try:
            try:
                p_fanart = children_data.get('preview').get(
                    'images')[0].get('source').get('url')
            except AttributeError:
                p_fanart = None
            try:
                s_fanart = children_data.get('secure_media').get(
                    'oembed').get('thumbnail_url')
            except AttributeError:
                s_fanart = None
            fanarts = [children_data.get('banner_background_image')]
            if p_fanart:
                fanarts.insert(0, p_fanart)
            if s_fanart:
                fanarts.insert(-1, s_fanart)
            # First non-empty candidate wins.
            for f in fanarts:
                if not f:
                    continue
                elif f:
                    fanart = f
                    break
            if '?' in fanart:
                fanart = fanart.partition('?')[0]
        except (KeyError, IndexError):
            pass
        return image, fanart

    # Comment
    def t1_kind(children_data, next_url):
        author = children_data['author']
        body = legacy_replace(children_data['body'])
        short = legacy_replace(body[:50] + '...')
        image = control.addonInfo('icon')
        subreddit = children_data['subreddit']
        subreddit_id = children_data['subreddit_id']
        name = children_data['name']
        if children_data['replies']:
            reply_json = children_data['replies']
            replies_children = reply_json['data']['children']
            replies = len(replies_children)
            try:
                # Permalinks of the direct replies, serialized for reuse.
                comprehension = [
                    base_link() + client.quote_paths(r['data']['permalink'])
                    for r in replies_children
                ]
                replies_urls = json.dumps(comprehension)
            except KeyError:
                replies_urls = None
        else:
            replies_urls = None
            replies = 0
        replies_num = ' | ' + control.lang(30102) + str(
            replies) if replies > 0 else ''
        title = short.replace(
            '\n', '') + self.formatting + '[I]' + author + '[/I]' + replies_num
        url = permalink = base_link() + children_data['permalink']
        link_id = children_data['link_id']
        pairs = {
            'title': title, 'url': url, 'permalink': permalink,
            'image': image, 'subreddit': subreddit, 'kind': 't1',
            'subreddit_url': base_link() + '/r/' + subreddit,
            'next': next_url, 'subreddit_id': subreddit_id, 'name': name,
            'body': body, 'plot': body, 'query': replies_urls,
            'replies_urls': replies_urls, 'link_id': link_id
        }
        return pairs

    # Link/Thread
    def t3_kind(children_data, next_url):
        title = client.replaceHTMLCodes(children_data['title'])
        name = children_data['name']
        author = children_data['author']
        domain = children_data['domain']
        num_comments = str(children_data['num_comments'])
        try:
            # Self posts carry their text as the plot; empty bodies fall
            # back to the title.
            if domain.startswith('self.'):
                selftext = legacy_replace(children_data['selftext'])
                if selftext == '':
                    selftext = title
            else:
                selftext = None
        except KeyError:
            selftext = None
        subreddit = children_data['subreddit']
        subreddit_id = children_data['subreddit_id']
        url = children_data['url']
        permalink = base_link() + children_data['permalink']
        image, fanart = image_generator(children_data)
        if access_boolean() and 'reddit' in url and not 'video' in url:
            url = url.replace('www.reddit', 'oauth.reddit')
        label = title + ' | ' + subreddit + ' | ' + '[B]' + author + '[/B]' + self.formatting + '[I]' + domain + '[/I]' + ' | ' + '[B]' + control.lang(
            30103) + num_comments + '[/B]'
        pairs = {
            'label': label, 'title': title, 'url': url, 'image': image,
            'fanart': fanart, 'next': next_url,
            'subreddit_id': subreddit_id, 'subreddit': subreddit,
            'subreddit_url': base_link() + '/r/' + subreddit, 'kind': 't3',
            'permalink': permalink, 'domain': domain, 'name': name,
            'selftext': selftext, 'author': author, 'plot': selftext,
            'query': client.quote_paths(permalink)
        }
        return pairs

    # Subreddit
    def t5_kind(children_data, next_url):
        display_name = client.replaceHTMLCodes(
            children_data['display_name'])
        title = client.replaceHTMLCodes(children_data['title'])
        public_description = legacy_replace(
            children_data['public_description'])
        description = legacy_replace(children_data['description'])
        # All description variants serialized into the plot field.
        plot = json.dumps({
            'title': title,
            'public_description': public_description,
            'description': description
        })
        subscribers = str(children_data['subscribers'])
        url = base_link() + children_data['url']
        name = children_data['name']
        image, fanart = image_generator(children_data)
        pairs = {
            'title': title + ' | ' + subscribers +
            self.formatting + '[I]' + display_name + '[/I]',
            'url': url, 'image': image, 'next': next_url,
            'fanart': fanart, 'display_name': display_name, 'name': name,
            'kind': 't5', 'plot': plot
        }
        return pairs

    # Multi
    def lm_kind(children_data):
        display_name = children_data['display_name']
        name = children_data['name']
        # description = html_processor(children_data['description_html'])
        try:
            image = children_data['icon_url']
            if not image:
                raise KeyError
        except KeyError:
            image = control.addonInfo('icon')
        path = base_link() + children_data['path']
        subreddits = json.dumps(children_data['subreddits'])
        pairs = {
            'title': display_name, 'url': path, 'image': image,
            'subreddits': subreddits, 'kind': 'LabeledMulti', 'name': name
        }
        return pairs

    # "more" stub: placeholder for collapsed comments.
    def more_kind(children_data):
        # title = '' if children_data['depth'] == 0 else '>' * children_data['depth'] + ' ' + control.lang(30117)
        title = control.lang(30144)
        name, id = (children_data['name'], children_data['id'])
        # Short names/ids indicate a stub; fall back to the parent thing.
        if len(name) < 10:
            name = children_data['parent_id']
        if len(id) < 7:
            id = children_data['parent_id'][3:]
        parsed = urlparse(link)
        permalink = urlunparse(parsed._replace(path=parsed.path + id))
        if children_data['children']:
            replies_urls = json.dumps([
                urlunparse(parsed._replace(path=parsed.path + u))
                for u in children_data['children']
            ])
        else:
            replies_urls = None
        image = control.addonInfo('icon')
        pairs = {
            'title': title, 'name': name, 'id': id, 'image': image,
            'kind': 'more', 'permalink': permalink,
            'replies_urls': replies_urls
        }
        return pairs

    # Build the pagination url from the listing's 'after' token.
    def next_appender(json_data):
        try:
            next_id = json_data['after']
            if not next_id:
                raise KeyError
            elif '&after=' in parsed.query:
                # Replace an existing after-token in place.
                _next_url = urlunparse(
                    parsed._replace(
                        query=re.sub(r'&after=\w{8,9}',
                                     r'&after=' + next_id, parsed.query)))
            else:
                _next_url = urlunparse(
                    parsed._replace(query=parsed.query + '&after=' + next_id))
        except KeyError:
            _next_url = ''
        return _next_url

    # Dispatch every child "thing" to the matching *_kind converter.
    def processor(_json):
        if isinstance(_json, list):
            for j in _json:
                data = j['data']
                kind = j['kind']
                if kind == 'LabeledMulti':
                    pairs = lm_kind(data)
                    self.data.append(pairs)
                else:
                    children = data['children']
                    nu = next_appender(data)
                    for c in children:
                        kind = c['kind']
                        data = c['data']
                        if kind == 't3':
                            pairs = t3_kind(data, nu)
                        elif kind == 't1':
                            pairs = t1_kind(data, nu)
                        elif kind == 'more':
                            pairs = more_kind(data)
                        else:
                            pairs = None
                        self.data.append(pairs)
            return self.data
        else:
            data = _json['data']
            children = data['children']
            nu = next_appender(data)
            for d in children:
                item_data = d['data']
                kind = d['kind']
                # Link:
                if kind == 't3':
                    pairs = t3_kind(item_data, nu)
                # Subreddit:
                elif kind == 't5':
                    pairs = t5_kind(item_data, nu)
                # Comment:
                elif kind == 't1':
                    pairs = t1_kind(item_data, nu)
                elif kind == 'more':
                    # NOTE(review): this passes the page-level 'data', not
                    # 'item_data' — looks like a bug; confirm before changing.
                    pairs = more_kind(data)
                else:
                    pairs = {'title': 'Null', 'action': None}
                self.data.append(pairs)
            return self.data

    #### End of nested helper functions ####

    parsed = urlparse(link)
    query = dict(parse_qsl(parsed.query))
    path = parsed.path
    # Cap page size per user setting unless the caller already set one.
    if 'limit' not in query:
        query.update({'limit': control.setting('items.limit')})
    query = urlencode(query)
    # Unauthenticated requests need the explicit .json suffix.
    if not access_boolean() and not path.endswith('.json'):
        path += dotjson
    link = urlunparse(parsed._replace(path=path, query=query))
    json_object = client.request(link, headers=request_headers())
    loaded = json.loads(json_object)
    self.list = processor(loaded)
    return self.list
def run_builtin(addon_id=control.addonInfo('id'), action=None, mode=None, content_type=None, url=None, query=None, path_history='', get_url=False, command=('ActivateWindow', 'Container.Update'), *args):

    """
    This function will construct a url starting with plugin:// attached to the addon_id,
    then passed into either the ActivateWindow built-in command or Container.Update
    for listing/container manipulation. You have to either pass action, mode, content_type
    or query, otherwise TypeError will be raised. Can also apply the "PlayMedia".
    Query will override action, mode, url and content_type arguments if passed as dictionary
    path_history can also be either ",return" or ",replace"
    """

    if not query and not action and not mode and not content_type:
        raise TypeError('Cannot manipulate container without arguments')

    # Assemble the query string from whichever arguments were supplied.
    # Joining a parts list guarantees exactly one '&' between pairs
    # (the previous string-concatenation approach dropped separators
    # between action/mode/url and appended the repr of a dict query).
    parts = []

    if isinstance(query, dict):
        parts.append(urlencode(query))

    if content_type:
        parts.append('content_type={0}'.format(content_type))

    if action:
        parts.append('action={0}'.format(action))

    if mode:
        parts.append('mode={0}'.format(mode))

    if url:
        parts.append('url={0}'.format(quote_plus(url)))

    # A non-dict query is passed through verbatim; a dict was already encoded.
    if query and not isinstance(query, dict):
        parts.append('query={0}'.format(query))

    if args:
        parts.extend(args)

    query_string = '&'.join(parts)

    # Map the content type to the Kodi window that should host the listing.
    if 'content_type=video' in query_string:
        window_id = 'videos'
    elif 'content_type=audio' in query_string:
        window_id = 'music'
    elif 'content_type=image' in query_string:
        window_id = 'pictures'
    elif 'content_type=executable' in query_string:
        window_id = 'programs'
    elif 'content_type' in query_string and dict(
            parse_qsl(query_string))['content_type'] not in [
                'video', 'audio', 'image', 'executable'
            ]:
        raise AttributeError('Incorrect content_type specified')

    addon_id = ''.join(['plugin://', addon_id, '/'])

    if 'content_type' in query_string and isinstance(command, tuple):
        # window_id is always bound here: an unknown content_type raised above.
        # noinspection PyUnboundLocalVariable
        executable = '{0}({1},"{2}?{3}"{4})'.format(
            command[0], window_id, addon_id, query_string,
            ',return' if not path_history else path_history)
    else:
        if isinstance(command, tuple):
            executable = '{0}({1}?{2}{3})'.format(
                command[1], addon_id, query_string,
                ',return' if not path_history else path_history)
        else:
            executable = '{0}({1}?{2}{3})'.format(
                command, addon_id, query_string,
                ',return' if not path_history else path_history)

    if get_url:
        return executable
    else:
        control.execute(executable)
def player(url, params, do_not_resolve=False):
    """Resolve *url* (unless do_not_resolve) and hand the stream to Kodi.

    Handles ustream links by opening a browser, streamlink OrderedDict
    results (quality picking + header forwarding), m3u8 quality picking
    and dash/ISA detection, then delegates to directory.resolve().
    """
    if url is None:
        log_debug('Nothing playable was found')
        return
    # Unescape HTML-encoded ampersands before any further processing
    # (the previous replace('&', '&') was a no-op).
    url = url.replace('&amp;', '&')
    log_debug('Attempting to play this url: ' + url)
    if 'ustream' in url:
        # ustream cannot be resolved in-app; hand it to a web browser.
        log_debug('Opening browser window for this url: {0}'.format(url))
        control.open_web_browser(url)
        while not control.wait(1):
            if control.condVisibility('Window.IsActive(okdialog)'):
                control.execute('Dialog.Close(all)')
                break
        return
    if do_not_resolve:
        stream = url
    else:
        stream = conditionals(url, params)
    if not stream or (len(stream) == 2 and not stream[0]):
        log_debug('Failed to resolve this url: {0}'.format(url))
        control.execute('Dialog.Close(all)')
        return
    plot = None
    try:
        if isinstance(stream, tuple):
            # (stream, plot) tuples carry the plot alongside the url.
            plot = stream[1]
            stream = stream[0]
        else:
            try:
                plot = params.get('plot').encode('latin-1')
            except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
                plot = params.get('plot')
    except TypeError:
        pass
    else:
        log_debug('Plot obtained')
    dash, m3u8_dash, mimetype, manifest_type = dash_conditionals(stream)
    if not m3u8_dash and control.setting('m3u8_quality_picker') in [
            '1', '2'
    ] and '.m3u8' in stream:
        try:
            stream = m3u8_loader.m3u8_picker(stream)
        except TypeError:
            pass
    if isinstance(stream, OrderedDict):
        # Streamlink-style result: a quality -> stream object mapping.
        try:
            try:
                args = stream['best'].args
            except Exception:
                args = None
            try:
                json_dict = json.loads(stream['best'].json)
            except Exception:
                json_dict = None
            for h in args, json_dict:
                # Guard against None before the membership test
                # ('headers' in None raises TypeError).
                if h and 'headers' in h:
                    headers = h['headers']
                    break
            else:
                headers = None
            if headers:
                # Drop hop-by-hop headers; pop() keeps going when a key
                # is absent (sequential del aborted on the first miss).
                for k in ('Connection', 'Accept-Encoding', 'Accept'):
                    headers.pop(k, None)
                append = ''.join(['|', urlencode(headers)])
            else:
                append = ''
        except AttributeError:
            append = ''
        if control.setting('sl_quality_picker') == '0' or len(stream) == 3:
            stream = stream['best'].to_url() + append
        else:
            # list(...) keeps this working on Python 3, where dict views
            # do not support slicing.
            keys = list(stream.keys())[::-1]
            values = [u.to_url() + append for u in stream.values()][::-1]
            stream = stream_picker(keys, values)
    dash, m3u8_dash, mimetype, manifest_type = dash_conditionals(stream)
    if stream != url:
        log_debug('Stream has been resolved: ' + stream)
    if '|' in stream or '|' in url:
        from tulip.compat import parse_qsl
        log_debug('Appending custom headers: ' + repr(dict(parse_qsl(stream.rpartition('|')[2]))))
    try:
        image = params.get('image').encode('latin-1')
        title = params.get('title').encode('latin-1')
    except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
        image = params.get('image')
        title = params.get('title')
    meta = {'title': title}
    if plot:
        meta.update({'plot': plot})
    try:
        directory.resolve(
            stream, meta=meta, icon=image, dash=dash,
            manifest_type=manifest_type, mimetype=mimetype)
    except Exception:
        control.execute('Dialog.Close(all)')
        control.infoDialog(control.lang(30112))
def add(items, cacheToDisc=True, content=None, mediatype=None, infotype='video', argv=None, as_playlist=False, pd_heading=None, pd_message='', clear_first=True, progress=False, category=None):
    """Render a list of item dicts as a Kodi directory (or playlist).

    Each dict may carry: title/label, image/poster/icon, banner, fanart,
    action (required — items without it abort the listing), url, name,
    year, plot, genre, dash, query, cm (context menus), isFolder,
    isPlayable, streaminfo, infotype, plus meta_tags keys and optional
    'next*' keys on the first item for pagination.
    """
    if argv is None:
        from tulip.init import sysaddon, syshandle
    else:
        sysaddon = argv[0]
        syshandle = int(argv[1])
    if items is None or len(items) == 0:
        return
    # sysicon = control.join(control.addonInfo('path'), 'resources', 'media')
    sysimage = control.addonInfo('icon')
    sysfanart = control.addonInfo('fanart')
    if progress:
        pd = control.progressDialogGB
        pd.create(heading=control.name() if not pd_heading else pd_heading,
                  message=pd_message)
    else:
        pd = None
    if as_playlist and clear_first:
        # Playlist slot 1 is video, 0 is music.
        control.playlist(1 if infotype == 'video' else 0).clear()
    # Keys copied verbatim into setInfo() infoLabels.
    meta_tags = [
        'count', 'size', 'date', 'genre', 'country', 'year', 'episode',
        'season', 'sortepisode', 'sortseason', 'episodeguide', 'showlink',
        'top250', 'setid', 'tracknumber', 'rating', 'userrating', 'watched',
        'playcount', 'overlay', 'cast', 'castandrole', 'director', 'mpaa',
        'plot', 'plotoutline', 'title', 'originaltitle', 'sorttitle',
        'duration', 'studio', 'tagline', 'writer', 'tvshowtitle',
        'premiered', 'status', 'set', 'gameclient', 'setoverview', 'tag',
        'imdbnumber', 'code', 'aired', 'credits', 'lastplayed', 'album',
        'artist', 'votes', 'path', 'trailer', 'dateadded', 'mediatype',
        'dbid', 'tracknumber', 'discnumber', 'lyrics', 'listeners',
        'musicbrainztrackid', 'comment', 'picturepath', 'platform',
        'genres', 'publisher', 'developer', 'overview'
    ]
    for c, i in list(enumerate(items)):
        try:
            if progress:
                p = control.per_cent(c, len(items))
                pd.update(p)
            # 'title' may be a localization id; fall back to the raw value.
            try:
                label = control.lang(i['title']).encode('utf-8')
            except Exception:
                label = i['title']
            if 'label' in i and not i['label'] == '0':
                label = i['label']
            # Artwork fallback chain: image -> poster -> icon -> addon icon.
            if 'image' in i and not i['image'] == '0':
                image = i['image']
            elif 'poster' in i and not i['poster'] == '0':
                image = i['poster']
            elif 'icon' in i and not i['icon'] == '0':
                image = control.addonmedia(i['icon'])
            else:
                image = sysimage
            if 'banner' in i and not i['banner'] == '0':
                banner = i['banner']
            elif 'fanart' in i and not i['fanart'] == '0':
                banner = i['fanart']
            else:
                banner = image
            fanart = i['fanart'] if 'fanart' in i and not i[
                'fanart'] == '0' else sysfanart
            isFolder = False if 'isFolder' in i and not i[
                'isFolder'] == '0' else True
            try:
                is_play_boolean = i.get('isPlayable') in [
                    'True', 'true', '1', 'yes', 'Yes'
                ]
            except Exception:
                is_play_boolean = False
            isPlayable = True if not isFolder and 'isPlayable' not in i else is_play_boolean
            if isPlayable:
                isFolder = False
            # 'action' is mandatory — a missing one aborts the whole add().
            try:
                action = '{0}?action={1}'.format(sysaddon, i['action'])
            except KeyError:
                return
            # Each optional field becomes a 'key=value' uri fragment or None.
            try:
                url = 'url={0}'.format(quote_plus(i['url']))
            except Exception:
                url = None
            try:
                title = 'title={0}'.format(quote_plus(i['title']))
            except KeyError:
                try:
                    title = 'title={0}'.format(
                        quote_plus(i['title'].encode('utf-8')))
                except KeyError:
                    title = None
            except Exception:
                title = None
            try:
                icon = 'image={0}'.format(quote_plus(i['image']))
            except KeyError:
                try:
                    icon = 'image={0}'.format(
                        quote_plus(i['image'].encode('utf-8')))
                except KeyError:
                    icon = None
            except Exception:
                icon = None
            try:
                name = 'name={0}'.format(quote_plus(i['name']))
            except KeyError:
                try:
                    name = 'name={0}'.format(
                        quote_plus(i['name'].encode('utf-8')))
                except KeyError:
                    name = None
            except Exception:
                name = None
            try:
                year = 'year={0}'.format(quote_plus(i['year']))
            except Exception:
                year = None
            try:
                plot = 'plot={0}'.format(quote_plus(i['plot']))
            except KeyError:
                try:
                    plot = 'plot={0}'.format(
                        quote_plus(i['plot'].encode('utf-8')))
                except KeyError:
                    plot = None
            except Exception:
                plot = None
            try:
                genre = 'genre={0}'.format(quote_plus(i['genre']))
            except KeyError:
                try:
                    genre = 'genre={0}'.format(
                        quote_plus(i['genre'].encode('utf-8')))
                except KeyError:
                    genre = None
            except Exception:
                genre = None
            try:
                dash = 'dash={0}'.format(quote_plus(i['dash']))
            except Exception:
                dash = None
            try:
                query = 'query={0}'.format(quote_plus(i['query']))
            except Exception:
                query = None
            # Join the present fragments into the plugin callback uri.
            parts = [
                foo for foo in [
                    action, url, title, icon, name, year, plot, genre, dash,
                    query
                ] if foo
            ]
            uri = '&'.join(parts)
            # Context menu entries: (label, RunPlugin(...)) tuples.
            cm = []
            menus = i['cm'] if 'cm' in i else []
            for menu in menus:
                try:
                    try:
                        tmenu = control.lang(menu['title']).encode('utf-8')
                    except Exception:
                        tmenu = menu['title']
                    try:
                        qmenu = urlencode(menu['query'])
                    except Exception:
                        # Fallback for non-ascii values: encode each one.
                        qmenu = urlencode(
                            dict((k, v.encode('utf-8'))
                                 for k, v in menu['query'].items()))
                    cm.append(
                        (tmenu, 'RunPlugin({0}?{1})'.format(sysaddon, qmenu)))
                except Exception:
                    pass
            meta = dict((k, v) for k, v in iteritems(i)
                        if k in meta_tags and (not v == '0' or v is None))
            if mediatype is not None:
                meta['mediatype'] = mediatype
            item = control.item(label=label)
            item.setArt({
                'icon': image, 'thumb': image, 'poster': image,
                'tvshow.poster': image, 'season.poster': image,
                'banner': banner, 'tvshow.banner': banner,
                'season.banner': banner, 'fanart': fanart
            })
            item.addContextMenuItems(cm)
            item.setInfo(
                type=infotype if 'infotype' not in i else i['infotype'],
                infoLabels=meta)
            # PVR items must not be flagged playable or carry stream info.
            if isPlayable:
                if not i['action'] == 'pvr_client':
                    item.setProperty('IsPlayable', 'true')
                else:
                    item.setProperty('IsPlayable', 'false')
                if not i['action'] == 'pvr_client':
                    if 'streaminfo' not in i and infotype == 'video':
                        item.addStreamInfo(infotype, {'codec': 'h264'})
                    else:
                        item.addStreamInfo(infotype, i.get('streaminfo'))
            if as_playlist and isPlayable:
                control.playlist(1 if infotype == 'video' else 0).add(
                    url=uri, listitem=item, index=c)
            else:
                control.addItem(handle=syshandle, url=uri, listitem=item,
                                isFolder=isFolder, totalItems=len(items))
        except Exception as reason:
            log('Directory not added, reason of failure: ' + repr(reason))
    if progress:
        pd.update(100)
        pd.close()
    if as_playlist:
        control.openPlaylist(1 if infotype == 'video' else 0)
        return
    # Optional pagination item, driven by 'next*' keys on the first item.
    try:
        i = items[0]
        if i['next'] == '':
            raise Exception()
        url = '{0}?action={1}&url={2}'.format(sysaddon, i['nextaction'],
                                              quote_plus(i['next']))
        icon = i['nexticon'] if 'nexticon' in i else control.addonmedia(
            'next.png')
        fanart = i['nextfanart'] if 'nextfanart' in i else sysfanart
        try:
            label = control.lang(i['nextlabel']).encode('utf-8')
        except Exception:
            label = 'Next'
        item = control.item(label=label)
        item.setArt({
            'icon': icon, 'thumb': icon, 'poster': icon,
            'tvshow.poster': icon, 'season.poster': icon, 'banner': icon,
            'tvshow.banner': icon, 'season.banner': icon, 'fanart': fanart
        })
        control.addItem(handle=syshandle, url=url, listitem=item,
                        isFolder=True, totalItems=len(items))
    except Exception:
        pass
    if content is not None:
        control.content(syshandle, content)
    if category is not None:
        control.setcategory(syshandle, category)
    control.directory(syshandle, cacheToDisc=cacheToDisc)
def resolve(url, meta=None, icon=None, dash=False, manifest_type=None, inputstream_type='adaptive', headers=None, mimetype=None, resolved_mode=True, live=False):

    """
    Prepares a resolved url into a listitem for playback

    :param url: Requires a string or unicode, append required urlencoded headers with pipe |
    :param meta: a dictionary with listitem keys: values
    :param icon: String
    :param dash: Boolean
    :param manifest_type: String
    :param inputstream_type: String 99.9% of the time it is adaptive
    :param headers: dictionary or urlencoded string
    :param mimetype: String
    :param resolved_mode: Boolean; setResolvedUrl when True, direct play otherwise
    :param live: Boolean; adds the ISA manifest update parameter for live dash
    :return: None
    """

    from tulip.init import syshandle

    # Fail gracefully instead of making Kodi complain.
    if url is None:

        from kodi_six.xbmc import log

        log('URL was not provided, failure to resolve stream')

        return

    # Split off pipe-appended headers from the url when none were given.
    if not headers and '|' in url:
        url, sep, headers = url.rpartition('|')
    elif headers:
        # Normalize explicit headers to a urlencoded string.
        if isinstance(headers, basestring):
            if headers.startswith('|'):
                headers = headers[1:]
        elif isinstance(headers, dict):
            headers = urlencode(headers)

    # Non-dash playback carries the headers inside the url; dash passes
    # them to inputstream.adaptive as a property instead (see below).
    if not dash and headers:
        url = '|'.join([url, headers])

    item = control.item(path=url)

    if icon is not None:
        item.setArt({'icon': icon, 'thumb': icon})

    if meta is not None:
        item.setInfo(type='Video', infoLabels=meta)

    # Kodi 17+ is detected via the bundled python API version (>= 2.25.0).
    krypton_plus = int(
        control.infoLabel('System.AddonVersion(xbmc.python)').replace(
            '.', '')) >= 2250

    try:
        isa_enabled = control.addon_details('inputstream.adaptive').get(
            'enabled')
    except KeyError:
        isa_enabled = False

    if dash and krypton_plus and isa_enabled:

        if not manifest_type:
            manifest_type = 'mpd'

        if not mimetype:
            mimetype = 'application/xml+dash'

        item.setContentLookup(False)
        item.setMimeType('{0}'.format(mimetype))
        item.setProperty('inputstreamaddon',
                         'inputstream.{}'.format(inputstream_type))
        item.setProperty(
            'inputstream.{0}.manifest_type'.format(inputstream_type),
            manifest_type)

        if headers:
            item.setProperty(
                "inputstream.{0}.stream_headers".format(inputstream_type),
                headers)

    elif mimetype:

        item.setContentLookup(False)
        item.setMimeType('{0}'.format(mimetype))

    if dash and live:
        # Live dash streams need ISA to refresh the manifest.
        item.setProperty(
            'inputstream.{}.manifest_update_parameter'.format(
                inputstream_type), '&start_seq=$START_NUMBER$')

    if resolved_mode:
        control.resolve(syshandle, True, item)
    else:
        control.player().play(url, item)
def request(url, close=True, redirect=True, error=False, proxy=None,
            post=None, headers=None, mobile=False, limit=None, referer=None,
            cookie=None, output='', timeout='30', username=None,
            password=None, verify=True, as_bytes=False):

    """
    HTTP client helper built on urllib2/urllib.request.

    :param url: request url; bytes are decoded to utf-8 first
    :param close: close the response after reading (also forces a cookie jar handler)
    :param redirect: False installs a handler that suppresses 30x redirects
    :param error: if False, 503 responses (without a CF gate) return None silently
    :param proxy: optional http proxy address
    :param post: dict (urlencoded) or string payload; sent as bytes
    :param headers: optional dict of request headers; UA/Referer/Accept-Language
        are filled in when absent
    :param mobile: use a cached random mobile user agent instead of desktop
    :param limit: read limit in KiB ('0' means 224 KiB)
    :param referer: explicit Referer header; default is the url's scheme://netloc/
    :param cookie: value for the Cookie header
    :param output: one of '', 'cookie', 'response', 'chunk', 'extended',
        'geturl', 'headers' — selects what is returned
    :param timeout: socket timeout in seconds (string or int)
    :param username, password: basic or proxy auth credentials
    :param verify: False disables TLS certificate verification (py >= 2.7.12)
    :param as_bytes: return raw bytes instead of a decoded utf-8 string (py3)
    :return: depends on `output`; None on failure
    """

    try:
        url = url.decode('utf-8')
    except Exception:
        pass

    # Normalize the payload to bytes for py3's urlopen.
    if isinstance(post, dict):
        post = bytes(urlencode(post), encoding='utf-8')
    elif isinstance(post, basestring) and is_py3:
        post = bytes(post, encoding='utf-8')

    try:

        handlers = []

        # Plain (non-proxy) basic auth.
        if username is not None and password is not None and not proxy:

            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, uri=url, user=username, passwd=password)
            handlers += [urllib2.HTTPBasicAuthHandler(passmgr)]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        if proxy is not None:

            if username is not None and password is not None:

                passmgr = urllib2.ProxyBasicAuthHandler()
                passmgr.add_password(None, uri=url, user=username, passwd=password)

                handlers += [
                    urllib2.ProxyHandler({'http': '{0}'.format(proxy)}),
                    urllib2.HTTPHandler,
                    urllib2.ProxyBasicAuthHandler(passmgr)
                ]

            else:

                handlers += [
                    urllib2.ProxyHandler({'http': '{0}'.format(proxy)}),
                    urllib2.HTTPHandler
                ]

            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        # Cookie jar is needed when cookies must be reported back to the caller
        # or when the connection is kept open.
        if output == 'cookie' or output == 'extended' or close is not True:

            cookies = cookielib.LWPCookieJar()

            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]

            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        # XboxOne detection: its platform builds need the relaxed SSL context below.
        try:
            import platform
            is_XBOX = platform.uname()[1] == 'XboxOne'
        except Exception:
            is_XBOX = False

        if not verify and sys.version_info >= (2, 7, 12):

            # Caller explicitly opted out of certificate verification.
            try:

                import ssl
                ssl_context = ssl._create_unverified_context()
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)

            except Exception:

                pass

        elif verify and ((2, 7, 8) < sys.version_info < (2, 7, 12) or is_XBOX):

            # Workaround for interpreter builds whose default verification is broken.
            try:

                import ssl
                try:
                    import _ssl
                    CERT_NONE = _ssl.CERT_NONE
                except Exception:
                    CERT_NONE = ssl.CERT_NONE
                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)

            except Exception:

                pass

        # Trick to verify headers is a dict: the no-op update raises for
        # anything without a dict-like .update, and None is replaced by {}.
        try:
            headers.update(headers)
        except Exception:
            headers = {}

        if 'User-Agent' in headers:
            pass
        elif mobile is not True:
            # headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 12)
        else:
            headers['User-Agent'] = cache.get(random_mobile_agent, 12)

        if 'Referer' in headers:
            pass
        elif referer is None:
            # Default referer: the root of the requested host.
            headers['Referer'] = '%s://%s/' % (urlparse(url).scheme,
                                               urlparse(url).netloc)
        else:
            headers['Referer'] = referer

        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'

        if 'Cookie' in headers:
            pass
        elif cookie is not None:
            headers['Cookie'] = cookie

        if redirect is False:

            # Return the 30x response itself instead of following it.
            class NoRedirectHandler(urllib2.HTTPRedirectHandler):

                def http_error_302(self, reqst, fp, code, msg, head):

                    infourl = addinfourl(fp, head, reqst.get_full_url())
                    infourl.status = code
                    infourl.code = code

                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)

            # No referer on non-redirected requests.
            try:
                del headers['Referer']
            except Exception:
                pass

        req = urllib2.Request(url, data=post, headers=headers)

        try:

            response = urllib2.urlopen(req, timeout=int(timeout))

        except urllib2.HTTPError as response:

            if response.code == 503:

                # Cloudflare browser-verification gate: fetch a cached cf cookie
                # and retry once.
                # NOTE(review): on py3 response.read() yields bytes, so the
                # `in` test against a str looks type-mismatched — confirm.
                if 'cf-browser-verification' in response.read(5242880):

                    netloc = '{0}://{1}'.format(
                        urlparse(url).scheme, urlparse(url).netloc)

                    cf = cache.get(cfcookie, 168, netloc,
                                   headers['User-Agent'], timeout)

                    headers['Cookie'] = cf

                    req = urllib2.Request(url, data=post, headers=headers)

                    response = urllib2.urlopen(req, timeout=int(timeout))

                elif error is False:
                    return

            elif error is False:
                return

        if output == 'cookie':

            # Both assignments rely on NameError being swallowed when the
            # corresponding variable (cookies / cf) was never created.
            try:
                result = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except Exception:
                pass
            try:
                result = cf
            except Exception:
                pass

        elif output == 'response':

            if limit == '0':
                result = (str(response.code), response.read(224 * 1024))
            elif limit is not None:
                result = (str(response.code),
                          response.read(int(limit) * 1024))
            else:
                result = (str(response.code), response.read(5242880))

        elif output == 'chunk':

            # Only return a 16 KiB chunk for bodies of at least 2 MiB.
            try:
                content = int(response.headers['Content-Length'])
            except Exception:
                content = (2049 * 1024)

            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)

        elif output == 'extended':

            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except Exception:
                pass
            try:
                cookie = cf
            except Exception:
                pass
            content = response.headers
            result = response.read(5242880)
            return result, headers, content, cookie

        elif output == 'geturl':

            result = response.geturl()

        elif output == 'headers':

            content = response.headers
            return content

        else:

            # Default output: the (possibly limited) response body.
            if limit == '0':
                result = response.read(224 * 1024)
            elif limit is not None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

        if close is True:
            response.close()

        # py3 returns text unless raw bytes were requested.
        if is_py3 and not as_bytes and isinstance(result, bytes):
            return result.decode('utf-8')
        else:
            return result

    except Exception as reason:

        log('Client module failed, reason of failure: ' + repr(reason))

        return
def request(url, close=True, redirect=True, error=False, proxy=None,
            post=None, headers=None, mobile=False, limit=None, referer=None,
            cookie=None, output='', timeout='30', username=None,
            password=None, verify=True, as_bytes=False, allow_caching=True):

    """
    HTTP client helper (newer variant) built on urllib2/urllib.request.

    Differs from the older variant above: an `allow_caching` switch for the
    random user agent, extra `output` modes ('file_size', 'json'), no
    Cloudflare cookie retry (the CF gate simply aborts), and py3-aware
    proxy-auth / decoding paths via `py3_dec`.

    :param url: request url; bytes are decoded to utf-8 first
    :param allow_caching: cache the random UA via tulip.cache; otherwise use
        the CHROME / ANDROID constants
    :param output: '', 'cookie', 'response', 'chunk', 'extended', 'geturl',
        'headers', 'file_size', 'json' — selects what is returned
    :return: depends on `output`; None on failure
    """

    try:
        url = url.decode('utf-8')
    except Exception:
        pass

    # Normalize the payload to bytes for py3's urlopen.
    if isinstance(post, dict):
        post = bytes(urlencode(post), encoding='utf-8')
    elif isinstance(post, str) and is_py3:
        post = bytes(post, encoding='utf-8')

    try:

        handlers = []

        # Plain (non-proxy) basic auth.
        if username is not None and password is not None and not proxy:

            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(None, uri=url, user=username, passwd=password)
            handlers += [urllib2.HTTPBasicAuthHandler(passmgr)]
            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        if proxy is not None:

            if username is not None and password is not None:

                # py3 has no ProxyBasicAuthHandler password manager shortcut,
                # so a plain HTTPPasswordMgr is used there.
                if is_py3:

                    passmgr = urllib2.HTTPPasswordMgr()
                    passmgr.add_password(None, uri=url, user=username, passwd=password)

                else:

                    passmgr = urllib2.ProxyBasicAuthHandler()
                    passmgr.add_password(None, uri=url, user=username, passwd=password)

                handlers += [
                    urllib2.ProxyHandler({'http': '{0}'.format(proxy)}),
                    urllib2.HTTPHandler,
                    urllib2.ProxyBasicAuthHandler(passmgr)
                ]

            else:

                handlers += [
                    urllib2.ProxyHandler({'http': '{0}'.format(proxy)}),
                    urllib2.HTTPHandler
                ]

            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        # Cookie jar is needed when cookies must be reported back to the caller
        # or when the connection is kept open.
        if output == 'cookie' or output == 'extended' or close is not True:

            cookies = cookielib.LWPCookieJar()

            handlers += [
                urllib2.HTTPHandler(),
                urllib2.HTTPSHandler(),
                urllib2.HTTPCookieProcessor(cookies)
            ]

            opener = urllib2.build_opener(*handlers)
            urllib2.install_opener(opener)

        # Disable TLS verification on request, or on interpreter builds whose
        # default verification is broken.
        if not verify or ((2, 7, 8) < sys.version_info < (2, 7, 12)):

            try:

                ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                urllib2.install_opener(opener)

            except Exception:

                pass

        # Trick to verify headers is a dict: the no-op update raises for
        # anything without a dict-like .update, and None is replaced by {}.
        try:
            headers.update(headers)
        except Exception:
            headers = {}

        if 'User-Agent' in headers:
            pass
        elif mobile is not True:
            if allow_caching:
                from tulip import cache
                headers['User-Agent'] = cache.get(randomagent, 12)
            else:
                headers['User-Agent'] = CHROME
        else:
            if allow_caching:
                from tulip import cache
                headers['User-Agent'] = cache.get(random_mobile_agent, 12)
            else:
                headers['User-Agent'] = ANDROID

        if 'Referer' in headers:
            pass
        elif referer is None:
            # Default referer: the root of the requested host.
            headers['Referer'] = '%s://%s/' % (urlparse(url).scheme,
                                               urlparse(url).netloc)
        else:
            headers['Referer'] = referer

        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'

        if 'Cookie' in headers:
            pass
        elif cookie is not None:
            headers['Cookie'] = cookie

        if redirect is False:

            # Return the 30x response itself instead of following it.
            class NoRedirectHandler(urllib2.HTTPRedirectHandler):

                def http_error_302(self, reqst, fp, code, msg, head):

                    infourl = addinfourl(fp, head, reqst.get_full_url())
                    infourl.status = code
                    infourl.code = code

                    return infourl

                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)

            # No referer on non-redirected requests.
            try:
                del headers['Referer']
            except Exception:
                pass

        req = urllib2.Request(url, data=post, headers=headers)

        try:

            response = urllib2.urlopen(req, timeout=int(timeout))

        except HTTPError as response:

            if response.code == 503:

                # Cloudflare browser-verification gate: no bypass here, just
                # log and bail out.
                if 'cf-browser-verification' in response.read(5242880):

                    if log_debug:
                        log_debug(
                            'This request cannot be handled due to human verification gate'
                        )
                    else:
                        print(
                            'This request cannot be handled due to human verification gate'
                        )

                    return

                elif error is False:
                    return

            elif error is False:
                return

        if output == 'cookie':

            try:
                result = '; '.join(
                    ['{0}={1}'.format(i.name, i.value) for i in cookies])
            except Exception:
                pass

        elif output == 'response':

            if limit == '0':
                result = (str(response.code), response.read(224 * 1024))
            elif limit is not None:
                result = (str(response.code),
                          response.read(int(limit) * 1024))
            else:
                result = (str(response.code), response.read(5242880))

        elif output == 'chunk':

            # Only return a 16 KiB chunk for bodies of at least 2 MiB.
            try:
                content = int(response.headers['Content-Length'])
            except Exception:
                content = (2049 * 1024)

            if content < (2048 * 1024):
                return
            result = response.read(16 * 1024)

        elif output == 'extended':

            try:
                cookie = '; '.join(
                    ['%s=%s' % (i.name, i.value) for i in cookies])
            except Exception:
                pass
            content = response.headers
            result = response.read(5242880)
            if not as_bytes:
                result = py3_dec(result)
            return result, headers, content, cookie

        elif output == 'geturl':

            result = response.geturl()

        elif output == 'headers':

            content = response.headers
            if close:
                response.close()
            return content

        elif output == 'file_size':

            try:
                content = int(response.headers['Content-Length'])
            except Exception:
                content = '0'
            response.close()
            return content

        elif output == 'json':

            content = json.loads(response.read(5242880))
            response.close()
            return content

        else:

            # Default output: the (possibly limited) response body.
            if limit == '0':
                result = response.read(224 * 1024)
            elif limit is not None:
                if isinstance(limit, int):
                    result = response.read(limit * 1024)
                else:
                    result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

        if close is True:
            response.close()

        if not as_bytes:
            result = py3_dec(result)

        return result

    except Exception as reason:

        _, __, tb = sys.exc_info()
        print(traceback.print_tb(tb))

        if log_debug:
            log_debug('Request failed, reason: ' + repr(reason) + ' on url: ' + url)
        else:
            print('Request failed, reason: ' + repr(reason) + ' on url: ' + url)

        return
def resolver(url, quality=None): try: if '.mpd' in url: return url custom_plugins = control.join(control.addonPath, 'resources', 'lib', 'resolvers') session = streamlink.session.Streamlink() session.load_plugins(custom_plugins) # session.set_plugin_option('', '', '') plugin = session.resolve_url(url) # plugin.set_option() streams = plugin.streams() if not streams: return url try: try: args = streams['best'].args except Exception: args = None try: json_dict = json.loads(streams['best'].json) except Exception: json_dict = None for h in args, json_dict: if 'headers' in h: headers = h['headers'] break else: headers = None # if json_dict: # # try: # headers = json_dict['headers'] # except KeyError: # headers = None # # elif args: # # try: # headers = args['headers'] # except KeyError: # headers = None # # else: # # headers = None if headers and control.setting('args_append') == 'true': try: del headers['Connection'] del headers['Accept-Encoding'] del headers['Accept'] except KeyError: pass append = ''.join(['|', urlencode(headers)]) else: append = '' except AttributeError: append = '' if quality is None: if control.setting('quality_choice') == '0': playable = streams['best'].to_url() + append return playable else: keys = streams.keys()[::-1] values = [u.to_url() + append for u in streams.values()][::-1] return stream_picker(keys, values) else: if quality == 'manual': keys = streams.keys()[::-1] values = [u.to_url() + append for u in streams.values()][::-1] return stream_picker(keys, values) else: try: return streams[quality].to_url() + append except KeyError: return streams['best'].to_url() + append except streamlink.session.NoPluginError: return url except streamlink.session.PluginError as e: _, __, tb = sys.exc_info() print traceback.print_tb(tb) control.infoDialog(e, time=5000)
def run(self, query=None):

    """
    Kodi subtitle-service entry point: searches xsubstv, subzxyz and
    subtitlesgr in worker threads and lists the results as subtitle items.

    :param query: explicit search query; when None the query is derived from
        the playing item's InfoLabels (episode, movie, or cleaned filename)
    :return: None (results are emitted through control.addItem / directory)
    """

    # Greek must be among the enabled subtitle search languages.
    if 'Greek' not in str(langs).split(','):

        control.directory(syshandle)
        control.infoDialog(control.lang(32002))

        return

    # Kodi 18+ needs vfs.libarchive to unpack downloaded subtitle archives.
    if not control.conditional_visibility(
            'System.HasAddon(vfs.libarchive)') and float(
                control.addon('xbmc.addon').getAddonInfo('version')
                [:4]) >= 18.0:
        control.execute('InstallAddon(vfs.libarchive)')

    # Default: one thread per provider, using self.query set below.
    threads = [
        workers.Thread(self.xsubstv),
        workers.Thread(self.subzxyz),
        workers.Thread(self.subtitlesgr)
    ]

    dup_removal = False

    if not query:

        # Derive the query from the player (if active) or the focused item.
        if control.condVisibility('Player.HasVideo'):
            infolabel_prefix = 'VideoPlayer'
        else:
            infolabel_prefix = 'ListItem'

        title = control.infoLabel('{0}.Title'.format(infolabel_prefix))

        # Non-ASCII title: fall back to the original title for searching.
        if re.search(r'[^\x00-\x7F]+', title) is not None:
            title = control.infoLabel(
                '{0}.OriginalTitle'.format(infolabel_prefix))

        year = control.infoLabel('{0}.Year'.format(infolabel_prefix))

        tvshowtitle = control.infoLabel(
            '{0}.TVshowtitle'.format(infolabel_prefix))

        # Zero-pad single-digit season/episode numbers.
        season = control.infoLabel('{0}.Season'.format(infolabel_prefix))

        if len(season) == 1:
            season = '0' + season

        episode = control.infoLabel('{0}.Episode'.format(infolabel_prefix))

        if len(episode) == 1:
            episode = '0' + episode

        # 'Sx' style episode labels: keep only the trailing digit.
        if 's' in episode.lower():
            season, episode = '0', episode[-1:]

        if tvshowtitle != '':  # episode

            # Search twice per provider: "<show> <ep title>" and "<show> Sxx Exx".
            title_query = '{0} {1}'.format(tvshowtitle, title)
            season_episode_query = '{0} S{1} E{2}'.format(
                tvshowtitle, season, episode)

            threads = [
                workers.Thread(self.xsubstv, title_query),
                workers.Thread(self.subzxyz, title_query),
                workers.Thread(self.subtitlesgr, title_query),
                workers.Thread(self.xsubstv, season_episode_query),
                workers.Thread(self.subzxyz, season_episode_query),
                workers.Thread(self.subtitlesgr, season_episode_query)
            ]

            # Dual queries can return the same subtitle twice.
            dup_removal = True

            log.log('Dual query used for subtitles search: ' + title_query +
                    ' / ' + season_episode_query)

        elif year != '':  # movie

            query = '{0} ({1})'.format(title, year)

        else:  # file

            query, year = getCleanMovieTitle(title)

            if year != '':
                query = '{0} ({1})'.format(query, year)

    if not dup_removal:
        log.log('Query used for subtitles search: ' + query)

    self.query = query

    [i.start() for i in threads]

    # Poll up to 40 times (~30s) for the workers to finish, bailing early on abort.
    for c, i in list(enumerate(range(0, 40))):

        is_alive = [x.is_alive() for x in threads]

        if all(x is False for x in is_alive):
            log.log('Reached count : ' + str(c))
            break

        if control.aborted is True:
            log.log('Aborted, reached count : ' + str(c))
            break

        control.sleep(750)

    if len(self.list) == 0:

        control.directory(syshandle)

        return

    # Stable provider ordering: xsubstv, then subzxyz, then subtitlesgr.
    f = []

    # noinspection PyUnresolvedReferences
    f += [i for i in self.list if i['source'] == 'xsubstv']
    f += [i for i in self.list if i['source'] == 'subzxyz']
    f += [i for i in self.list if i['source'] == 'subtitlesgr']

    self.list = f

    if dup_removal:

        # De-duplicate dicts via their (hashable) item tuples.
        self.list = [
            dict(t) for t in {tuple(d.items()) for d in self.list}
        ]

    # Prefix names with their provider tag.
    for i in self.list:

        try:

            if i['source'] == 'subzxyz':
                i['name'] = '[subzxyz] {0}'.format(i['name'])
            elif i['source'] == 'xsubstv':
                i['name'] = '[xsubstv] {0}'.format(i['name'])

        except Exception:

            pass

    # Emit one subtitle listitem per result.
    for i in self.list:

        u = {'action': 'download', 'url': i['url'], 'source': i['source']}
        u = '{0}?{1}'.format(sysaddon, urlencode(u))

        item = control.item(label='Greek',
                            label2=i['name'],
                            iconImage=str(i['rating']),
                            thumbnailImage='el')
        item.setProperty('sync', 'false')
        item.setProperty('hearing_imp', 'false')

        control.addItem(handle=syshandle, url=u, listitem=item,
                        isFolder=False)

    control.directory(syshandle)
def player(url, params):

    """
    Resolve and play a url, or forward it to a directory listing.

    :param url: item url; 'alivegr://' urls trigger pseudo-live playback,
        None aborts quietly
    :param params: plugin call parameters dict (action, query, plot, image,
        title, ...)
    :return: None
    """

    global skip_directory

    if url is None:

        log_debug('Nothing playable was found')

        return

    if url.startswith('alivegr://'):

        log_debug('Attempting pseudo live playback')
        skip_directory = True
        pseudo_live(url)

        return

    # Unescape HTML-encoded ampersands carried over from scraped pages.
    # (Fix: the previous `url.replace('&', '&')` was a no-op.)
    url = url.replace('&amp;', '&')

    skip_directory = params.get('action') == 'play_skipped'

    directory_boolean = MOVIES in url or SHORTFILMS in url or THEATER in url or GK_BASE in url or (
        'episode' in url and GM_BASE in url)

    # Directory-type urls open a listing instead of playing, unless skipped.
    if directory_boolean and control.setting(
            'action_type') == '1' and not skip_directory:

        directory.run_builtin(action='directory', url=url)

        return

    log_debug('Attempting to play this url: ' + url)

    if params.get('action') == 'play_resolved':

        # Already resolved upstream.
        stream = url

    elif params.get('query') and control.setting('check_streams') == 'true':

        # Pick a working stream from the serialized list, starting at the
        # currently focused container item.
        sl = json.loads(params.get('query'))
        index = int(control.infoLabel('Container.CurrentItem')) - 1
        stream = check_stream(sl, False, start_from=index, show_pd=True,
                              cycle_list=False)

    else:

        stream = conditionals(url)

    if not stream:

        log_debug('Failed to resolve this url: {0}'.format(url))

        return

    # latin-1 round-trip for py2 text handling; fall through on py3/None.
    try:
        plot = params.get('plot').encode('latin-1')
    except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
        plot = params.get('plot')

    if not plot and 'greek-movies.com' in url:
        plot = gm_source_maker(url).get('plot')

    dash, m3u8_dash, mimetype, manifest_type = dash_conditionals(stream)

    # Optional manual quality selection for plain (non-dash) m3u8 streams.
    if not m3u8_dash and control.setting(
            'm3u8_quality_picker') == '1' and '.m3u8' in stream:

        try:
            stream = m3u8_picker(stream)
        except TypeError:
            pass

    if stream != url:
        log_debug('Stream has been resolved: ' + stream)
    else:
        log_debug('Attempting direct playback: ' + stream)

    # Re-encode any headers appended after a pipe so they are safely urlencoded.
    if '|' in stream:

        stream, sep, headers = stream.rpartition('|')
        headers = dict(parse_qsl(headers))

        log_debug('Appending custom headers: ' + repr(headers))

        stream = sep.join([stream, urlencode(headers)])

    try:
        image = params.get('image').encode('latin-1')
        title = params.get('title').encode('latin-1')
    except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
        image = params.get('image')
        title = params.get('title')

    meta = {'title': title}

    if plot:
        meta.update({'plot': plot})

    try:

        directory.resolve(stream, meta=meta, icon=image, dash=dash,
                          manifest_type=manifest_type, mimetype=mimetype)

        # Loop live-style sources.
        if url.startswith('iptv://') or 'kineskop.tv' in url:
            control.execute('PlayerControl(RepeatOne)')

    except Exception:

        control.execute('Dialog.Close(all)')
        control.infoDialog(control.lang(30112))
def _listing(self, url):

    """
    Build self.list from a site listing page, following the site's ajax
    "load more" endpoint for additional pages.

    :param url: listing url; ajax urls carry their POST payload after '?'
    :return: self.list of {'title', 'image', 'url'} dicts (via self.loop for
        the ajax-driven paths)
    """

    # Ajax urls encode the POST body after the '?'.
    if self.ajax_url in url:
        result = client.request(url.partition('?')[0],
                                post=url.partition('?')[2])
    else:
        result = client.request(url)

    try:
        header = parseDOM(result, 'h2')[0]
    except IndexError:
        header = None

    next_url = None
    override = False

    # Searches, or explicit pagination, list one page at a time.
    if self.base_link + '/?s=' in url or control.setting('pagination') == 'true':
        override = True

    threads_1 = []
    threads_2 = []

    # Nest the function to work on either of the two cases
    def _exec(_items, _next_url=None):

        if control.setting('threading') == 'true':

            for count, _item in list(enumerate(_items, start=1)):
                # Fix: pass the callable and its args to the worker; the old
                # form invoked self.loop immediately and gave its return
                # value to the Thread, so nothing ran concurrently.
                threads_2.append(
                    workers.Thread(self.loop, _item, header, count, _next_url))

            [i.start() for i in threads_2]
            [i.join() for i in threads_2]

        else:

            for count, _item in list(enumerate(_items, start=1)):
                self.loop(_item, header, count, _next_url)

    if 'enimerosi-24' not in url and self.ajax_url not in url:

        # Dig the ajax pagination parameters out of the page's inline scripts.
        ajaxes = [i for i in parseDOM(result, 'script',
                                      attrs={'type': 'text/javascript'})
                  if 'ajaxurl' in i]

        ajax1 = json.loads(
            re.search(r'var loadmore_params = ({.+})', ajaxes[-1]).group(1))
        ajax2 = json.loads(
            re.search(r'var cactus = ({.+})', ajaxes[0]).group(1))

        ajax = self._ajax_merge(ajax1, ajax2)

        pages = int(ajax['max_page'])
        posts = ajax['posts']

        try:
            posts = posts.encode('utf-8')
        except Exception:
            pass

        if control.setting('threading') == 'true' and not override:

            # Fetch every page concurrently (self.thread appends to self.data).
            for i in range(0, pages + 1):
                threads_1.append(
                    workers.Thread(
                        self.thread,
                        self.ajax_url,
                        post=self.load_more.format(query=quote(posts),
                                                   page=str(i))
                    )
                )

            [i.start() for i in threads_1]
            [i.join() for i in threads_1]

        else:

            for i in range(0, pages + 1):

                a = client.request(
                    self.ajax_url,
                    post=self.load_more.format(query=quote(posts),
                                               page=str(i)))
                self.data.append(a)

                # Paginated mode: stop after the first page and point
                # next_url at page 1 of the ajax endpoint.
                if i == 0 and override:
                    next_url = '?'.join([
                        self.ajax_url,
                        self.load_more.format(query=quote(posts), page='1')
                    ])
                    break

        html = '\n'.join(self.data)

        items = itertags_wrapper(html, 'div',
                                 attrs={'class': r'item item-\d+'})

        # Fewer than a full page means there is no next page.
        if len(items) < 20:
            next_url = None

        _exec(items, next_url)

    elif self.ajax_url in url:

        items = itertags_wrapper(result, 'div',
                                 attrs={'class': r'item item-\d+'})

        # Advance the 'page' parameter for the next ajax call.
        parsed = dict(parse_qsl(url.partition('?')[2]))
        next_page = int(parsed['page']) + 1
        parsed['page'] = next_page

        if len(items) >= 20:
            next_url = '?'.join([url.partition('?')[0], urlencode(parsed)])

        _exec(items, next_url)

    else:

        # Simple listing page: scrape title/image/url directly.
        items = itertags_wrapper(result, 'div',
                                 attrs={'class': r'item item-\d+'})

        for item in items:

            text = item.text

            img = item.attributes['style']
            image = re.search(r'url\((.+)\)', img).group(1)

            title = client.replaceHTMLCodes(parseDOM(text, 'a')[0].strip())
            url = parseDOM(text, 'a', ret='href')[0]

            self.list.append({'title': title, 'image': image, 'url': url})

    return self.list
def router(url): try: if '.mpd' in url: return url session = streamlink.session.Streamlink() # session.set_plugin_option('', '', '') plugin = session.resolve_url(url) # plugin.set_option() streams = plugin.streams() try: json_list = [streams[i].json for i in streams.keys()] [log_debug(j) for j in json_list] except AttributeError: pass # TODO: Use json object exclusively on next version to obtain all items if not streams: return url try: args = streams['best'].args append = '|' if 'headers' in args: headers = streams['best'].args['headers'] append += urlencode(headers) else: append = '' except AttributeError: append = '' if quality is None: if control.setting('quality.choice') == '0': playable = streams['best'].to_url() + append return playable else: keys = streams.keys()[::-1] values = [u.to_url() + append for u in streams.values()][::-1] return stream_picker(keys, values) else: if quality == 'manual': keys = streams.keys()[::-1] values = [u.to_url() + append for u in streams.values()][::-1] return stream_picker(keys, values) else: try: return streams[quality].to_url() + append except KeyError: return streams['best'].to_url() + append except streamlink.session.NoPluginError: return url except streamlink.session.PluginError as e: control.infoDialog(e, time=5000)