def _plot(url):
    """Return the plot/description text for a video page.

    *url* packs a POST payload after '?': the part before '?' is the
    endpoint and the part after it is the form body.  The paragraphs of
    the page's last 'video-description' div are stripped of markup and
    joined with Kodi's '[CR]' line separator.
    """
    endpoint, _, payload = url.partition('?')
    html = client.request(endpoint, post=payload, timeout=20)
    descr_html = parseDOM(html, 'div', {'class': 'video-description'})[-1]
    parts = []
    for par in parseDOM(descr_html, 'p'):
        parts.append(client.stripTags(par))
    return client.replaceHTMLCodes('[CR]'.join(parts))
def resolve(self, url):
    """Resolve a listing url into a final playable stream.

    Four cases, checked in order:
    - the configured live link: pull the livestream url out of the page's
      embedded ``var data`` JSON,
    - an 11-character string: treated as a YouTube video id,
    - an 'episode' page: either a direct .mp3 (radio pages) or an HLS
      chunklist url built from the episode JSON, returned together with
      the episode plot as a (url, plot) tuple,
    - anything else: assumed already playable, returned unchanged.
    """
    if url == self.live_link:
        html = client.request(self.live_link)
        json_ = re.search(r'var data = ({.+?});', html).group(1)
        json_ = json.loads(json_)
        return json_['now']['livestream']
    elif len(url) == 11:
        # An 11-char token matches the YouTube video-id length; delegate
        # to the YouTube session resolver.
        link = self.yt_session(url)
        return link
    elif 'episode' in url:
        html = client.request(url)
        if url.startswith(self.radio_base):
            # Radio episode pages embed a direct mp3 link in the markup.
            url = re.search(r'["\'](.+?\.mp3)["\']', html).group(1)
            return url
        else:
            json_ = re.search(r'var data = ({.+?});', html).group(1)
            json_ = json.loads(json_)
            # Build the HLS chunklist url for the VOD stream.
            url = ''.join([
                self.play_link, 'skaivod/_definst_/mp4:skai/',
                json_['episode'][0]['media_item_file'], '/chunklist.m3u8'
            ])
            plot = client.stripTags(json_['episode'][0]['descr'])
            return url, plot
    else:
        return url
def episodes_list_gr(self, url, title):
    """Build the episode list of a Greek show across all its seasons.

    Fetches the show page, follows its data-url link, extracts the show
    id and the season years, downloads every season's episode page into
    ``self.data``, then scrapes each episode item into a dict appended
    to ``self.list``, which is returned.
    """
    html = client.request(url)
    link = client.parseDOM(html, 'a', ret='data-url')[1]
    html = client.request(link)
    div_season = client.parseDOM(html, 'div', attrs={'class': 'mbSeasonsList'})[0]
    years = client.parseDOM(div_season, 'a')
    # The show id lives in an inline script ('window.Environment.showId').
    show_id = [i for i in client.parseDOM(html, 'script') if 'window.Environment.showId' in i][0]
    show_id = re.search(r'showId = (\d+);', show_id).group(1)
    for y in years:
        h = client.request(self.episodeslist_gr_query.format(year=y, pages='366', show_id=show_id))
        self.data.append(h)
    html = ''.join(self.data)
    items = client.parseDOM(html, 'div', attrs={'class': 'episodeItem flexClm4'})
    try:
        # Python 2 compatibility: titles may arrive as encoded bytes.
        title = title.decode('utf-8')
    except Exception:
        pass
    for item in items:
        descr = client.parseDOM(item, 'a', attrs={'class': 'openVideoPopUp'})[0]
        descr = client.stripTags(descr).strip()
        # Trim the trailing "new episode" badge.
        # NOTE(review): the first variant uses a Latin 'N' with Greek
        # 'έο' — presumably matching a site typo; confirm before changing.
        if descr.endswith((u'Nέο', u'Νεο', u'New')):
            descr = descr[:-3]
        label = u' - '.join([title, descr])
        image = client.parseDOM(item, 'div', attrs={'class': 'epImg'}, ret='style')[0]
        # Thumbnail url is embedded in a CSS url('...') declaration.
        image = re.search(r'\([\'"](.+?)[\'"]\)', image).group(1)
        try:
            video = re.search(r'WebTvVideoId":(\d+).+?Year":(\d{4})}', item)
            url = self.player_query.format(video_id=video.group(1), show_id=show_id, year=video.group(2))
            self.list.append({'title': label, 'image': image, 'url': url})
        except Exception:
            # Items without a video id / year are silently skipped.
            pass
    return self.list
def loop(self, item, header, count, next_url=None):
    """Convert one scraped list item into a directory dict on ``self.list``.

    When the cheap inline title is available and metadata fetching is
    disabled, no extra request is made (fanart/plot stay None); otherwise
    a per-item AJAX request supplies title, plot and fanart.  Items under
    certain section headers are flagged non-playable unless they point
    into the archive ('archeio').
    """
    data_id = item.attributes['data-id']
    img = item.attributes['style']
    # Image url is embedded in a CSS url(...) declaration.
    image = re.search(r'url\((.+)\)', img).group(1)
    url = [i for i in itertags_wrapper(item.text, 'a', ret='href') if 'https' in i][0]
    meta_url = '?'.join([self.ajax_url, self.load_search.format(data_id=data_id)])
    if 'inside-page-thumb-titles' in item.text and control.setting('metadata') == 'false':
        # Cheap path: title from the inline markup, no metadata request.
        fanart = None
        plot = None
        title = parseDOM(item.text, 'div', attrs={'class': 'inside-page-thumb-titles'})[0]
        title = client.replaceHTMLCodes(parseDOM(title, 'a')[0])
    else:
        # Full path: per-item AJAX request for title, plot and fanart.
        load = client.request(self.ajax_url, post=self.load_search.format(data_id=data_id), timeout=20)
        title = parseDOM(load, 'p', {'class': 'video-title'})[0].strip()
        title = client.replaceHTMLCodes(title)
        description = parseDOM(load, 'div', {'class': 'video-description'})[-1]
        paragraphs = [client.stripTags(p) for p in parseDOM(description, 'p')]
        plot = client.replaceHTMLCodes('[CR]'.join(paragraphs))
        f = parseDOM(load, 'div', attrs={'class': 'cover'}, ret='style')[0]
        fanart = re.search(r'url\((.+)\)', f).group(1)
    data = {'title': title, 'image': image, 'url': url, 'code': count, 'meta_url': meta_url}
    if next_url:
        data.update({'next': next_url})
    # NOTE(review): the trailing `header is not None` is redundant — a
    # header contained in the list above can never be None.
    if header in [
        u'ΞΕΝΕΣ ΣΕΙΡΕΣ ΠΛΗΡΕΙΣ', u'ΨΥΧΑΓΩΓΙΑ', u'ΣΥΝΕΝΤΕΥΞΕΙΣ',
        u'ΕΛΛΗΝΙΚΑ ΝΤΟΚΙΜΑΝΤΕΡ', u'ΞΕΝΑ ΝΤΟΚΙΜΑΝΤΕΡ', u'ΠΑΙΔΙΚΑ',
        u'Η ΕΡΤ ΘΥΜΑΤΑΙ', u'ΑΘΛΗΤΙΚΑ', u'ΞΕΝΕΣ ΣΕΙΡΕΣ CATCH-UP',
        u'WEB ΣΕΙΡΕΣ', u'ΝΕΕΣ ΕΛΛΗΝΙΚΕΣ ΣΕΙΡΕΣ'
    ] and not 'archeio' in url and header is not None:
        data.update({'playable': 'false'})
    if fanart:
        data.update({'fanart': fanart})
    if plot:
        data.update({'plot': plot})
    self.list.append(data)
def episode_resolver(self, url):
    """Resolve an episode page into a playable stream.

    Radio pages yield a bare mp3 url; TV pages yield a
    ``(stream_url, plot)`` tuple built from the page's embedded
    ``var data`` JSON.
    """
    html = client.request(url)
    if url.startswith(self.radio_base):
        # Radio episodes expose a direct .mp3 link inside the markup.
        return re.search(r'["\'](.+?\.mp3)["\']', html).group(1)
    payload = json.loads(re.search(r'var data = ({.+})', html).group(1))
    episode = payload['episode'][0]
    stream = ''.join([self.play_link, episode['media_item_file'], '/chunklist.m3u8'])
    summary = client.stripTags(episode['descr'])
    return stream, summary
def recursive_list_items(url):
    """Walk a tile-based streaming API and return a list of item dicts.

    Three input shapes are handled:
    - an https page/API url: tile ids are read either from the page's
      embedded INITIAL_STATE JSON or from the JSON API directly,
    - a JSON string starting with '{"platformCodename":"www"': decoded
      into a collection codename + page for the FILTER_TILES endpoint,
    - any other string: used as a collection codename directly.

    Tile details are then fetched in one GET_TILES request and each tile
    is turned into a Kodi directory dict (title, image, fanart, plot,
    year, url, pagination info).
    """
    page = 1
    if url.startswith('https'):
        if BASE_API_LINK not in url:
            # HTML page: the state JSON is inlined in a script tag.
            html = client.request(url)
            script = [
                i for i in client.parseDOM(html, 'script') if 'INITIAL_STATE' in i
            ][0]
            # Strip the 'var __STATE__ = ' prefix and trailing ';</script>'.
            script = re.sub(r'var _*?\w+_*? = ', '', script).replace(';</script>', '')
            if script.endswith(';'):
                script = script[:-1]
            _json = json.loads(script)
        else:
            _json = client.request(url, output='json')
        if '/list' in url:
            # Paged section listing keyed by its codename.
            codename = split(url)[1].partition('=')[2]
            total_pages = _json['pages']['sectionsByCodename'][codename]['totalPages']
            page = _json['pages']['sectionsByCodename'][codename]['fetchedPage']
            tiles = _json['pages']['sectionsByCodename'][codename]['tilesIds']
            tiles_post_list = [{'id': i} for i in tiles]
        else:
            tiles = []
            if 'GetSeriesDetails' in url:
                # Series endpoint: flatten episode ids across all groups.
                episode_groups = _json['episodeGroups']
                for group in episode_groups:
                    episodes = group['episodes']
                    for episode in episodes:
                        codename = episode['id']
                        tiles.append(codename)
                tiles_post_list = [{'id': i} for i in tiles]
                total_pages = 1
            else:
                # Generic page: collect tile ids of every section.
                codenames = list(_json['pages']['sectionsByCodename'].keys())
                for codename in codenames:
                    tiles_list = _json['pages']['sectionsByCodename'][codename]['tilesIds']
                    tiles.extend(tiles_list)
                tiles_post_list = [{'id': i} for i in tiles]
                total_pages = 1
    else:
        if url.startswith('{"platformCodename":"www"'):
            # Serialized pagination payload produced by an earlier call.
            collection_json = json.loads(url)
            url = collection_json['orCollectionCodenames']
            page = collection_json['page']
        filter_tiles = client.request(FILTER_TILES, post=collection_post(url, page), output='json')
        total_pages = filter_tiles['pagination']['totalPages']
        page = filter_tiles['pagination']['page']
        tiles = filter_tiles['tiles']
        tiles_post_list = [{'id': i['id']} for i in tiles]
    # Prepare the POST payload for the next page, if any.
    if total_pages > 1 and page < total_pages:
        page = page + 1
        next_post = collection_post(url, page)
    else:
        next_post = None
    get_tiles = client.request(GET_TILES, post=tiles_post(tiles_post_list), output='json')
    tiles_list = get_tiles['tiles']
    self_list = []
    for tile in tiles_list:
        # Skip geo-blocked tiles unless geo detection cleared us.
        if tile['isRegionRestrictionEnabled'] and not geo_detect:
            continue
        title = tile['title']
        if 'subtitle' in tile:
            title = ' - '.join([title, tile['subtitle']])
        try:
            if tile.get('isEpisode'):
                # Compose "Season X, Episode Y" (localized) or fall back
                # to the publish date rendered as dd/mm/yyyy.
                try:
                    season = ' '.join([
                        control.lang(30063), str(tile['season']['seasonNumber'])
                    ])
                except KeyError:
                    season = None
                if not season:
                    subtitle = ' '.join(
                        [control.lang(30064), str(tile['episodeNumber'])])
                else:
                    try:
                        subtitle = ''.join([
                            season, ', ', control.lang(30064), ' ',
                            str(tile['episodeNumber'])
                        ])
                    except KeyError:
                        subtitle = tile['publishDate'].partition('T')[0]
                        subtitle = '/'.join(subtitle.split('-')[::-1])
                title = '[CR]'.join([title, subtitle])
        except Exception:
            pass
        images = tile['images']
        fanart = control.fanart()
        if len(images) == 1:
            image = images[0]['url']
        else:
            # Thumbnail preference order: main > hbbtv-icon > photo >
            # hbbtv-background; default to the first image.
            image_list = [
                [i['url'] for i in images if i['isMain']],
                [i['url'] for i in images if i['role'] == 'hbbtv-icon'],
                [i['url'] for i in images if i['role'] == 'photo'],
                [i['url'] for i in images if i['role'] == 'hbbtv-background']
            ]
            image = images[0]['url']
            for i in image_list:
                if i:
                    image = i[0]
                    break
        # Fanart preference order, taking the second candidate when a
        # role has more than one image.
        fanart_list = [[
            i['url'] for i in images if i['role'] == 'photo-details'
        ], [i['url'] for i in images if i['role'] == 'hbbtv-background'], [
            i['url'] for i in images
            if i['role'] == 'photo' and 'ertflix-background' in i['url']
        ]]
        for f in fanart_list:
            if f and len(f) > 1:
                fanart = f[1]
                break
            elif f and len(f) == 1:
                fanart = f[0]
                break
        codename = tile['codename']
        vid = tile['id']
        # First non-empty description wins; otherwise a localized default.
        plots = [
            tile.get('description'), tile.get('shortDescription'),
            tile.get('tinyDescription'), tile.get('subtitle'), tile.get('subTitle')
        ]
        plot = control.lang(30014)
        for p in plots:
            if p:
                plot = client.stripTags(p)
                plot = client.replaceHTMLCodes(plot)
                break
        year = tile.get('year')
        if not year:
            try:
                year = int(tile.get('productionYears')[:4])
            except Exception:
                # No usable year anywhere on the tile.
                year = 2021
        # Series tiles list further episodes; everything else plays.
        if tile.get('hasPlayableStream') and not tile.get('type') == 'ser':
            url = VOD_LINK.format('-'.join([vid, codename]))
    else:
            url = GET_SERIES_DETAILS.format(vid)
        data = {
            'title': title, 'image': image, 'fanart': fanart,
            'url': url, 'plot': plot, 'year': year
        }
        if tile.get('durationSeconds'):
            data.update({'duration': tile.get('durationSeconds')})
        if next_post:
            data.update({
                'next': next_post,
                'nextaction': 'listing',
                'nextlabel': 30500,
                'nexticon': control.addonmedia('next.jpg')
            })
        if tile.get('hasPlayableStream') and not tile.get('type') == 'ser':
            data.update({'action': 'play', 'isFolder': 'False'})
        else:
            data.update({'action': 'listing'})
        self_list.append(data)
    return self_list
def sub_index_listing(url):
    """Scrape a sub-index (show) page into a list of directory dicts.

    Extracts the show name, its episode/season buttons, a description,
    and image/fanart urls from responsive 'sizes' markup; builds one
    item per button plus a final read-plot item.  When no buttons exist
    at all, a single read-plot placeholder item is returned instead.
    """
    html = client.request(url)
    name = client.parseDOM(html, 'h1', attrs={'class': 'tdb-title-text'})[0]
    name = client.replaceHTMLCodes(name)
    links = [
        l for l in list(itertags(html, 'a'))
        if 'su-button' in l.attributes.get('class', '')
    ]
    if not links:
        # Fallback: anchors whose text mentions "Επεισόδια" (episodes).
        links = [
            l for l in list(itertags(html, 'a'))
            if l.text and u'Επεισόδια' in l.text
        ]
    # NOTE(review): the [-2] index assumes the page always carries at
    # least two 'tdb-block-inner td-fix-index' divs — confirm.
    description = client.replaceHTMLCodes(
        client.stripTags(
            client.parseDOM(html, 'div', attrs={'class': 'tdb-block-inner td-fix-index'})[-2]))
    if '</div>' in description:
        description = client.stripTags(description.partition('</div>')[2])
    else:
        description = client.stripTags(description)
    # Responsive image markup: 300w variant for the thumb, first url as fanart.
    image_div = [i for i in list(itertags(html, 'div')) if 'sizes' in i.text]
    image = re.search(r'w, (http.+?\.(?:jpg|png)) 300w', image_div[0].text).group(1)
    fanart = re.search(r'(http.+?\.(?:jpg|png))', image_div[0].text).group(1)
    self_list = []
    for link in links:
        title = ' - '.join([name, client.stripTags(link.text).strip()])
        url = client.replaceHTMLCodes(link.attributes['href'])
        action = 'listing'
        if 'series' in link.attributes['href']:
            # Series links carry the numeric id before the first dash.
            url = split(url)[1].split('-')[0]
            url = GET_SERIES_DETAILS.format(url)
        elif 'vod' in link.attributes['href']:
            action = 'play'
        data = {
            'title': title, 'url': url, 'image': image, 'fanart': fanart,
            'plot': description, 'action': action
        }
        if data['action'] == 'play':
            data.update({'title': name, 'label': title, 'isFolder': 'False'})
        self_list.append(data)
    if not self_list:
        # No playable/listable links: offer the plot as a read-only item.
        self_list.append({
            'title': ''.join([name, ' - ', control.lang(30022)]),
            'action': 'read_plot',
            'isFolder': 'False',
            'isPlayable': 'False',
            'plot': description,
            'image': image,
            'fanart': fanart
        })
    plot_item = {
        'title': ''.join(['[B]', name, ' - ', control.lang(30021), '[/B]']),
        'action': 'read_plot',
        'isFolder': 'False',
        'isPlayable': 'False',
        'plot': description,
        'image': image,
        'fanart': fanart
    }
    self_list.append(plot_item)
    return self_list
def source_maker(url):
    """Scrape a content page and return a dict describing its sources.

    Depending on the url type:
    - 'episode': POST-backed page; returns {'links', 'hosts'} plus an
      optional 'plot',
    - 'view':    single YouTube button; returns one link/host pair,
    - 'music':   resolves the page's search() title via the YouTube API,
    - otherwise: movie page; returns {'links', 'hosts', 'genre'} plus
      'plot' and, when present, the IMDb 'code'.
    """
    if 'episode' in url:
        # Episode urls pack a POST payload after '?'.
        html = client.request(url=url.partition('?')[0], post=url.partition('?')[2])
    else:
        html = client.request(url)
    try:
        # Python 2 compatibility: response may be encoded bytes.
        html = html.decode('utf-8')
    except Exception:
        pass
    if 'episode' in url:
        episodes = re.findall(r'''(?:<a.+?/a>|<p.+?/p>)''', html)
        hl = []
        links = []
        for episode in episodes:
            if '<p style="margin-top:0px; margin-bottom:4px;">' in episode:
                # Multi-part block: host name precedes the anchor list.
                host = client.parseDOM(episode, 'p')[0].split('<')[0]
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(u''.join([host, control.lang(30225), p]))
                for l in lks:
                    links.append(l)
            else:
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(p)
                for l in lks:
                    links.append(l)
        links = [urljoin(GM_BASE, link) for link in links]
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)) for host in hl
        ]
        data = {'links': links, 'hosts': hosts}
        if '<p class="text-muted text-justify">' in html:
            plot = client.parseDOM(html, 'p')[0]
            data.update({'plot': plot})
        return data
    elif 'view' in url:
        link = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})[0]
        return {
            'links': [link],
            'hosts': [''.join([control.lang(30015), 'Youtube'])]
        }
    elif 'music' in url:
        keys_registration()
        title = re.search(r'''search\(['"](.+?)['"]\)''', html).group(1)
        link = get_search(q=title, search_type='video',
                          addon_id=control.addonInfo('id'))[0]['id']['videoId']
        link = YT_URL + link
        return {
            'links': [link],
            'hosts': [''.join([control.lang(30015), 'Youtube'])]
        }
    else:
        try:
            info = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})
            if ',' in info[1]:
                # NOTE(review): lstrip strips a *character set*, not a
                # prefix; it happens to remove 'Είδος:' but would also eat
                # genre names starting with those letters — confirm
                # before tightening.
                genre = info[1].lstrip(u'Είδος:').split(',')
                genre = random.choice(genre)
                genre = genre.strip()
            else:
                genre = info[1].lstrip(u'Είδος:').strip()
        except Exception:  # FIX: was a bare except (caught SystemExit too)
            genre = control.lang(30147)
        div_tags = parsers.itertags(html, 'div')
        buttons = [
            i.text for i in list(div_tags)
            if 'margin: 0px 0px 10px 10px;' in i.attributes.get('style', '')
        ]
        links = []
        hl = []
        for button in buttons:
            if 'btn btn-primary dropdown-toggle' in button:
                # Dropdown button: one host with several parts.
                h = client.stripTags(client.parseDOM(button, 'button')[0]).strip()
                parts = client.parseDOM(button, 'li')
                for part in parts:
                    p = client.parseDOM(part, 'a')[0]
                    link = client.parseDOM(part, 'a', ret='href')[0]
                    hl.append(', '.join([h, p]))
                    links.append(link)
            else:
                h = client.parseDOM(button, 'a')[0]
                link = client.parseDOM(button, 'a', ret='href')[0]
                hl.append(h)
                links.append(link)
        links = [urljoin(GM_BASE, link) for link in links]
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)).replace(
                u'προβολή σε ', control.lang(30015)).replace(u'μέρος ',
                                                             control.lang(30225))
            for host in hl
        ]
        data = {'links': links, 'hosts': hosts, 'genre': genre}
        if 'text-align: justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'style': 'text-align: justify'})[0]
        elif 'text-justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'class': 'text-justify'})[0]
        else:
            plot = control.lang(30085)
        data.update({'plot': plot})
        imdb_code = re.search(r'imdb.+?/title/([\w]+?)/', html)
        if imdb_code:
            code = imdb_code.group(1)
            data.update({'code': code})
        return data
def kodi_auth():
    """Interactive OAuth-style authorization flow driven by PyXBMCt dialogs.

    Collects username/password in a modal window, logs in via the API
    (the 'uh'/'api_type' parameters suggest a Reddit-style endpoint —
    confirm), then shows a permissions prompt; on Allow, exchanges the
    authorization code for tokens via ``get_tokens``.
    """
    aspect_ratio = control.infoLabel('Skin.AspectRatio')

    def obtain_authorization(_cookie, _uh):
        # POST the Allow form and harvest the code from the redirect url.
        data = {
            'authorize': 'Allow',
            'state': state,
            'redirect_uri': redirect_uri,
            'response_type': 'code',
            'client_id': client_id,
            'duration': 'permanent',
            'scope': ' '.join(scope),
            'uh': _uh
        }
        headers = client.request(api_link('authorize'), cookie=_cookie,
                                 post=data, redirect=False, output='headers')
        # Parse the raw header blob into a dict to find the Location url.
        geturl = dict([
            line.partition(': ')[::2] for line in str(headers).splitlines()
        ]).get('location')
        token = dict(parse_qsl(urlparse(geturl).query)).get('code')
        if not token:
            return
        get_tokens(code=token)

    class Prompt(pyxbmct.AddonDialogWindow):
        # Window geometry depends on the active skin's aspect ratio.
        pyxbmct.skin.estuary = control.setting('pyxbmct.estuary') == 'true'
        if aspect_ratio == '4:3':
            geometry = (506, 380, 5, 5)
        else:
            geometry = (676, 380, 5, 5)

        def __init__(self, title, description, _cookie, _uh):
            super(Prompt, self).__init__(title)
            self.allow_button = None
            self.deny_button = None
            self.text_box = None
            self.text = description
            self.cookie = _cookie
            self.uh = _uh
            self.setGeometry(*self.geometry)
            self.set_controls()
            self.set_navigation()
            self.connect(pyxbmct.ACTION_NAV_BACK, self.close)

        def set_controls(self):
            # Text box
            self.text_box = pyxbmct.TextBox()
            self.placeControl(self.text_box, 0, 0, 4, 5)
            self.text_box.setText(self.text)
            self.text_box.autoScroll(1000, 1000, 1000)
            # Allow
            self.allow_button = pyxbmct.Button(control.lang(30150))
            self.placeControl(self.allow_button, 4, 1)
            self.connect(self.allow_button, lambda: self.authorize())
            # Deny
            self.deny_button = pyxbmct.Button(control.lang(30151))
            self.placeControl(self.deny_button, 4, 3)
            self.connect(self.deny_button, self.close)

        def set_navigation(self):
            self.allow_button.controlRight(self.deny_button)
            self.deny_button.controlLeft(self.allow_button)
            self.setFocus(self.allow_button)

        def authorize(self):
            # Run the authorization POST, then dismiss the dialog.
            obtain_authorization(self.cookie, self.uh)
            self.close()

    class UserPass(pyxbmct.AddonDialogWindow):
        pyxbmct.skin.estuary = control.setting('pyxbmct.estuary') == 'true'
        if aspect_ratio == '4:3':
            geometry = (341, 296, 6, 1)
        else:
            geometry = (455, 296, 6, 1)

        def __init__(self, title):
            super(UserPass, self).__init__(title)
            self.username_label = None
            self.user_input = None
            self.password_label = None
            self.pass_input = None
            self.submit_button = None
            self.cancel_button = None
            self.setGeometry(*self.geometry)
            self.set_controls()
            self.set_navigation()
            self.connect(pyxbmct.ACTION_NAV_BACK, self.close)

        def set_controls(self):
            # Username label
            self.username_label = pyxbmct.Label(control.lang(30152))
            self.placeControl(self.username_label, 0, 0)
            # Username input
            self.user_input = pyxbmct.Edit(control.lang(30152))
            self.placeControl(self.user_input, 1, 0)
            # Password label
            self.password_label = pyxbmct.Label(control.lang(30153))
            self.placeControl(self.password_label, 2, 0)
            # Password input
            self.pass_input = pyxbmct.Edit(control.lang(30153), isPassword=True)
            self.placeControl(self.pass_input, 3, 0)
            # Submit button
            self.submit_button = pyxbmct.Button(control.lang(30154))
            self.placeControl(self.submit_button, 4, 0)
            self.connect(self.submit_button, lambda: self.submit(True))
            # Cancel button
            self.cancel_button = pyxbmct.Button(control.lang(30064))
            self.placeControl(self.cancel_button, 5, 0)
            self.connect(self.cancel_button, self.close)

        def set_navigation(self):
            self.user_input.controlDown(self.pass_input)
            self.pass_input.controlUp(self.user_input)
            self.pass_input.controlDown(self.submit_button)
            self.submit_button.controlUp(self.pass_input)
            self.submit_button.controlDown(self.cancel_button)
            self.cancel_button.controlUp(self.submit_button)
            self.setFocus(self.user_input)

        def credentials(self):
            # Read whatever the user typed before the dialog is destroyed.
            return self.user_input.getText(), self.pass_input.getText()

        def submit(self, _submitted=False):
            if _submitted:
                self.close()
                return True

    userpass_window = UserPass(control.name())
    userpass_window.doModal()
    username, password = userpass_window.credentials()
    if not username or not password:
        # User cancelled or left a field empty; abort silently.
        return
    login_url = base_link(True) + '/api/login/' + username
    data = {
        'form_is_compact': 'true',
        'dest': authorization_link(True),
        'passwd': password,
        'user': username,
        'rem': 'on',
        'api_type': 'json',
        'op': 'login'
    }
    del userpass_window
    cookie = client.request(login_url, close=False, post=data, output='cookie')
    html = client.request(authorization_link(True), cookie=cookie)
    try:
        uh = client.parseDOM(html, 'input', attrs={'name': 'uh'}, ret='value')[0]
        permissions = client.parseDOM(html, 'div', attrs={'class': 'access-permissions'})[0]
        notice = client.parseDOM(html, 'p', attrs={'class': 'notice'})[0]
        text = client.replaceHTMLCodes(
            client.stripTags(permissions + '[CR]' + notice))
        # Split the first sentence off as the window title; rest is body.
        text = substitute(r'([.:]) ?', r'\1[CR]', text).partition('[CR]')
        prompt_window = Prompt(title=text[0], description=text[2],
                               _cookie=cookie, _uh=uh)
        prompt_window.doModal()
        del prompt_window
    except IndexError:
        # Login failed (expected form fields missing): show an error dialog.
        control.okDialog(control.name(), control.lang(30114))
def gm_directory(url, params):
    """Build directory items for each host/link pair found on *url*.

    Pulls sources via ``gm_source_maker``, resolves plot/genre fallbacks,
    then visits every link page to scrape the button url, thumbnail,
    title and year for the Kodi listing.  Returns the list of item dicts.
    """
    sources = gm_source_maker(url)
    lists = sources['links']
    items = []
    try:
        description = sources['plot']
    except KeyError:
        # No plot scraped: fall back to the caller-supplied plot
        # (latin-1 round-trip for Python 2), then to a localized default.
        try:
            description = params.get('plot').encode('latin-1')
        except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
            description = params.get('plot')
        if not description:
            description = control.lang(30085)
    try:
        genre = sources['genre']
    except KeyError:
        genre = control.lang(30147)
    for h, l in lists:
        html = client.request(l)
        button = client.parseDOM(html, 'a', attrs={'role': 'button'}, ret='href')[0]
        image = client.parseDOM(html, 'img',
                                attrs={'class': 'thumbnail img-responsive'},
                                ret='src')[0]
        image = urljoin(GM_BASE, image)
        title = client.parseDOM(html, 'h3')[0]
        # First h4 whose last four chars are digits supplies the year.
        year = [
            y[-4:] for y in client.parseDOM(html, 'h4') if str(y[-4:]).isdigit()
        ][0]
        try:
            episode = client.stripTags(client.parseDOM(html, 'h4')[-1])
            if episode[-4:].isdigit():
                # Last h4 is the year line, not an episode line.
                raise IndexError
            episode = episode.partition(': ')[2].strip()
            label = title + ' - ' + episode + SEPARATOR + h
            title = title + ' - ' + episode
        except IndexError:
            label = title + SEPARATOR + h
        # plot = title + '[CR]' + control.lang(30090) + ': ' + year + '[CR]' + description
        if is_py2:
            title = title + ' ({})'.format(year)
        data = {
            'label': label, 'title': title, 'url': button, 'image': image,
            'plot': description, 'year': int(year), 'genre': genre,
            'name': title
        }
        if control.setting('check_streams') == 'true':
            data.update({'query': json.dumps(sources['links'])})
        items.append(data)
    return items
def _top20(self, url):
    """Scrape a radio-station top-20 chart page into ``self.list``.

    Supports three page layouts (Rythmos, Plus, Radiopolis), each with
    its own markup.  Rythmos/Plus entries are resolved through a YouTube
    search; Radiopolis entries carry their own links.  Appends one dict
    per track (label, url, image, title, artist, plot, year) and returns
    ``self.list``.
    """
    from youtube_requests import get_search
    cookie = client.request(url, close=False, output='cookie')
    html = client.request(url, cookie=cookie)
    if url == self.rythmos_top20_url:
        attributes = {'class': 'va-title'}
    elif url == self.plus_url:
        attributes = {'class': 'element element-itemname first last'}
    elif url == self.radiopolis_url_gr or url == self.radiopolis_url_other:
        attributes = {'class': 'thetopdata'}
    items = client.parseDOM(
        html, 'td' if 'radiopolis' in url else 'div', attrs=attributes
    )
    year = str(datetime.now().year)
    for item in items:
        if url == self.rythmos_top20_url:
            label = client.parseDOM(item, 'span', attrs={'class': 'toptitle'})[0]
            label = client.replaceHTMLCodes(label)
            # FIX: raw string — '\s' in a plain literal is a deprecated
            # invalid escape (SyntaxWarning on modern Python).
            label = re.sub(r'\s? ?-\s? ?', ' - ', label)
            image = client.parseDOM(item, 'img', ret='src')[0]
            image = image.replace(' ', '%20')
            title = label.partition(' - ')[2]
            if control.setting('audio_only') == 'true' and control.condVisibility('Window.IsVisible(music)'):
                artist = label.partition(' - ')[0]
            else:
                artist = [label.partition(' - ')[0]]
        elif url == self.plus_url:
            label = item.partition('.')[2].strip()
            title = label.partition('-')[2]
            if control.setting('audio_only') == 'true' and control.condVisibility('Window.IsVisible(music)'):
                artist = label.partition('-')[0]
            else:
                artist = [label.partition('-')[0]]
        elif url == self.radiopolis_url_gr or url == self.radiopolis_url_other:
            a_href = client.parseDOM(item, 'a')
            a_href = ' - '.join(a_href) if len(a_href) == 2 else a_href[0]
            # NOTE(review): .replace('&', '&') is a no-op — presumably it
            # was .replace('&amp;', '&') before an encoding mangle; confirm
            # against upstream history before changing.
            label = client.stripTags(a_href.replace('\"', '').replace('&', '&').replace('\n', ' - '))
            title = label.partition(' - ')[2]
            if control.setting('audio_only') == 'true' and control.condVisibility('Window.IsVisible(music)'):
                artist = label.partition(' - ')[0]
            else:
                artist = [label.partition(' - ')[0]]
        if any([url == self.rythmos_top20_url, url == self.plus_url]):
            # Resolve the track through a YouTube "official" search.
            search = get_search(q=title + ' ' + 'official', search_type='video')[0]
            description = search['snippet']['description']
            year = search['snippet']['publishedAt'][:4]
            vid = search['id']['videoId']
            image = search['snippet']['thumbnails']['default']['url']
            link = yt_url + vid
        elif url == self.radiopolis_url_gr or url == self.radiopolis_url_other:
            links = client.parseDOM(item, 'a', ret='href')
            link = links[1] if len(links) == 2 else links[0]
            # Derive a thumbnail from the video id in the link.
            image = thumb_maker(link.rpartition('/' if 'youtu.be' in link else '=')[2])
            description = None
        self.list.append(
            {
                'label': label, 'url': link, 'image': image, 'title': title,
                'artist': artist, 'plot': description, 'year': int(year)
            }
        )
    return self.list
def items_directory(url, params):
    """Build directory items from cached ``gm_source_maker`` tuple output.

    ``sources`` is the tuple form (kind, hosts, links, [plot, genre, ...]);
    hosts/links are zipped into pairs, plot and genre fall back to
    caller params and localized defaults, and each link page is scraped
    for the button url, thumbnail, title and year.
    """
    sources = cache.get(gm_source_maker, 6, url)
    lists = zip(sources[1], sources[2])
    items = []
    try:
        description = sources[3]
    except IndexError:
        # Tuple has no plot: fall back to caller-supplied plot (latin-1
        # round-trip for Python 2), then a localized default.
        try:
            description = params.get('plot').encode('latin-1')
        except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
            description = params.get('plot')
        if not description:
            description = control.lang(30085)
    try:
        genre = sources[4]
    except IndexError:
        genre = control.lang(30147)
    separator = ' - ' if control.setting('wrap_labels') == '1' else '[CR]'
    for h, l in lists:
        html = client.request(l)
        button = client.parseDOM(html, 'a', attrs={'role': 'button'}, ret='href')[0]
        image = client.parseDOM(html, 'img',
                                attrs={'class': 'thumbnail img-responsive'},
                                ret='src')[0]
        image = urljoin(base_link, image)
        title = client.parseDOM(html, 'h3')[0]
        # First h4 whose last four chars are digits supplies the year.
        year = [
            y[-4:] for y in client.parseDOM(html, 'h4') if str(y[-4:]).isdigit()
        ][0]
        try:
            episode = client.stripTags(client.parseDOM(html, 'h4')[-1])
            if episode[-4:].isdigit():
                # Last h4 is the year line, not an episode line.
                raise IndexError
            episode = episode.partition(': ')[2]
            label = title + ' - ' + episode + separator + h
            title = title + ' - ' + episode
        except IndexError:
            label = title + separator + h
        # plot = title + '[CR]' + control.lang(30090) + ': ' + year + '[CR]' + description
        data = dict(label=label, title=title + ' ({})'.format(year),
                    url=button, image=image, plot=description,
                    year=int(year), genre=genre, name=title)
        items.append(data)
    return items
def gm_source_maker(url):
    """Scrape a content page and return its sources as a tagged tuple.

    Return shapes by url type:
    - ('episode', hosts, links)
    - ('view', link)
    - ('music', link)
    - ('movies', hosts, links, plot, genre, code) — *code* is the IMDb
      title id or None.
    """
    if 'episode' in url:
        # Episode urls pack a POST payload after '?'.
        html = client.request(url=url.partition('?')[0], post=url.partition('?')[2])
        links = client.parseDOM(html, 'a', ret='href')
        links = [urljoin(base_link, link) for link in links]
        hl = client.parseDOM(html, 'a')
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)) for host in hl
        ]
        return 'episode', hosts, links
    elif 'view' in url:
        html = client.request(url)
        link = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})[0]
        return 'view', link
    elif 'music' in url:
        html = client.request(url)
        link = client.parseDOM(html, 'iframe', ret='src',
                               attrs={"class": "embed-responsive-item"})[0]
        return 'music', link
    else:
        html = client.request(url)
        try:
            info = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})
            if ',' in info[1]:
                # NOTE(review): lstrip strips a *character set*, not a
                # prefix; it happens to remove 'Είδος:' but could also eat
                # genre names starting with those letters — confirm.
                genre = info[1].lstrip(u'Είδος:').split(',')
                genre = random.choice(genre)
                genre = genre.strip()
            else:
                genre = info[1].lstrip(u'Είδος:').strip()
        except Exception:  # FIX: was a bare except (caught SystemExit too)
            genre = control.lang(30147)
        links = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})
        hl = client.parseDOM(html, 'a', attrs={"class": "btn btn-primary"})
        if not links or not hl:
            # Fallback layout: grouped dropdown buttons per host.
            buttons = client.parseDOM(html, 'div', attrs={"class": "btn-group"})
            hl = [
                client.stripTags(
                    client.parseDOM(h, 'button', attrs={"type": "button"})[0]).strip('"') + p
                for h in buttons
                for p in client.parseDOM(h, 'a', attrs={'target': '_blank'})
            ]
            links = [
                l for b in buttons for l in client.parseDOM(b, 'a', ret='href')
            ]
        links = [urljoin(base_link, link) for link in links]
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)).replace(
                u'προβολή σε ', control.lang(30015)).replace(
                    u'μέρος ', ', ' + control.lang(30225))
            for host in hl
        ]
        if 'text-align: justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'style': 'text-align: justify'})[0]
        elif 'text-justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'class': 'text-justify'})[0]
        else:
            plot = control.lang(30085)
        code = None
        # FIX: raw string — '\w' in a plain literal is a deprecated
        # invalid escape (SyntaxWarning on modern Python).
        imdb_code = re.search(r'imdb.+?/title/([\w]+?)/', html)
        if imdb_code:
            code = imdb_code.group(1)
        return 'movies', hosts, links, plot, genre, code
def gm_source_maker(url):
    """Scrape a content page and return a dict of (host, link) pairs.

    Return shapes by url type:
    - 'episode': {'links': [(host_label, url), ...]} plus optional 'plot',
    - 'view':    single pair whose host is derived from the link's netloc,
    - 'music':   single YouTube pair resolved via ``list_search``,
    - otherwise: movie page — pairs plus 'genre', 'plot' and, when an
      IMDb link is present, 'code'.
    """
    if 'episode' in url:
        # Episode urls pack a POST payload after '?'.
        html = client.request(url=url.partition('?')[0], post=url.partition('?')[2])
    else:
        html = client.request(url)
    html = py2_uni(html)
    if 'episode' in url:
        episodes = re.findall(r'''(?:<a.+?/a>|<p.+?/p>)''', html)
        hl = []
        links = []
        for episode in episodes:
            if '<p style="margin-top:0px; margin-bottom:4px;">' in episode:
                # Multi-part block: host name precedes the anchor list.
                host = client.parseDOM(episode, 'p')[0].split('<')[0]
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(u''.join([host, control.lang(30225), p]))
                for link_ in lks:
                    links.append(link_)
            else:
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(p)
                for link_ in lks:
                    links.append(link_)
        links = [urljoin(GM_BASE, link) for link in links]
        hosts = [host.replace(u'προβολή στο ', control.lang(30015)) for host in hl]
        links_list = list(zip(hosts, links))
        data = {'links': links_list}
        if '<p class="text-muted text-justify">' in html:
            plot = client.parseDOM(html, 'p')[0]
            data.update({'plot': plot})
        return data
    elif 'view' in url:
        link = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})[0]
        # Derive a display name for the host from the link's domain.
        host = urlparse(link).netloc.replace('www.', '').capitalize()
        return {'links': [(''.join([control.lang(30015), host]), link)]}
    elif 'music' in url:
        title = re.search(r'''search\(['"](.+?)['"]\)''', html).group(1)
        link = list_search(query=title, limit=1)[0]['url']
        return {'links': [(''.join([control.lang(30015), 'Youtube']), link)]}
    else:
        try:
            info = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})
            if ',' in info[1]:
                # NOTE(review): lstrip strips a *character set*, not a
                # prefix; it happens to remove 'Είδος:' but could also eat
                # genre names starting with those letters — confirm.
                genre = info[1].lstrip(u'Είδος:').split(',')
                genre = random.choice(genre)
                genre = genre.strip()
            else:
                genre = info[1].lstrip(u'Είδος:').strip()
        except Exception:  # FIX: was a bare except (caught SystemExit too)
            genre = control.lang(30147)
        div_tags = parsers.itertags(html, 'div')
        buttons = [
            i.text for i in list(div_tags)
            if 'margin: 0px 0px 10px 10px;' in i.attributes.get('style', '')
        ]
        links = []
        hl = []
        for button in buttons:
            if 'btn btn-primary dropdown-toggle' in button:
                # Dropdown button: one host offering several parts.
                host = client.stripTags(client.parseDOM(button, 'button')[0]).strip()
                parts = client.parseDOM(button, 'li')
                for part in parts:
                    part_ = client.parseDOM(part, 'a')[0]
                    link = client.parseDOM(part, 'a', ret='href')[0]
                    hl.append(', '.join([host, part_]))
                    links.append(link)
            else:
                host = client.parseDOM(button, 'a')[0]
                link = client.parseDOM(button, 'a', ret='href')[0]
                hl.append(host)
                links.append(link)
        links = [urljoin(GM_BASE, link) for link in links]
        hosts = [host.replace(
            u'προβολή στο ', control.lang(30015)
        ).replace(
            u'προβολή σε ', control.lang(30015)
        ).replace(
            u'μέρος ', control.lang(30225)
        ) for host in hl]
        links_list = list(zip(hosts, links))
        data = {'links': links_list, 'genre': genre}
        if 'text-align: justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'style': 'text-align: justify'})[0]
        elif 'text-justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'class': 'text-justify'})[0]
        else:
            plot = control.lang(30085)
        data.update({'plot': plot})
        imdb_code = re.search(r'imdb.+?/title/([\w]+?)/', html)
        if imdb_code:
            code = imdb_code.group(1)
            data.update({'code': code})
        return data
def gm_filler(url, params):
    """Build directory item dicts for every host/link pair of *url*.

    Sources come from ``source_maker`` (bypassing the cache when
    CACHE_DEBUG is set).  Plot falls back to the caller's params (with a
    latin-1 round-trip for Python 2) and then to a localized default;
    genre likewise.  Each link page is scraped for the stream button,
    thumbnail, title and year.
    """
    sources = source_maker(url) if CACHE_DEBUG else cache.get(source_maker, 6, url)
    pairs = list(zip(sources['hosts'], sources['links']))
    try:
        description = sources['plot']
    except KeyError:
        try:
            description = params.get('plot').encode('latin-1')
        except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
            description = params.get('plot')
        if not description:
            description = control.lang(30085)
    try:
        genre = sources['genre']
    except KeyError:
        genre = control.lang(30147)
    result = []
    for host, page_link in pairs:
        page = client.request(page_link)
        stream_url = client.parseDOM(page, 'a', attrs={'role': 'button'}, ret='href')[0]
        thumb = client.parseDOM(page, 'img',
                                attrs={'class': 'thumbnail img-responsive'},
                                ret='src')[0]
        thumb = urljoin(GM_BASE, thumb)
        title = client.parseDOM(page, 'h3')[0]
        # First h4 whose last four characters are digits carries the year.
        year = [
            h4[-4:] for h4 in client.parseDOM(page, 'h4') if str(h4[-4:]).isdigit()
        ][0]
        try:
            episode = client.stripTags(client.parseDOM(page, 'h4')[-1])
            if episode[-4:].isdigit():
                # Last h4 is the year line, not an episode line.
                raise IndexError
            episode = episode.partition(': ')[2]
            title = title + ' - ' + episode
            label = title + SEPARATOR + host
        except IndexError:
            label = title + SEPARATOR + host
        # plot = title + '[CR]' + control.lang(30090) + ': ' + year + '[CR]' + description
        result.append({
            'label': label,
            'title': title + ' ({})'.format(year),
            'url': stream_url,
            'image': thumb,
            'plot': description,
            'year': int(year),
            'genre': genre,
            'name': title
        })
    return result