def event_list(self, url):

    """Scrape an events page and append one dict per event to ``self.list``.

    :param url: absolute URL of the events listing page
    :return: ``self.list`` extended with ``{'title', 'url', 'plot', 'image'}`` dicts
    """

    html = client.request(url)

    # One container div per event.
    items = client.parseDOM(html, 'div', attrs={'style': 'margin-bottom: 10px'})

    for item in items:

        title = client.parseDOM(item, 'a', attrs={'class': 'btn btn-default'})[0]

        # BUG FIX: the thumbnail used to be parsed from the whole page
        # (`html`) with index [0], so every event received the first event's
        # image.  Parse it from the current item instead.
        image = client.parseDOM(
            item, 'img', attrs={'class': 'thumbnail img-responsive pull-right'},
            ret='src')[0]
        image = urljoin(GM_BASE, image)

        link = client.parseDOM(item, 'a', attrs={'class': 'btn btn-default'}, ret='href')[0]
        link = urljoin(GM_BASE, link)

        plot = client.parseDOM(item, 'span', attrs={'class': 'pull-right'})[0]

        self.list.append({
            'title': title, 'url': link, 'plot': plot, 'image': image
        })

    return self.list
def magazine_list():
    """Build the magazines directory from a remote plain-text index.

    Each line of the index is expected to look like ``Volume NNN - ...``.
    Returns a tuple ``(magazines, voice_img)`` where ``magazines`` is a list
    of ``{'title', 'image', 'url'}`` dicts and ``voice_img`` is the thumb of
    the newest volume.
    """
    control.content(int(sys.argv[1]), 'images')
    index_txt = client.request(variables.mags_base_url)
    splitted = index_txt.splitlines()
    # The last line holds the newest volume; extract its number and
    # zero-pad it to three digits to match the thumb filename scheme.
    number = re.sub(r'Volume (\d{1,3}).+', r'\1', splitted[-1])
    if len(number) == 1:
        number = '00' + number
    elif len(number) == 2:
        number = '0' + number
    voice_img = urljoin(variables.mags_base_url, 'mag_thumb_{0}.jpg'.format(number))
    magazines = []
    for line in splitted:
        # Localize the word "Volume" for display.
        title = line.replace('Volume', control.lang(30025))
        # 'Volume NNN' -> 'volNNN' directory name on the server.
        image = line.partition(' - ')[0].replace('Volume ', 'vol')
        image = urljoin(variables.mags_base_url, image + '/thumbs' + '/thumb-01.jpg')
        # Plugin callback url; the magazine root is the image url minus '/thumbs...'.
        url = '{0}?action=mag_index&url={1}'.format(sys.argv[0], image.partition('/thumbs')[0])
        data = {'title': title, 'image': image, 'url': url}
        magazines.append(data)
    return magazines, voice_img
def index_cy(self, url):
    """List show boxes on a *.cy site page, with optional pagination.

    Only boxes whose markup contains the current url's path are kept.
    Appends ``{'title', 'image', 'url', 'name'[, 'next']}`` dicts to
    ``self.list`` and returns it.
    """
    html = client.request(url)
    # Keep only the boxes that belong to the requested section.
    items = [i for i in client.parseDOM(html, 'div', attrs={'class': 'box'}) if urlparse(url).path in i]
    # Pagination link is optional; absence simply disables 'next'.
    try:
        next_link = client.parseDOM(html, 'a', attrs={'class': 'pager__link pager__link--next'}, ret='href')[0]
        next_link = urljoin(url.partition('?')[0], next_link)
    except Exception:
        next_link = None
    for item in items:
        # Boxes without an overlay title are not playable entries; skip them.
        try:
            title_field = client.parseDOM(item, 'div', {'class': 'box__overlay-title'})[0]
        except IndexError:
            continue
        # Replace the degree glyph with its Greek word form for display.
        title = client.replaceHTMLCodes(client.parseDOM(title_field, 'a')[0]).replace(u'ᵒ', u' μοίρες').strip()
        subtitle = client.replaceHTMLCodes(client.parseDOM(item, 'div', {'class': 'box__overlay-subtitle'})[0])
        label = ' | '.join([title, subtitle])
        # NOTE: `url` is rebound here to the item's link (the parameter is no
        # longer needed past this point).
        url = client.parseDOM(title_field, 'a', ret='href')[0]
        url = urljoin(self.basecy_link, url + '/webtv')
        image = client.parseDOM(item, 'img', ret='src')[0]
        data = {'title': label, 'image': image, 'url': url, 'name': title}
        if next_link:
            data.update({'next': next_link})
        self.list.append(data)
    return self.list
def music_list(self, url):
    """List songs / albums / artists from a greek-movies music page.

    The page kind is detected from marker strings in the html
    ('songlist', 'albumlist', otherwise artist list).  Appends
    ``{'title', 'url', 'image'[, 'artist']}`` dicts to ``self.list``.
    """
    html = client.request(url)
    # Py2 may return bytes; decode best-effort, ignore on py3 strings.
    try:
        html = html.decode('utf-8')
    except Exception:
        pass
    if 'albumlist' in html:
        # Artist name is the <h4> text before the first embedded anchor.
        artist = [client.parseDOM(html, 'h4')[0].partition(' <a')[0]]
    else:
        artist = None
    # In audio-only music-window mode, flatten the single-element list to a string.
    if control.setting('audio_only') == 'true' and control.condVisibility('Window.IsVisible(music)') and artist is not None:
        artist = ''.join(artist)
    if 'songlist' in html:
        songlist = client.parseDOM(html, 'div', attrs={'class': 'songlist'})[0]
        items = client.parseDOM(songlist, 'li')
    elif 'albumlist' in html:
        albumlist = client.parseDOM(html, 'div', attrs={'class': 'albumlist'})[0]
        items = client.parseDOM(albumlist, 'li')
    else:
        artistlist = client.parseDOM(html, 'div', attrs={'class': 'artistlist'})[0]
        items = client.parseDOM(artistlist, 'li')
    if 'icon/music' in html:
        # Page-provided artwork; last img-responsive is the relevant one.
        icon = client.parseDOM(html, 'img', attrs={'class': 'img-responsive'}, ret='src')[-1]
        icon = urljoin(gm.GM_BASE, icon)
    else:
        icon = iconname('music')
    for item in items:
        title = client.parseDOM(item, 'a')[0]
        link = client.parseDOM(item, 'a', ret='href')[0]
        link = urljoin(gm.GM_BASE, link)
        # Pages backed by the YouTube API need resolving through the source
        # maker (cached for 48h unless CACHE_DEBUG forces a fresh fetch).
        if 'gapi.client.setApiKey' in html:
            if CACHE_DEBUG:
                link = gm.source_maker(url)['links'][0]
            else:
                link = cache.get(gm.source_maker, 48, url)['links'][0]
        data = {'title': title, 'url': link, 'image': icon}
        if artist:
            data.update({'artist': artist})
        self.list.append(data)
    return self.list
def m3u8_picker(url):
    """Offer a quality choice for an m3u8 master playlist.

    If ``url`` is not a master playlist (no variants), it is returned
    unchanged; otherwise the user picks a variant via ``stream_picker``.
    """
    variants = m3u8.load(url).playlists

    if not variants:
        # Plain media playlist; nothing to choose from.
        return url

    qualities, urls = [], []

    for variant in variants:
        # '(1280, 720)' -> '1280x720'; missing resolution becomes 'Auto'.
        label = repr(variant.stream_info.resolution).strip('()').replace(', ', 'x')
        if label == 'None':
            label = 'Auto'
        stream_uri = variant.uri
        if not stream_uri.startswith('http'):
            stream_uri = urljoin(variant.base_uri, stream_uri)
        qualities.append(label)
        urls.append(stream_uri)

    return stream_picker(qualities, urls)
def gm_sports(self):
    """Populate ``self.list`` with the sports categories and render it.

    The sports index page is served from cache (48h) unless CACHE_DEBUG
    forces a live fetch.
    """
    if CACHE_DEBUG:
        page = root(SPORTS)
    else:
        page = cache.get(root, 48, SPORTS)

    icons = [
        'https://www.shareicon.net/data/256x256/2015/11/08/157712_sport_512x512.png',
        'https://www.shareicon.net/data/256x256/2015/12/07/196797_ball_256x256.png'
    ]

    options = re.compile('(<option value.+?</option>)', re.U).findall(page)

    # zip truncates to the shorter sequence, pairing each option with an icon.
    for option, icon in zip(options, icons):
        label = client.parseDOM(option, 'option')[0]
        value = client.parseDOM(option, 'option', ret='value')[0]
        value = client.replaceHTMLCodes(value)
        self.list.append({
            'title': label,
            'action': 'listing',
            'url': urljoin(GM_BASE, value),
            'image': icon
        })

    directory.add(self.list)
def gm_debris(link):
    """Fetch a page (relative to ``base_link``) and return the href of its
    primary button anchor."""
    page = client.request(urljoin(base_link, link))
    return client.parseDOM(page, 'a', ret='href', attrs={"class": "btn btn-primary"})[0]
def m3u8_picker(url):
    """Offer a quality choice for an m3u8 url that may carry piped headers.

    ``url`` may be of the form ``link|header=value&...``; the header part is
    forwarded to ``m3u8.load`` and re-appended to every variant uri.  Returns
    the chosen stream url, or ``url`` unchanged when there is nothing to pick.
    """
    try:
        # No pipe means no embedded headers; fall through to the plain load.
        if '|' not in url:
            raise TypeError
        link, sep, head = url.rpartition('|')
        headers = dict(parse_qsl(head))
        streams = m3u8.load(link, headers=headers).playlists
    except TypeError:
        streams = m3u8.load(url).playlists
    if not streams:
        return url
    qualities = []
    urls = []
    for stream in streams:
        # '(1280, 720)' -> '1280x720'; missing resolution becomes 'Auto'.
        quality = repr(stream.stream_info.resolution).strip('()').replace(
            ', ', 'x')
        if quality == 'None':
            quality = 'Auto'
        uri = stream.uri
        if not uri.startswith('http'):
            uri = urljoin(stream.base_uri, uri)
        qualities.append(quality)
        try:
            if '|' not in url:
                raise TypeError
            # Re-attach '|headers' so the player also gets them.
            urls.append(uri + ''.join(url.rpartition('|')[1:]))
        except TypeError:
            urls.append(uri)
    if len(qualities) == 1:
        # Single variant: inform the user of the quality and keep the url.
        control.infoDialog(control.lang(30220).format(qualities[0]))
        return url
    return stream_picker(qualities, urls)
def music_list(self, url):
    """List songs / albums / artists from a greek-movies music page.

    Detects the page kind from marker strings ('songlist', 'albumlist',
    otherwise artist list) and appends ``{'title', 'url', 'image',
    'artist'}`` dicts to ``self.list``.
    """
    html = client.request(url)

    if 'albumlist' in html:
        # Artist name is the <h4> text before the first embedded anchor.
        artist = [client.parseDOM(html, 'h4')[0].partition(' <a')[0]]
    else:
        artist = None

    # In audio-only music-window mode, flatten the single-element list.
    if control.setting('audio_only') == 'true' and control.condVisibility('Window.IsVisible(music)') and artist is not None:
        artist = ''.join(artist)

    if 'songlist' in html:
        container = client.parseDOM(html, 'div', attrs={'class': 'songlist'})[0]
    elif 'albumlist' in html:
        container = client.parseDOM(html, 'div', attrs={'class': 'albumlist'})[0]
    else:
        container = client.parseDOM(html, 'div', attrs={'class': 'artistlist'})[0]

    entries = client.parseDOM(container, 'li')

    if 'icon/music' in html:
        # Page-provided artwork; last img-responsive is the relevant one.
        icon = urljoin(gm.base_link, client.parseDOM(html, 'img', attrs={'class': 'img-responsive'}, ret='src')[-1])
    else:
        icon = iconname('music')

    for entry in entries:
        label = client.parseDOM(entry, 'a')[0]
        href = urljoin(gm.base_link, client.parseDOM(entry, 'a', ret='href')[0])
        self.list.append({'title': label, 'url': href, 'image': icon, 'artist': artist})

    return self.list
def get_stations(self):
    """Parse the SomaFM channels.xml feed into ``self.list``.

    Each station becomes a dict with stream urls (json-encoded list),
    artwork, listener count and now-playing metadata.  Returns ``self.list``.
    """
    year = datetime.datetime.now().year
    xml = client.request(self.index)
    stations = client.parseDOM(xml, 'channel')
    ids = client.parseDOM(xml, 'channel', ret='id')
    items = zip(ids, stations)
    for sid, item in items:
        # Text nodes are CDATA-wrapped; 'A[' .. ']]>' slicing strips the wrapper.
        station = client.parseDOM(item, 'title')[0].partition('A[')[2][:-3]
        image = client.parseDOM(item, 'image')[0]
        # Collect every *.pls stream element (e.g. <highestpls>, <fastpls>).
        urls = re.findall('<.+?pls.+?>(.+?)</.+?pls>', item)
        streams = json.dumps(urls)
        listeners = client.parseDOM(item, 'listeners')[0]
        now = client.parseDOM(item, 'lastPlaying')[0].partition('A[')[2][:-3]
        # lastPlaying is 'artist - song'.
        song = now.partition(' - ')[2]
        artist = now.partition(' - ')[0]
        history = urljoin(self.main, sid + '/songhistory.html')
        genre = client.parseDOM(item, 'genre')[0]
        description = client.parseDOM(
            item, 'description')[0].partition('A[')[2][:-3]
        # Without caching, show the live song title; otherwise the station name.
        if control.setting('caching') == 'false':
            title = song
        else:
            title = station
        data = {
            'title': title,
            'image': image,
            'url': streams,
            'listeners': int(listeners),
            'history': history,
            'genre': genre,
            'artist': artist,
            'album': station,
            'year': year,
            'comment': description,
            'mediatype': 'music'
        }
        self.list.append(data)
    return self.list
def gm_music(self):
    """Populate ``self.list`` with the music index categories and render it.

    The music page is served from cache (96h).
    """
    html = cache.get(gm.root, 96, gm.music_link)

    for option in re.compile('(<option value=.+?</option>)', re.U).findall(html):
        label = client.parseDOM(option, 'option')[0]
        href = urljoin(gm.base_link, client.parseDOM(option, 'option', ret='value')[0])
        self.list.append({
            'title': label,
            'url': href,
            'image': iconname('music'),
            'action': 'artist_index'
        })

    directory.add(self.list, argv=self.argv)
def __init__(self, argv):
    """Set up scraper state and pick content/info types for the music section."""
    self.list = []
    self.data = []

    # Source endpoints.
    self.mgreekz_id = 'UClMj1LyMRBMu_TG1B1BirqQ'
    self.mgreekz_url = 'http://mad.tv/'
    self.rythmos_url = 'https://www.rythmosfm.gr/'
    self.plus_url = 'http://plusradio.gr/top20'
    self.radiopolis_url_gr = 'http://www.radiopolis.gr/elliniko-radio-polis-top-20/'
    self.radiopolis_url_other = 'http://www.radiopolis.gr/to-kseno-polis-top-20/'
    self.rythmos_top20_url = urljoin(self.rythmos_url, 'community/top20/')

    # Audio-only mode while the music window is visible lists songs;
    # otherwise we list music videos.
    audio_mode = control.setting('audio_only') == 'true' and control.condVisibility('Window.IsVisible(music)')
    self.content = 'songs' if audio_mode else 'musicvideos'
    self.infotype = 'music' if audio_mode else 'video'

    self.argv = argv
def persons_listing(self, url, post):
    """Append person entries (title + url) found on a search result page.

    :param url: search endpoint
    :param post: POST payload forwarded to the request
    :return: ``self.list``
    """
    html = client.request(url, post=post)
    container = client.parseDOM(html, 'div', attrs={'style': 'margin-left:20px;'})[0]

    for person in client.parseDOM(container, 'h4'):
        name = client.parseDOM(person, 'a')[0]
        href = urljoin(GM_BASE, client.parseDOM(person, 'a', ret='href')[0])
        self.list.append({'title': name, 'url': href})

    return self.list
def gm_music(self):
    """Populate ``self.list`` with the music index categories and render it.

    The music page is served from cache (96h) unless CACHE_DEBUG forces a
    live fetch.
    """
    html = gm.root(gm.MUSIC) if CACHE_DEBUG else cache.get(gm.root, 96, gm.MUSIC)

    for option in re.findall(r'(<option value=.+?</option>)', html, re.U):
        label = client.parseDOM(option, 'option')[0]
        href = urljoin(gm.GM_BASE, client.parseDOM(option, 'option', ret='value')[0])
        self.list.append({
            'title': label,
            'url': href,
            'image': iconname('music'),
            'action': 'artist_index'
        })

    directory.add(self.list)
def __init__(self):
    """Initialize SomaFM scraper state."""
    # Accumulators for parsed stations.
    self.list = []
    self.data = []
    # Site root and the channels.xml station index feed.
    self.main = 'http://somafm.com/'
    self.index = urljoin(self.main, 'channels.xml')
def root(url):
    """Parse a greek-movies index page.

    For SPORTS/MUSIC pages the relevant html fragment is returned as-is
    (for further parsing by the caller).  For all other index pages, the
    filter dropdowns are parsed and ``(root_list, groups_list)`` is
    returned, where group values are string ids of localized labels.
    """
    root_list = []
    groups_list = []
    html = client.request(url)
    if url == SPORTS:
        sports_index = client.parseDOM(html, 'div', attrs={'class': 'col-xs-6 text-center'})[0]
        return sports_index
    elif url == MUSIC:
        music_index = client.parseDOM(html, 'div', attrs={'class': 'col-sm-5 col-md-4'})[0]
        return music_index
    else:
        result = client.parseDOM(html, 'div', attrs={
            'class': 'row', 'style': 'margin-bottom: 20px;'
        })[0]
        items = re.findall('(<option ?value=.*?</option>)', result, re.U)
        groups = client.parseDOM(result, 'option', attrs={'selected value': '.+?'})
        # Map the Greek group captions to language-string ids.
        for group in groups:
            if group == u'ΑΡΧΙΚΑ':
                group = group.replace(u'ΑΡΧΙΚΑ', '30213')
            elif group == u'ΕΤΟΣ':
                group = group.replace(u'ΕΤΟΣ', '30090')
            elif group == u'ΚΑΝΑΛΙ':
                group = group.replace(u'ΚΑΝΑΛΙ', '30211')
            elif group == u'ΕΙΔΟΣ':
                group = group.replace(u'ΕΙΔΟΣ', '30200')
            elif group == u'ΠΑΡΑΓΩΓΗ':
                group = group.replace(u'ΠΑΡΑΓΩΓΗ', '30212')
            groups_list.append(group)
        for item in items:
            name = client.parseDOM(item, 'option', attrs={'value': '.+?.php.+?'})[0]
            # Localize 'today' and capitalize the first letter for display.
            name = name.replace(u'σήμερα', control.lang(30268))
            title = name[0].capitalize() + name[1:]
            link = client.parseDOM(item, 'option', ret='value')[0]
            indexer = urlparse(link).query
            index = urljoin(GM_BASE, link)
            # The query-string prefix identifies the filter group:
            # l=initial letter, y=year, c=channel, g=genre, p=production.
            if indexer.startswith('l='):
                group = '30213'
            elif indexer.startswith('y='):
                group = '30090'
            elif indexer.startswith('c='):
                group = '30211'
            elif indexer.startswith('g='):
                group = '30200'
            elif indexer.startswith('p='):
                group = '30212'
            else:
                group = ''
            root_list.append({'title': title, 'group': group, 'url': index})
        return root_list, groups_list
def epeisodia(self, url):
    """List the episodes of a show page.

    Episodes come in three markup variants, distinguished by the
    ``loadEpisode(...)`` onclick argument: 'n' (season.episode numbers),
    'd' (air date), or neither (plain title).  Appends dicts (with a
    'group' key used for sorting) to ``self.list`` and returns it.
    """
    html = client.request(url)
    image = client.parseDOM(html, 'img', attrs={'class': 'thumbnail.*?'}, ret='src')[0]
    image = urljoin(GM_BASE, image)
    year = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})[0]
    year = int(re.findall(r'\d{4}', year, re.U)[0])
    name = client.parseDOM(html, 'h2')[0]
    result = client.parseDOM(html, 'div', attrs={'style': 'margin:20px 0px 20px 0px;'})[0]
    # Pairs of (onclick args, button caption).
    episodes = re.findall(r'onclick="loadEpisode(.*?)">(.*?)</button>', result)
    if str('text-justify') in html:
        plot = client.parseDOM(html, 'p', attrs={'class': 'text-justify'})[0]
    else:
        plot = control.lang(30085)
    info = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})
    genre = info[1].lstrip(u'Είδος:').strip()
    # Abbreviated Greek month names -> zero-padded month numbers.
    dictionary = {
        u'Ιαν': '01', u'Φεβ': '02', u'Μάρ': '03', u'Απρ': '04',
        u'Μάι': '05', u'Ιούν': '06', u'Ιούλ': '07', 'Αύγ': '08',
        u'Σεπ': '09', u'Οκτ': '10', u'Νοέ': '11', u'Δεκ': '12'
    }
    for eid, title in episodes:
        # onclick args look like ('<epid>', '<view>'); build the ajax url.
        link = re.search(r'\'([\w-]+)\', \'(\w{1,2})\'', eid)
        link = EPISODE.format(link.group(1), link.group(2))
        if '\'n\')' in eid:
            group = '1bynumber'
            if '.' in title:
                # 'S.E' caption; fall back to the first digit when the
                # season part is not a clean int.
                try:
                    season = int(title.partition('.')[0])
                except Exception:
                    season = int(title.partition('.')[0][0])
                episode_num = title.partition('.')[2]
                title = control.lang(30066) + ' ' + str(
                    season) + ', ' + control.lang(
                    30067) + ' ' + episode_num
            else:
                title = control.lang(30067) + ' ' + title
        elif '\'d\')' in eid:
            group = '2bydate'
            # Year and month are taken from the headers preceding this
            # episode in the page markup.
            row = result.split(eid)[0]
            y = re.findall(r'<h4.+?bold.+?(\d{4})', row, re.U)[-1]
            m = re.findall(r'width:50px..?>(.+?)<', row, re.U)[-1]
            m = dictionary[m]
            prefix = '0' + title if len(title) == 1 else title
            title = prefix + '-' + m + '-' + y
        else:
            group = '3bytitle'
        separator = ' - ' if control.setting(
            'wrap_labels') == '1' else '[CR]'
        self.list.append({
            'title': name + separator + title,
            'url': link,
            'group': group,
            'name': name,
            'image': image,
            'plot': plot,
            'year': year,
            'genre': genre
        })
    return self.list
import re import json import random from ast import literal_eval as evaluate from youtube_requests import get_search from tulip import cache, client, directory, control, parsers, cleantitle from tulip.log import log_debug from tulip.compat import urljoin, urlparse, range, iteritems from tulip.utils import list_divider from ..modules.themes import iconname from ..modules.constants import YT_URL, CACHE_DEBUG from ..modules.utils import keys_registration, page_menu GM_BASE = 'https://greek-movies.com/' MOVIES = urljoin(GM_BASE, 'movies.php') SHOWS = urljoin(GM_BASE, 'shows.php') SERIES = urljoin(GM_BASE, 'series.php') ANIMATION = urljoin(GM_BASE, 'animation.php') THEATER = urljoin(GM_BASE, 'theater.php') SPORTS = urljoin(GM_BASE, 'sports.php') SHORTFILMS = urljoin(GM_BASE, 'shortfilm.php') MUSIC = urljoin(GM_BASE, 'music.php') SEARCH = urljoin(GM_BASE, 'search.php') PERSON = urljoin(GM_BASE, 'person.php') EPISODE = urljoin(GM_BASE, 'ajax.php?type=episode&epid={0}&view={1}') def root(url): root_list = []
def items_list(self, url, post=None):
    """List movie/show items for an index url, aggregating per-year pages.

    For filtered movie-like urls the site paginates by year, so several
    pages are fetched and concatenated before parsing; otherwise a single
    request (optionally a POST) is parsed.  Appends
    ``{'title', 'url', 'image', 'year', 'name'}`` dicts to ``self.list``.
    """
    indexer = urlparse(url).query

    # BUG FIX: `length` was referenced by the loop below but its assignment
    # had been left inside a commented-out block, causing a NameError.
    # Restored from the commented logic; `any` is used because a url cannot
    # contain both 'shortfilm.php' and 'theater.php' at once.
    if 'movies.php' in url:
        length = 9
    elif any(('shortfilm.php' in url, 'theater.php' in url)):
        length = 6
    else:
        length = 2

    # Build the per-year query fragments to fetch.
    for year in list(range(1, length)):
        if indexer.startswith('l='):
            p = 'y=' + str(year) + '&g=&p='
        elif indexer.startswith('g='):
            p = 'y=' + str(year) + '&l=&p='
        elif indexer.startswith('p='):
            p = 'y=' + str(year) + '&l=&g='
        elif indexer.startswith('c='):
            p = 'y=' + str(year) + '&l=&g='
        else:
            p = ''
        self.years.append(p)

    # NOTE(review): `and` binds tighter than `or` here, so any
    # shortfilm/theater url takes this branch regardless of its query —
    # preserved as-is since callers may rely on it.
    if indexer.startswith(
            ('l=', 'g=', 's=', 'p=', 'c=')
    ) and 'movies.php' in url or 'shortfilm.php' in url or 'theater.php' in url:
        for content in self.years:
            links = GM_BASE + url.rpartition('/')[2].partition(
                '&')[0] + '&' + content
            # Py2 responses may be bytes; decode best-effort.
            try:
                htmls = client.request(links).decode('utf-8')
            except AttributeError:
                htmls = client.request(links)
            self.data.append(htmls)
        result = u''.join(self.data)
        content = client.parseDOM(
            result, 'div', attrs={'class': 'col-xs-6 col-sm-4 col-md-3'})
    else:
        html = client.request(url, post=post)
        content = client.parseDOM(
            html, 'div', attrs={'class': 'col-xs-6 col-sm-4 col-md-3'})

    contents = ''.join(content)
    items = re.findall('(<a.*?href.*?div.*?</a>)', contents, re.U)

    for item in items:
        title = client.parseDOM(item, 'h4')[0]
        image = client.parseDOM(item, 'img', ret='src')[0]
        # Title is 'Name (YYYY...)'; keep the bare name separately.
        name = title.rpartition(' (')[0]
        image = urljoin(GM_BASE, image)
        link = client.parseDOM(item, 'a', ret='href')[0]
        link = urljoin(GM_BASE, link)
        year = re.findall(r'.*?\((\d{4})', title, re.U)[0]
        self.list.append({
            'title': title,
            'url': link,
            'image': image,
            'year': int(year),
            'name': name
        })

    return self.list
def gm_source_maker(url):
    """Resolve a greek-movies url into playable source information.

    Returns a shape that depends on the url kind:
      * 'episode' urls -> ``('episode', hosts, links)``
      * 'view' urls    -> ``('view', link)``
      * 'music' urls   -> ``('music', link)``
      * otherwise      -> ``('movies', hosts, links, plot, genre, code)``
    """
    if 'episode' in url:
        # Episode urls pack the POST payload after '?'.
        html = client.request(url=url.partition('?')[0], post=url.partition('?')[2])
        links = client.parseDOM(html, 'a', ret='href')
        links = [urljoin(base_link, link) for link in links]
        hl = client.parseDOM(html, 'a')
        # Localize the 'watch on ' prefix of each host caption.
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)) for host in hl
        ]
        return 'episode', hosts, links
    elif 'view' in url:
        html = client.request(url)
        link = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})[0]
        return 'view', link
    elif 'music' in url:
        html = client.request(url)
        link = client.parseDOM(html, 'iframe', ret='src', attrs={"class": "embed-responsive-item"})[0]
        return 'music', link
    else:
        html = client.request(url)
        # Genre: second info header; pick one at random when several are listed.
        try:
            info = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})
            if ',' in info[1]:
                genre = info[1].lstrip(u'Είδος:').split(',')
                genre = random.choice(genre)
                genre = genre.strip()
            else:
                genre = info[1].lstrip(u'Είδος:').strip()
        except:
            genre = control.lang(30147)
        links = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})
        hl = client.parseDOM(html, 'a', attrs={"class": "btn btn-primary"})
        # Fallback markup: dropdown button groups with multi-part links.
        if not links or not hl:
            buttons = client.parseDOM(html, 'div', attrs={"class": "btn-group"})
            hl = [
                client.stripTags(
                    client.parseDOM(h, 'button', attrs={"type": "button"
                                                        })[0]).strip('"') + p
                for h in buttons
                for p in client.parseDOM(h, 'a', attrs={'target': '_blank'})
            ]
            links = [
                l for b in buttons for l in client.parseDOM(b, 'a', ret='href')
            ]
        links = [urljoin(base_link, link) for link in links]
        # Localize 'watch on/at ' prefixes and the 'part ' infix.
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)).replace(
                u'προβολή σε ', control.lang(30015)).replace(u'μέρος ', ', ' + control.lang(30225))
            for host in hl
        ]
        if 'text-align: justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'style': 'text-align: justify'})[0]
        elif 'text-justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'class': 'text-justify'})[0]
        else:
            plot = control.lang(30085)
        # Optional IMDb id scraped from any imdb title link on the page.
        code = None
        imdb_code = re.search('imdb.+?/title/([\w]+?)/', html)
        if imdb_code:
            code = imdb_code.group(1)
        return 'movies', hosts, links, plot, genre, code
def _parsed_url(url):
    """Return the parent directory of ``url`` as an absolute URL.

    e.g. ``http://host/a/b/c`` -> ``http://host/a/b``.
    """
    parts = urlparse(url)
    origin = parts.scheme + '://' + parts.netloc
    # POSIX path arithmetic: append '/..' and normalize to drop the last segment.
    parent = posixpath.normpath(parts.path + '/..')
    return urljoin(origin, parent)
def episodes_list_cy(self, url, title, image):
    """List episodes from a *.cy show page or its ajax pagination endpoint.

    ``url`` either points at the show page, or is
    ``views_ajax#<post-payload>`` for subsequent pages.  On failure a
    two-item "go back" placeholder list is produced instead.
    """
    if title:
        # Py2 bytes -> unicode; on py3 (no .decode) just strip the '|' suffix.
        try:
            title = title.decode('utf-8')
            title = title.partition('|')[0]
        except Exception:
            title = title.partition('|')[0]
    if url.startswith(self.views_ajax):
        # Ajax page: POST payload is packed after '#'.
        html = client.request(url.partition('#')[0], post=url.partition('#')[2])
        _json = json.loads(html)
        # The rendered markup sits in the fifth command of the ajax response.
        html = _json[4]['data']
        view_path = dict(parse_qsl(url.partition('#')[2]))['view_path']
        view_args = dict(parse_qsl(url.partition('#')[2]))['view_args']
        page = str(int(dict(parse_qsl(url.partition('#')[2]))['page']) + 1)
    else:
        html = client.request(url)
        view_path = urlparse(url).path
        view_args = '/'.join(view_path.split('/')[2:4])
        page = '1'
    # Prepare the url of the next ajax page.
    next_link = '#'.join(
        [self.views_ajax, self.ajax_post_episodes.format(view_args=view_args, view_path=view_path, page=page)]
    )
    try:
        items = [i for i in client.parseDOM(html, 'div', {'class': 'box'}) if 'play-big' in i]
        if not items:
            raise Exception
        for item in items:
            itemtitle = client.parseDOM(item, 'a')[-1]
            if title:
                label = ' - '.join([title, itemtitle])
            else:
                label = itemtitle
            # NOTE: `url`/`image` parameters are rebound per item from here on.
            url = client.parseDOM(item, 'a', ret='href')[0]
            url = urljoin(self.basecy_link, url)
            image = client.parseDOM(item, 'img', ret='src')[0]
            data = {'title': label, 'image': image, 'url': url, 'next': next_link}
            if title:
                data.update({'name': title})
            self.list.append(data)
    except Exception:
        # No playable boxes: offer non-playable "nothing found / go back" rows.
        self.list = [
            {
                'title': u' - '.join([title, control.lang(30014)]),
                'action': 'back',
                'image': image,
                'isFolder': 'False', 'isPlayable': 'False'
            }
            ,
            {
                'title': control.lang(30013),
                'action': 'back',
                'image': control.icon(),
                'isFolder': 'False', 'isPlayable': 'False'
            }
        ]
    return self.list
def source_maker(url):
    """Resolve a greek-movies url into a dict of playable sources.

    Returns ``{'links': [...], 'hosts': [...]}`` plus optional
    'plot'/'genre'/'code' keys, depending on the url kind
    ('episode', 'view', 'music', or a movie/show page).
    """
    if 'episode' in url:
        # Episode urls pack the POST payload after '?'.
        html = client.request(url=url.partition('?')[0], post=url.partition('?')[2])
    else:
        html = client.request(url)
    # Py2 may return bytes; decode best-effort.
    try:
        html = html.decode('utf-8')
    except Exception:
        pass
    if 'episode' in url:
        # Anchors and the <p> blocks that group multi-part episodes.
        episodes = re.findall(r'''(?:<a.+?/a>|<p.+?/p>)''', html)
        hl = []
        links = []
        for episode in episodes:
            if '<p style="margin-top:0px; margin-bottom:4px;">' in episode:
                # Multi-part block: host caption precedes the part anchors.
                host = client.parseDOM(episode, 'p')[0].split('<')[0]
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(u''.join([host, control.lang(30225), p]))
                for l in lks:
                    links.append(l)
            else:
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(p)
                for l in lks:
                    links.append(l)
        links = [urljoin(GM_BASE, link) for link in links]
        # Localize the 'watch on ' caption prefix.
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)) for host in hl
        ]
        data = {'links': links, 'hosts': hosts}
        if '<p class="text-muted text-justify">' in html:
            plot = client.parseDOM(html, 'p')[0]
            data.update({'plot': plot})
        return data
    elif 'view' in url:
        link = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})[0]
        return {
            'links': [link],
            'hosts': [''.join([control.lang(30015), 'Youtube'])]
        }
    elif 'music' in url:
        keys_registration()
        # The page triggers a YouTube search; replay it through the API.
        title = re.search(r'''search\(['"](.+?)['"]\)''', html).group(1)
        link = get_search(q=title, search_type='video', addon_id=control.addonInfo('id'))[0]['id']['videoId']
        link = YT_URL + link
        return {
            'links': [link],
            'hosts': [''.join([control.lang(30015), 'Youtube'])]
        }
    else:
        # Genre: second info header; pick one at random when several listed.
        try:
            info = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})
            if ',' in info[1]:
                genre = info[1].lstrip(u'Είδος:').split(',')
                genre = random.choice(genre)
                genre = genre.strip()
            else:
                genre = info[1].lstrip(u'Είδος:').strip()
        except:
            genre = control.lang(30147)
        # Source buttons live in divs with this exact inline style.
        div_tags = parsers.itertags(html, 'div')
        buttons = [
            i.text for i in list(div_tags)
            if 'margin: 0px 0px 10px 10px;' in i.attributes.get('style', '')
        ]
        links = []
        hl = []
        for button in buttons:
            if 'btn btn-primary dropdown-toggle' in button:
                # Dropdown: one host caption, several <li> parts.
                h = client.stripTags(client.parseDOM(button, 'button')[0]).strip()
                parts = client.parseDOM(button, 'li')
                for part in parts:
                    p = client.parseDOM(part, 'a')[0]
                    link = client.parseDOM(part, 'a', ret='href')[0]
                    hl.append(', '.join([h, p]))
                    links.append(link)
            else:
                h = client.parseDOM(button, 'a')[0]
                link = client.parseDOM(button, 'a', ret='href')[0]
                hl.append(h)
                links.append(link)
        links = [urljoin(GM_BASE, link) for link in links]
        # Localize 'watch on/at ' prefixes and the 'part ' infix.
        hosts = [
            host.replace(u'προβολή στο ', control.lang(30015)).replace(
                u'προβολή σε ', control.lang(30015)).replace(u'μέρος ', control.lang(30225))
            for host in hl
        ]
        data = {'links': links, 'hosts': hosts, 'genre': genre}
        if 'text-align: justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'style': 'text-align: justify'})[0]
        elif 'text-justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'class': 'text-justify'})[0]
        else:
            plot = control.lang(30085)
        data.update({'plot': plot})
        # Optional IMDb id scraped from any imdb title link on the page.
        imdb_code = re.search(r'imdb.+?/title/([\w]+?)/', html)
        if imdb_code:
            code = imdb_code.group(1)
            data.update({'code': code})
        return data
def gm_source_maker(url):
    """Resolve a greek-movies url into a dict of playable sources.

    Unlike the older variant, sources are returned as a single
    ``'links'`` list of ``(host_caption, url)`` tuples, plus optional
    'plot'/'genre'/'code' keys.
    """
    if 'episode' in url:
        # Episode urls pack the POST payload after '?'.
        html = client.request(url=url.partition('?')[0], post=url.partition('?')[2])
    else:
        html = client.request(url)
    # Normalize py2 bytes to unicode.
    html = py2_uni(html)
    if 'episode' in url:
        # Anchors and the <p> blocks that group multi-part episodes.
        episodes = re.findall(r'''(?:<a.+?/a>|<p.+?/p>)''', html)
        hl = []
        links = []
        for episode in episodes:
            if '<p style="margin-top:0px; margin-bottom:4px;">' in episode:
                # Multi-part block: host caption precedes the part anchors.
                host = client.parseDOM(episode, 'p')[0].split('<')[0]
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(u''.join([host, control.lang(30225), p]))
                for link_ in lks:
                    links.append(link_)
            else:
                pts = client.parseDOM(episode, 'a')
                lks = client.parseDOM(episode, 'a', ret='href')
                for p in pts:
                    hl.append(p)
                for link_ in lks:
                    links.append(link_)
        links = [urljoin(GM_BASE, link) for link in links]
        # Localize the 'watch on ' caption prefix.
        hosts = [host.replace(u'προβολή στο ', control.lang(30015)) for host in hl]
        links_list = list(zip(hosts, links))
        data = {'links': links_list}
        if '<p class="text-muted text-justify">' in html:
            plot = client.parseDOM(html, 'p')[0]
            data.update({'plot': plot})
        return data
    elif 'view' in url:
        link = client.parseDOM(html, 'a', ret='href', attrs={"class": "btn btn-primary"})[0]
        # Derive a display name from the target's hostname.
        host = urlparse(link).netloc.replace('www.', '').capitalize()
        return {'links': [(''.join([control.lang(30015), host]), link)]}
    elif 'music' in url:
        # The page triggers a YouTube search; replay it via list_search.
        title = re.search(r'''search\(['"](.+?)['"]\)''', html).group(1)
        link = list_search(query=title, limit=1)[0]['url']
        return {'links': [(''.join([control.lang(30015), 'Youtube']), link)]}
    else:
        # Genre: second info header; pick one at random when several listed.
        try:
            info = client.parseDOM(html, 'h4', attrs={'style': 'text-indent:10px;'})
            if ',' in info[1]:
                genre = info[1].lstrip(u'Είδος:').split(',')
                genre = random.choice(genre)
                genre = genre.strip()
            else:
                genre = info[1].lstrip(u'Είδος:').strip()
        except:
            genre = control.lang(30147)
        # Source buttons live in divs with this exact inline style.
        div_tags = parsers.itertags(html, 'div')
        buttons = [i.text for i in list(div_tags) if 'margin: 0px 0px 10px 10px;' in i.attributes.get('style', '')]
        links = []
        hl = []
        for button in buttons:
            if 'btn btn-primary dropdown-toggle' in button:
                # Dropdown: one host caption, several <li> parts.
                host = client.stripTags(client.parseDOM(button, 'button')[0]).strip()
                parts = client.parseDOM(button, 'li')
                for part in parts:
                    part_ = client.parseDOM(part, 'a')[0]
                    link = client.parseDOM(part, 'a', ret='href')[0]
                    hl.append(', '.join([host, part_]))
                    links.append(link)
            else:
                host = client.parseDOM(button, 'a')[0]
                link = client.parseDOM(button, 'a', ret='href')[0]
                hl.append(host)
                links.append(link)
        links = [urljoin(GM_BASE, link) for link in links]
        # Localize 'watch on/at ' prefixes and the 'part ' infix.
        hosts = [host.replace(
            u'προβολή στο ', control.lang(30015)
        ).replace(
            u'προβολή σε ', control.lang(30015)
        ).replace(
            u'μέρος ', control.lang(30225)
        ) for host in hl]
        links_list = list(zip(hosts, links))
        data = {'links': links_list, 'genre': genre}
        if 'text-align: justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'style': 'text-align: justify'})[0]
        elif 'text-justify' in html:
            plot = client.parseDOM(html, 'p', attrs={'class': 'text-justify'})[0]
        else:
            plot = control.lang(30085)
        data.update({'plot': plot})
        # Optional IMDb id scraped from any imdb title link on the page.
        imdb_code = re.search(r'imdb.+?/title/([\w]+?)/', html)
        if imdb_code:
            code = imdb_code.group(1)
            data.update({'code': code})
        return data
def gm_directory(url, params):
    """Build directory items for every (host, link) source of ``url``.

    Each linked page is fetched to extract the playable button url,
    artwork, title, year and optional episode caption.  Returns the
    list of item dicts.
    """
    sources = gm_source_maker(url)
    lists = sources['links']
    items = []
    # Prefer the freshly-scraped plot; fall back to the one passed by the
    # caller (legacy latin-1 handling for py2 strings).
    try:
        description = sources['plot']
    except KeyError:
        try:
            description = params.get('plot').encode('latin-1')
        except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
            description = params.get('plot')
        # NOTE(review): assumed to sit inside this except branch — the
        # collapsed source does not show the exact indentation; verify.
        if not description:
            description = control.lang(30085)
    try:
        genre = sources['genre']
    except KeyError:
        genre = control.lang(30147)
    for h, l in lists:
        html = client.request(l)
        button = client.parseDOM(html, 'a', attrs={'role': 'button'}, ret='href')[0]
        image = client.parseDOM(html, 'img', attrs={'class': 'thumbnail img-responsive'}, ret='src')[0]
        image = urljoin(GM_BASE, image)
        title = client.parseDOM(html, 'h3')[0]
        # First <h4> whose last four chars are digits supplies the year.
        year = [
            y[-4:] for y in client.parseDOM(html, 'h4') if str(y[-4:]).isdigit()
        ][0]
        # Episode caption, when present, is the last <h4>; a trailing year
        # means it is not an episode header, handled via IndexError.
        try:
            episode = client.stripTags(client.parseDOM(html, 'h4')[-1])
            if episode[-4:].isdigit():
                raise IndexError
            episode = episode.partition(': ')[2].strip()
            label = title + ' - ' + episode + SEPARATOR + h
            title = title + ' - ' + episode
        except IndexError:
            label = title + SEPARATOR + h
        # plot = title + '[CR]' + control.lang(30090) + ': ' + year + '[CR]' + description
        if is_py2:
            title = title + ' ({})'.format(year)
        data = {
            'label': label,
            'title': title,
            'url': button,
            'image': image,
            'plot': description,
            'year': int(year),
            'genre': genre,
            'name': title
        }
        # When stream checking is enabled, pass all sources along for retries.
        if control.setting('check_streams') == 'true':
            data.update({'query': json.dumps(sources['links'])})
        items.append(data)
    return items
def gm_filler(url, params):
    """Build directory items for every (host, link) source of ``url``.

    Sources come from ``source_maker`` (cached 6h unless CACHE_DEBUG).
    Each linked page is fetched to extract the playable button url,
    artwork, title, year and optional episode caption.  Returns the
    list of item dicts.
    """
    if CACHE_DEBUG:
        sources = source_maker(url)
    else:
        sources = cache.get(source_maker, 6, url)
    lists = list(zip(sources['hosts'], sources['links']))
    items = []
    # Prefer the freshly-scraped plot; fall back to the one passed by the
    # caller (legacy latin-1 handling for py2 strings).
    try:
        description = sources['plot']
    except KeyError:
        try:
            description = params.get('plot').encode('latin-1')
        except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
            description = params.get('plot')
        # NOTE(review): assumed to sit inside this except branch — the
        # collapsed source does not show the exact indentation; verify.
        if not description:
            description = control.lang(30085)
    try:
        genre = sources['genre']
    except KeyError:
        genre = control.lang(30147)
    for h, l in lists:
        html = client.request(l)
        button = client.parseDOM(html, 'a', attrs={'role': 'button'}, ret='href')[0]
        image = client.parseDOM(html, 'img', attrs={'class': 'thumbnail img-responsive'}, ret='src')[0]
        image = urljoin(GM_BASE, image)
        title = client.parseDOM(html, 'h3')[0]
        # First <h4> whose last four chars are digits supplies the year.
        year = [
            y[-4:] for y in client.parseDOM(html, 'h4') if str(y[-4:]).isdigit()
        ][0]
        # Episode caption, when present, is the last <h4>; a trailing year
        # means it is not an episode header, handled via IndexError.
        try:
            episode = client.stripTags(client.parseDOM(html, 'h4')[-1])
            if episode[-4:].isdigit():
                raise IndexError
            episode = episode.partition(': ')[2]
            label = title + ' - ' + episode + SEPARATOR + h
            title = title + ' - ' + episode
        except IndexError:
            label = title + SEPARATOR + h
        # plot = title + '[CR]' + control.lang(30090) + ': ' + year + '[CR]' + description
        data = {
            'label': label,
            'title': title + ' ({})'.format(year),
            'url': button,
            'image': image,
            'plot': description,
            'year': int(year),
            'genre': genre,
            'name': title
        }
        items.append(data)
    return items
def get(self, query):
    """Search subz.xyz for subtitles matching ``query``.

    The query is first parsed as 'Title SxxExx' (episode) and otherwise as
    'Title (year)' (movie).  Candidate result pages are matched through a
    cached (title, year) probe, then subtitle rows are scraped into
    ``self.list``.  Returns ``self.list`` or None on failure.
    """
    try:
        # Try to read a series pattern: 'Title S01E02' / 'Title 1x02' etc.
        try:
            match = re.findall(
                r'(.+?) (?!\d{4})S?(\d{1,2}) ?X?E?(\d{1,2})$',
                query, flags=re.IGNORECASE)[0]
        except Exception:
            match = None
        if not match:
            # Movie branch: optional trailing '(year)'.
            match = re.findall(r'(.+?) *?\(?(\d{4})?\)?$', query)[0]
            if len(match[1]) == 4:
                title, year = match[0], match[1]
            else:
                title = match[0]
            # Strip url-unsafe characters and collapse whitespace.
            query = ' '.join(
                unquote_plus(re.sub('%\w\w', ' ', quote_plus(title))).split())
            url = 'https://subz.xyz/search?q={0}'.format(quote_plus(query))
            result = client.request(url)
            # Drop non-ASCII noise before parsing.
            result = re.sub(r'[^\x00-\x7F]+', ' ', result)
            url = client.parseDOM(result, 'section', attrs={'class': 'movies'})[0]
            url = re.findall('(/movies/\d+)', url)
            # De-duplicate while preserving order.
            url = [x for y, x in enumerate(url) if x not in url[:y]]
            url = [urljoin('https://subz.xyz', i) for i in url]
            # At most 20 candidates, oldest-first.
            url = url[:20][::-1]
            for i in url:
                c = cache.get(self.cache, 2200, i)
                if c is not None:
                    if len(match[1]) == 4:
                        year_check = c[1] == year
                    else:
                        year_check = True
                    if cleantitle.get(
                            c[0]) == cleantitle.get(title) and year_check:
                        # self.r is populated by self.cache as a side effect.
                        try:
                            item = self.r
                        except Exception:
                            item = client.request(i)
                        break
                    else:
                        self.data.append(self.r)
        else:
            # Episode branch.
            title, season, episode = match
            season, episode = '{0}'.format(season), '{0}'.format(episode)
            query = ' '.join(
                unquote_plus(re.sub('%\w\w', ' ', quote_plus(title))).split())
            url = 'https://subz.xyz/search?q={0}'.format(quote_plus(query))
            result = client.request(url)
            result = re.sub(r'[^\x00-\x7F]+', ' ', result)
            url = client.parseDOM(result, 'section', attrs={'class': 'tvshows'})[0]
            url = re.findall('(/series/\d+)', url)
            url = [x for y, x in enumerate(url) if x not in url[:y]]
            url = [urljoin('https://subz.xyz', i) for i in url]
            url = url[:20][::-1]
            for i in url:
                c = cache.get(self.cache, 2200, i)
                if c is not None:
                    if cleantitle.get(c[0]) == cleantitle.get(title):
                        item = i
                        break
            item = '{0}/seasons/{1}/episodes/{2}'.format(
                item, season, episode)
            item = client.request(item)
        # Multiple near-miss pages collected above are joined and searched too.
        if self.data:
            item = '\n\n'.join(self.data)
        item = re.sub(r'[^\x00-\x7F]+', ' ', item)
        items = client.parseDOM(item, 'tr', attrs={'data-id': '.+?'})
    except Exception as e:
        log.log('Subzxyz failed at get function, reason: ' + str(e))
        return
    for item in items:
        try:
            # Download link sits in the last classed <td> of each row.
            r = client.parseDOM(item, 'td', attrs={'class': '.+?'})[-1]
            url = client.parseDOM(r, 'a', ret='href')[0]
            url = client.replaceHTMLCodes(url)
            url = url.replace("'", "").encode('utf-8')
            # Derive a display name from the file part of the url.
            name = url.split('/')[-1].strip()
            name = re.sub('\s\s+', ' ', name)
            name = name.replace('_', '').replace('%20', '.')
            name = client.replaceHTMLCodes(name)
            name = name.encode('utf-8')
            self.list.append({
                'name': name,
                'url': url,
                'source': 'subzxyz',
                'rating': 5
            })
        except Exception as e:
            log.log(
                'Subzxyz failed at self.list formation function, reason: ' + str(e))
            return
    return self.list
def _urijoin(base_uri, path):
    """Join *path* onto *base_uri*.

    Remote bases (anything ``is_url`` accepts) are joined per URL rules
    via ``urljoin``; anything else is treated as a filesystem base:
    surrounding slashes are stripped from *path* and the joined result
    is normalised with ``os.path.normpath``.
    """
    if not is_url(base_uri):
        relative = path.strip('/')
        return os.path.normpath(os.path.join(base_uri, relative))
    return urljoin(base_uri, path)
def wrapper(self, str_input, mode):
    """Google-search greek-movies.com and populate ``self.list``.

    mode 0 → movie/theater results (playable items or directories per the
    'action_type' setting); any other mode → show/series results (episode
    directories).  Each item also gets an 'addBookmark' context-menu
    entry.  Returns ``self.list`` sorted by title.
    """
    # self.google is a search-URL template; str_input is the user query.
    query = self.google.format(str_input.encode('utf-8'), gm.base_link)
    html = client.request(query.replace(' ', '+'), headers=self.UA)
    # Google result headings.
    items = client.parseDOM(html, 'h3', attrs={'class': 'r'})
    for item in items:
        if mode == 0:
            title = client.parseDOM(item, 'a')[0]
        else:
            # NOTE(review): rstrip() takes a *set of characters*, not a
            # suffix, so this may over/under-strip; the '- Greek'
            # fallbacks below compensate.
            title = client.parseDOM(item, 'a')[0].rstrip(u' ‒ Greek-Movies')
        title = client.replaceHTMLCodes(title)
        # Drop Google's <b>/</b> highlight tags.
        title = re.sub('</?b>', '', title)
        # Trim the site-name suffix Google appends to result titles.
        if '- Greek' in title:
            idx = title.rfind('- Greek')
            title = title[:idx].strip()
        elif u'‒ Greek' in title:
            idx = title.rfind(u'‒ Greek')
            title = title[:idx].strip()
        url = client.parseDOM(item, 'a', ret='href')[0]
        # Google redirect links carry the real target between '=' and '&'.
        url = unquote_plus(url.partition('=')[2].partition('&')[0])
        # Keep only links of the catalogue type expected for this mode.
        if mode == 0:
            if all(
                    ['movies.php?m=' not in url, 'theater.php?m=' not in url]):
                continue
        else:
            if all(['shows.php?s=' not in url, 'series.php?s=' not in url]):
                continue
        item_html = client.request(url)
        try:
            thumb = client.parseDOM(item_html, 'img',
                                    attrs={'class': 'thumbnail.*?'},
                                    ret='src')[0]
        except IndexError:
            # Older pages use uppercase tags/attributes.
            thumb = client.parseDOM(item_html, 'IMG', ret='SRC')[0]
        image = urljoin(gm.base_link, thumb)
        year = client.parseDOM(item_html, 'h4',
                               attrs={'style': 'text-indent:10px;'})[0]
        # NOTE(review): strip(u'Έτος:') strips a character *set*, not the
        # literal prefix — works only while the year digits are absent
        # from that set.
        year = int(year.strip(u'Έτος:').strip()[:4])
        # NOTE(review): these parse the plot from *html* (the Google
        # results page), not *item_html* (the detail page) — looks like a
        # bug; confirm intent before changing.
        if 'text-align: justify' in html:
            plot = client.parseDOM(html, 'p',
                                   attrs={'style': 'text-align: justify'})[0]
        elif 'text-justify' in html:
            plot = client.parseDOM(html, 'p',
                                   attrs={'class': 'text-justify'})[0]
        else:
            # Localised "no plot available" fallback.
            plot = control.lang(30085)
        if mode == 0:
            self.list.append({
                'title': title.encode('utf-8'),
                'url': url,
                'image': image.encode('utf-8'),
                'year': year,
                'plot': plot
            })
            # Re-tag every accumulated item each pass (shadows the outer
            # loop's `item`, which is harmless but confusing).
            if control.setting('action_type') == '0' or control.setting(
                    'action_type') == '2':
                for item in self.list:
                    item.update({'action': 'play', 'isFolder': 'False'})
            else:
                for item in self.list:
                    item.update({'action': 'directory'})
        else:
            self.list.append({
                'title': title,
                'url': url,
                'image': image.encode('utf-8'),
                'year': year,
                'plot': plot,
                'action': 'episodes'
            })
    # Attach an 'addBookmark' context-menu entry to every item; the
    # bookmark payload excludes any 'next' pagination key.
    for item in self.list:
        bookmark = dict(
            (k, v) for k, v in iteritems(item) if not k == 'next')
        bookmark['bookmark'] = item['url']
        bookmark_cm = {
            'title': 30080,
            'query': {
                'action': 'addBookmark',
                'url': json.dumps(bookmark)
            }
        }
        item.update({'cm': [bookmark_cm]})
    # NOTE(review): dead guard — if self.list were None the loop above
    # would already have raised TypeError.
    if self.list is None:
        return
    self.list = sorted(self.list, key=lambda k: k['title'])
    return self.list
def items_directory(url, params):
    """Build the list of directory item dicts for a greek-movies page.

    Fetches the cached source map for *url*, then for every
    (host-label, link) pair scrapes the linked page for its button href,
    thumbnail, title and year.  *params* may supply a fallback 'plot';
    localised defaults fill in missing plot/genre.
    """
    sources = cache.get(gm_source_maker, 6, url)
    # Description: prefer the scraped one, fall back to the 'plot' param
    # (latin-1 encoded when possible), then to the localised default.
    try:
        description = sources[3]
    except IndexError:
        plot = params.get('plot')
        try:
            description = plot.encode('latin-1')
        except (UnicodeEncodeError, UnicodeDecodeError, AttributeError):
            description = plot
    if not description:
        description = control.lang(30085)
    try:
        genre = sources[4]
    except IndexError:
        genre = control.lang(30147)
    # Label separator depends on the wrap_labels setting.
    separator = ' - ' if control.setting('wrap_labels') == '1' else '[CR]'
    directory = []
    for host, link in zip(sources[1], sources[2]):
        html = client.request(link)
        button = client.parseDOM(
            html, 'a', attrs={'role': 'button'}, ret='href')[0]
        thumb = client.parseDOM(
            html, 'img',
            attrs={'class': 'thumbnail img-responsive'}, ret='src')[0]
        image = urljoin(base_link, thumb)
        title = client.parseDOM(html, 'h3')[0]
        headings = client.parseDOM(html, 'h4')
        year = [h[-4:] for h in headings if str(h[-4:]).isdigit()][0]
        try:
            # Episode subtitle, when present; a trailing year means the
            # last heading is not an episode line, so take the plain label.
            episode = client.stripTags(headings[-1])
            if episode[-4:].isdigit():
                raise IndexError
            episode = episode.partition(': ')[2]
            title = title + ' - ' + episode
            label = title + separator + host
        except IndexError:
            label = title + separator + host
        directory.append(dict(
            label=label,
            title='{0} ({1})'.format(title, year),
            url=button,
            image=image,
            plot=description,
            year=int(year),
            genre=genre,
            name=title))
    return directory