def cat(self, url):
    """List docheaven documentaries for a category page, then add page links.

    url -- absolute category URL to scrape.

    Fixes: bare ``except:`` narrowed to ``except Exception`` (no longer
    swallows SystemExit/KeyboardInterrupt), and ``query`` is pre-bound so the
    pagination block cannot raise NameError when the first request fails.
    """
    query = None
    try:
        r = client.request(url)
        query = BeautifulSoup(r)
        r = query.findAll('div', attrs={'class': 'post-thumbnail'})
        for items in r:
            href = items.findAll('a')[0]['href'].encode('utf-8')
            img = items.findAll('img')[0]['src'].encode('utf-8')
            title = items.findAll('a')[0]['title'].encode('utf-8')
            title = cleantitle.get2(title)
            href = urlparse.urljoin(self.base_link, href)
            img = urlparse.urljoin(self.base_link, img)
            meta = {"poster": img, "title": title}
            meta = urllib.quote_plus(json.dumps(meta))
            control.addDirMeta(title, href, 'docheaven_resolve', img, control.fanart, meta)
    except Exception:
        # best-effort scrape: a malformed page must not abort the listing
        pass
    try:
        # pagination: every numeric-nav anchor except the current page
        n = query.findAll('div', attrs={'class': 'numeric-nav'})
        for x in n:
            pages = x.findAll('a')
            for p in pages:
                page = p['href'].encode('utf-8')
                page = urlparse.urljoin(self.base_link, page)
                page_title = p.string
                if not page == url:
                    control.addDir("[COLOR yellow]PAGE:[/COLOR] " + page_title, page, 'docheaven_cat', control.fanart, control.fanart)
    except Exception:
        pass
def get_gam_genres(url):  # 3
    """List genre links scraped from the site's WordPress menu items.

    url -- page whose ``menu-item-N`` <li> entries are parsed; the first item
    is skipped (``[1:]``), as are facebook/imdb links.

    Fix: ``except BaseException`` narrowed to ``except Exception`` so
    SystemExit/KeyboardInterrupt are no longer swallowed.
    """
    try:
        r = requests.get(url).text
        r = client.parseDOM(r, 'li', attrs={'id': r'menu-item-\d+'})[1:]
        xbmc.log('POSTs: {}'.format(r))
        for post in r:
            try:
                xbmc.log('POST: {}'.format(post))
                url = client.parseDOM(post, 'a', ret='href')[0]
                name = client.parseDOM(post, 'a')[0]
                name = clear_Title(name).encode('utf-8')
                # skip social/external links that live in the same menu
                if 'facebook' in url or 'imdb' in url:
                    continue
                xbmc.log('NAME: {} | URL: {}'.format(name, url))
                addDir('[B][COLOR white]%s[/COLOR][/B]' % name, url, 4, ART + 'movies.jpg', FANART, '')
            except Exception:
                # one malformed menu entry must not stop the listing
                pass
    except Exception:
        pass
    views.selectView('menu', 'menu-view')
def Get_random(url):  # 8
    """List items from the 'slider1' random carousel.

    Fix: the original swallowed parse failures with ``pass`` and then used
    ``name``/``url``/``icon`` anyway — an uncaught NameError on the first bad
    post, or stale values afterwards. Such posts are now skipped.
    """
    r = client.request(url)
    r = client.parseDOM(r, 'div', attrs={'id': 'slider1'})[0]
    r = client.parseDOM(r, 'div', attrs={'class': 'item'})
    for post in r:
        try:
            url = client.parseDOM(post, 'a', ret='href')[0]
            icon = client.parseDOM(post, 'img', ret='src')[0]
            name = client.parseDOM(post, 'span', attrs={'class': 'ttps'})[0].encode('utf-8')
            name = re.sub(r'\d{4}', '', name)
        except Exception:
            continue  # essential fields missing -> skip this post
        try:
            year = client.parseDOM(post, 'span', attrs={'class': 'ytps'})[0].encode('utf-8')
        except Exception:
            year = 'N/A'
        name = clear_Title(name)
        # drop any original-title prefix before the slash separator
        if '/ ' in name:
            name = name.split('/ ')[1]
        elif '\\ ' in name:
            name = name.split('\\ ')[1]
        name = name + ' ([COLORlime]' + year + '[/COLOR])'
        if 'tvshows' in url or 'syllogh' in url:
            addDir('[B][COLOR white]%s[/COLOR][/B]' % name, url, 11, icon, FANART, '')
        else:
            addDir('[B][COLOR white]%s[/COLOR][/B]' % name, url, 10, icon, FANART, '')
    views.selectView('movies', 'movie-view')
def search(url):  # 35
    """Render search results with per-item year and description.

    Bug fixed: ``year``/``desc`` were parsed from the whole page (``data``),
    so every result showed the first item's metadata; they are now parsed
    from each ``post``.
    """
    control.busy()
    data = client.request(url)
    posts = client.parseDOM(data, 'div', attrs={'class': 'result-item'})
    for post in posts:
        link = client.parseDOM(post, 'a', ret='href')[0]
        poster = client.parseDOM(post, 'img', ret='src')[0]
        title = client.parseDOM(post, 'img', ret='alt')[0]
        title = clear_Title(title).encode('utf-8')
        try:
            year = client.parseDOM(post, 'span', attrs={'class': 'year'})[0]
            desc = client.parseDOM(post, 'div', attrs={'class': 'contenido'})[0]
            desc = re.sub('<.+?>', '', desc)  # strip tags from the synopsis
            desc = desc.encode('utf-8', 'ignore')
        except Exception:
            year = 'N/A'
            desc = 'N/A'
        addDir('[B][COLOR white]{0} [{1}][/COLOR][/B]'.format(title, year), link, 33, poster, FANART, str(desc))
    try:
        # next-page arrow; absent on the last page (IndexError -> no entry)
        np = client.parseDOM(data, 'a', ret='href', attrs={'class': 'arrow_pag'})[-1]
        page = np.split('/')[-1]
        title = '[B][COLORgold]>>>' + Lang(32011).encode('utf-8') + ' [COLORwhite]([COLORlime]%s[/COLOR])[/COLOR][/B]' % page
        addDir(title, np, 34, ART + 'next.jpg', FANART, '')
    except Exception:
        pass
    control.idle()
    views.selectView('movies', 'movie-view')
def get(self):
    """Build the category menu from anchors inside the Categories section."""
    html = client.request(self.base_link)
    sections = re.compile('<h2 class="hidden-xs">Categories</h2(.+?)/ul>', re.DOTALL).findall(html)
    anchor_re = re.compile('<a href="(.+?)">(.+?)<span')
    for section in sections:
        for href, title in anchor_re.findall(section):
            target = urlparse.urljoin(self.base_link, href)
            control.addDir(title, target, 'freedoc_cat', control.fanart, control.fanart)
def gamato_links(url, name, poster):  # 12
    """Resolve playable link(s) for a gamato movie page and list them.

    url -- movie page URL; name -- display title; poster -- fallback poster.
    Adds a Trailer entry when a YouTube iframe is present, then the movie
    entry itself. Any failure aborts silently (returns without listing).
    """
    try:
        data = client.request(url)
        desc = client.parseDOM(data, 'div', attrs={'itemprop': 'description'})[0]
        desc = re.sub('<.+?>', '', desc)  # strip HTML tags from the synopsis
        desc = desc.encode('utf-8', 'ignore')
        try:
            # preferred: inline player config carrying a file/poster pair
            match = re.findall(
                '''file\s*:\s*['"](.+?)['"],poster\s*:\s*['"](.+?)['"]\}''',
                data, re.DOTALL)[0]
            link, _poster = match[0], match[1]
        except IndexError:
            # fallback: first hosting-option iframe on the page
            frame = client.parseDOM(data, 'div', attrs={'id': 'option-\d+'})[0]
            frame = client.parseDOM(frame, 'iframe', ret='src')[0]
            if 'cloud' in frame:
                # sources: ["http://cloudb.me/4fogdt6l4qprgjzd2j6hymoifdsky3tfskthk76ewqbtgq4aml3ior7bdjda/v.mp4"],
                match = client.request(frame)
                try:
                    from resources.lib.modules import jsunpack
                    if jsunpack.detect(match):
                        # page is p.a.c.k.e.d JS -> unpack before scraping
                        match = jsunpack.unpack(match)
                    match = re.findall('sources:\s*\[[\'"](.+?)[\'"]\]', match, re.DOTALL)[0]
                    # append headers so the player sends the right UA/Referer
                    match += '|User-Agent=%s&Referer=%s' % (urllib.quote(
                        client.agent()), frame)
                except IndexError:
                    # retry the same extraction once more on failure
                    from resources.lib.modules import jsunpack as jsun
                    if jsun.detect(match):
                        match = jsun.unpack(match)
                    match = re.findall('sources:\s*\[[\'"](.+?)[\'"]\]', match, re.DOTALL)[0]
                    match += '|User-Agent=%s&Referer=%s' % (urllib.quote(
                        client.agent()), frame)
            else:
                match = frame
            link, _poster = match, poster
        try:
            fanart = client.parseDOM(data, 'div', attrs={'class': 'g-item'})[0]
            fanart = client.parseDOM(fanart, 'a', ret='href')[0]
        except IndexError:
            fanart = FANART
        try:
            trailer = client.parseDOM(data, 'iframe', ret='src')
            trailer = [i for i in trailer if 'youtube' in i][0]
            # NOTE(review): iconimage is presumably a module-level global — confirm.
            addDir('[B][COLOR lime]Trailer[/COLOR][/B]', trailer, 100, iconimage, fanart, str(desc))
        except BaseException:
            pass
        addDir(name, link, 100, poster, fanart, str(desc))
    except BaseException:
        return
    views.selectView('movies', 'movie-view')
def get(self):
    """Root docheaven menu: a Top-100 entry followed by every category link."""
    control.addDir('Top 100', self.base_link + '/popular/', 'docheaven_popular', control.fanart, control.fanart)
    soup = BeautifulSoup(client.request(self.base_link))
    for cat_list in soup.findAll('ul', attrs={'class': 'cat-list'}):
        for anchor in cat_list.findAll('a'):
            link = anchor['href'].encode('utf-8')
            label = anchor.string
            link = urlparse.urljoin(self.base_link, link)
            control.addDir(label, link, 'docheaven_cat', control.fanart, control.fanart)
def years():
    """List release-year links from the METAGLOTISMENO 'releases' nav.

    Fix: a failed parse used to ``pass`` and then call addDir with ``url``/
    ``year`` unbound (NameError on the first bad entry) or stale; bad entries
    are now skipped.
    """
    r = cache.get(client.request, 120, METAGLOTISMENO)
    r = client.parseDOM(r, 'nav', attrs={'class': 'releases'})[0]
    r = client.parseDOM(r, 'li')
    for post in r:
        try:
            url = client.parseDOM(post, 'a', ret='href')[0]
            year = client.parseDOM(post, 'a')[0].encode('utf-8')
        except Exception:
            continue
        addDir('[B][COLOR white]%s[/COLOR][/B]' % year, url, 34, ART + 'movies.jpg', FANART, '')
    views.selectView('menu', 'menu-view')
def year_TV(url):
    """List filter-by-year links for the TV-series home page.

    Fix: a failed parse used to ``pass`` and then call addDir with ``year``
    unbound or stale; bad entries are now skipped.
    """
    r = cache.get(client.request, 120, url)
    r = client.parseDOM(r, 'div', attrs={'id': 'serieshome'})[0]
    r = client.parseDOM(r, 'div', attrs={'class': 'filtro_y'})[0]
    r = client.parseDOM(r, 'li')
    for post in r:
        try:
            url = client.parseDOM(post, 'a', ret='href')[0]
            year = client.parseDOM(post, 'a')[0].encode('utf-8')
        except Exception:
            continue
        addDir('[B][COLOR white]%s[/COLOR][/B]' % year, url, 5, ART + 'tvshows.jpg', FANART, '')
    views.selectView('menu', 'menu-view')
def cat(self, url):
    """List docstorm items for a category page and add next/prev page links.

    url -- absolute category URL.

    Fixes: the duplicated next/prev pagination code is factored into one
    local helper; bare ``except:`` narrowed; ``query`` pre-bound so the
    pagination block cannot raise NameError if the first request fails.
    """
    query = None
    try:
        r = client.request(url)
        query = BeautifulSoup(r)
        r = query.findAll('div', attrs={'class': 'item'})
        for items in r:
            href = items.findAll('a')[0]['href'].encode('utf-8')
            img = items.findAll('img')[0]['src'].encode('utf-8')
            title = items.findAll('img')[0]['alt'].encode('utf-8')
            title = cleantitle.get2(title)
            href = urlparse.urljoin(self.base_link, href)
            img = urlparse.urljoin(self.base_link, img)
            meta = {"poster": img, "title": title}
            meta = urllib.quote_plus(json.dumps(meta))
            control.addDirMeta(title, href, 'docstorm_resolve', img, control.fanart, meta)
    except Exception:
        pass

    def _add_page_links(rel):
        # Add a pagination entry per <link rel="next"/"prev"> tag.
        try:
            for p in query.findAll('link', attrs={'rel': rel}):
                page = p['href'].encode('utf-8')
                page_title = page.split('/')[-1]
                if page_title == '':
                    page_title = '1'  # site omits the number for page one
                page = urlparse.urljoin(self.base_link, page)
                if not page == url:
                    control.addDir("[COLOR yellow]PAGE:[/COLOR] " + page_title, page, 'docstorm_cat', control.fanart, control.fanart)
        except Exception:
            pass

    _add_page_links('next')
    _add_page_links('prev')
def year(url):
    """List filter-by-year links found on the movie home page."""
    html = cache.get(client.request, 120, url)
    html = client.parseDOM(html, 'div', attrs={'id': 'moviehome'})[0]
    html = client.parseDOM(html, 'div', attrs={'class': 'filtro_y'})[0]
    for entry in client.parseDOM(html, 'li'):
        try:
            url = client.parseDOM(entry, 'a', ret='href')[0]
            year = client.parseDOM(entry, 'a')[0].encode('utf-8')
        except IndexError:
            year = '[N/A]'
        addDir('[B][COLOR white]%s[/COLOR][/B]' % year, url, 5, ART + 'movies.jpg', FANART, '')
    views.selectView('menu', 'menu-view')
def gamatokids_top(url):  # 21
    """Render the Top-100 IMDB listing (uncached variant)."""
    markup = requests.get(url).text
    for item in client.parseDOM(markup, 'div', attrs={'class': 'top-imdb-item'}):
        try:
            label = clear_Title(client.parseDOM(item, 'a')[-1])
            href = client.parseDOM(item, 'a', ret='href')[0]
            thumb = client.parseDOM(item, 'img', ret='src')[0]
            addDir('[B][COLOR white]{0}[/COLOR][/B]'.format(label), href, 12, thumb, FANART, 'Top 100 IMDB')
        except IndexError:
            pass
    views.selectView('movies', 'movie-view')
def get(self):
    """List snagfilms categories from the /categories/ slider items.

    Fix: bare ``except:`` narrowed to ``except Exception`` (still
    best-effort per slider item).
    """
    r = urlparse.urljoin(self.base_link, "/categories/")
    r = client.request(r)
    r = BeautifulSoup(r)
    r = r.findAll('div', attrs={'class': re.compile('snag-slider-item\s*')})
    for items in r:
        try:
            url = items['data-permalink'].encode('utf-8')
            title = items['data-title'].encode('utf-8')
            img = items.findAll('img')[0]['src'].encode('utf-8')
            url = urlparse.urljoin(self.base_link, url)
            control.addDir(title, url, 'snagfilms_cat', img, control.fanart)
        except Exception:
            pass
def gamatokids_top(url):  # 21
    """Render the featured (w_item_a) listing with title and year."""
    markup = requests.get(url).text
    for item in client.parseDOM(markup, 'article', attrs={'class': 'w_item_a'}):
        try:
            label = clear_Title(client.parseDOM(item, 'h3')[0])
            href = client.parseDOM(item, 'a', ret='href')[0]
            thumb = client.parseDOM(item, 'img', ret='src')[0]
            when = client.parseDOM(item, 'span', attrs={'class': 'wdate'})[0]
            addDir('[B][COLOR white]{0} [{1}][/COLOR][/B]'.format(label, when), href, 12, thumb, FANART, 'Προτεινόμενα')
        except IndexError:
            pass
    views.selectView('movies', 'movie-view')
def gamatokids_top(url):
    """Render the cached Top-100 IMDB listing."""
    markup = cache.get(client.request, 4, url)
    for item in client.parseDOM(markup, 'div', attrs={'class': 'top-imdb-item'}):
        try:
            label = clear_Title(client.parseDOM(item, 'a')[-1]).encode('utf-8')
            href = client.parseDOM(item, 'a', ret='href')[0]
            thumb = client.parseDOM(item, 'img', ret='src')[0]
            addDir('[B][COLOR white]{0}[/COLOR][/B]'.format(label), href, 12, thumb, FANART, 'Top 100 IMDB')
        except BaseException:
            pass
    views.selectView('movies', 'movie-view')
def Get_TV_Genres(url):  # 7
    """List TV-genre links with their item counts.

    Fix: a failed parse used to ``pass`` and then use ``name``/``url``/
    ``items`` anyway (NameError on the first bad post, stale values after);
    such posts are now skipped.
    """
    r = cache.get(client.request, 120, url)
    r = client.parseDOM(r, 'div', attrs={'id': 'serieshome'})[0]
    r = client.parseDOM(r, 'div', attrs={'class': 'categorias'})[0]
    r = client.parseDOM(r, 'li', attrs={'class': 'cat-item.+?'})
    for post in r:
        try:
            url = client.parseDOM(post, 'a', ret='href')[0]
            name = client.parseDOM(post, 'a')[0]
            name = re.sub(r'\d{4}', '', name)
            items = client.parseDOM(post, 'span')[0].encode('utf-8')
        except Exception:
            continue
        name = clear_Title(name) + ' ([COLORlime]' + items + '[/COLOR])'
        addDir('[B][COLOR white]%s[/COLOR][/B]' % name, url, 5, ART + 'tvshows.jpg', FANART, '')
    views.selectView('menu', 'menu-view')
def Get_epoxiakes(url):  # 19
    """List seasonal titles from the 'slider2' carousel.

    Fix: posts whose essential fields fail to parse are skipped instead of
    leaving ``name``/``url``/``icon`` unbound (NameError) or stale.
    """
    try:
        r = client.request(url)
        r = client.parseDOM(r, 'div', attrs={'id': 'slider2'})[0]
        # NOTE(review): parseDOM(...)[0] raises IndexError rather than
        # returning None, so this branch looks unreachable — kept as-is.
        if r is None:
            control.infoDialog(
                'Δεν υπάρχουν διαθέσιμοι τίτλοι αυτήν την περίοδο', NAME, ICON, 7000)
        else:
            r = client.parseDOM(r, 'div', attrs={'class': 'item'})
    except Exception:
        r = []
    for post in r:
        try:
            url = client.parseDOM(post, 'a', ret='href')[0]
            icon = client.parseDOM(post, 'img', ret='src')[0]
            name = client.parseDOM(post, 'span', attrs={'class': 'ttps'})[0].encode('utf-8')
            name = re.sub(r'\d{4}', '', name)
        except Exception:
            continue
        try:
            year = client.parseDOM(post, 'span', attrs={'class': 'ytps'})[0].encode('utf-8')
        except Exception:
            year = 'N/A'
        name = clear_Title(name)
        # drop any original-title prefix before the slash separator
        if '/ ' in name:
            name = name.split('/ ')[1]
        elif '\\ ' in name:
            name = name.split('\\ ')[1]
        name = name + ' ([COLORlime]' + year + '[/COLOR])'
        if 'tvshows' in url or 'syllogh' in url:
            addDir('[B][COLOR white]%s[/COLOR][/B]' % name, url, 11, icon, FANART, '')
        else:
            addDir('[B][COLOR white]%s[/COLOR][/B]' % name, url, 10, icon, FANART, '')
    views.selectView('movies', 'movie-view')
def Series():
    """TV-series submenu: genres, years, the animation genre, and all shows."""
    entries = (
        (Lang(32006), BASEURL, 7, 'genre.jpg'),
        (Lang(32007), BASEURL, 16, 'etos.jpg'),
        (Lang(32010), BASEURL + 'tvshows-genre/κινούμενα-σχέδια/', 5, 'tvshows.jpg'),
        (Lang(32009), BASEURL + 'tvshows/', 5, 'tvshows.jpg'),
    )
    for label, link, mode, art in entries:
        addDir('[B][COLOR orangered]' + label + '[/COLOR][/B]', link, mode, ART + art, FANART, '')
    views.selectView('menu', 'menu-view')
def menu():
    """Kids root menu for the site behind ``Baseurl``."""
    rows = (
        ('yellow', Lang(32004).encode('utf-8'), Baseurl + 'genre/kids/', 34, ART + 'dub.jpg'),
        ('yellow', Lang(32010).encode('utf-8'), Baseurl + 'genre/κινούμενα-σχέδια/', 34, ART + 'dub.jpg'),
        ('gold', Lang(32022).encode('utf-8'), Baseurl + 'genre/christmas/', 34, ART + 'mas.jpg'),
        ('gold', Lang(32002).encode('utf-8'), Baseurl, 35, ICON),
    )
    for colour, label, link, mode, art in rows:
        addDir('[B][COLOR %s]%s[/COLOR][/B]' % (colour, label), link, mode, art, FANART, '')
    views.selectView('menu', 'menu-view')
def gamato_kids(url):  # 4
    """List kids' movies with plot, year and next-page entry.

    Bug fixed: when the pagination href ended with '/', the page number was
    taken as ``np[-2]`` — a single character ('page/12/' -> '1') — instead of
    the number after 'page/'. Also dropped a redundant year re-check (the
    regex already guarantees four digits).
    """
    data = requests.get(url).text
    posts = client.parseDOM(data, 'article', attrs={'class': 'item movies'})
    for post in posts:
        try:
            plot = re.findall('''texto["']>(.+?)</div> <div''', post, re.DOTALL)[0]
        except IndexError:
            plot = 'N/A'
        desc = client.replaceHTMLCodes(plot)
        desc = six.ensure_str(desc, encoding='utf-8')
        try:
            title = client.parseDOM(post, 'h4')[0]
            year = re.findall(r'<span>.*?\s*(\d{4})</span>', post, re.DOTALL)[0]
        except IndexError:
            # fall back to the image alt text when no <h4>/year is present
            title = client.parseDOM(post, 'img', ret='alt')[0]
            year = 'N/A'
        year = '[COLORlime]{}[/COLOR]'.format(year)
        title = clear_Title(title)
        link = clear_Title(client.parseDOM(post, 'a', ret='href')[0])
        poster = clear_Title(client.parseDOM(post, 'img', ret='src')[0])
        addDir('[B][COLOR white]{0} [{1}][/COLOR][/B]'.format(title, year), link, 12, poster, FANART, desc)
    try:
        np = client.parseDOM(data, 'a', ret='href', attrs={'class': 'arrow_pag'})[-1]
        np = clear_Title(np)
        page = re.findall(r'page/(\d+)', np)[0]  # works with or without trailing slash
        title = '[B][COLORgold]>>>' + Lang(
            32011) + ' [COLORwhite]([COLORlime]%s[/COLOR])[/COLOR][/B]' % page
        addDir(title, np, 4, ART + 'next.jpg', FANART, '')
    except IndexError:
        pass
    views.selectView('movies', 'movie-view')
def metaglotismeno(url):  # 34
    """List dubbed-movie posts from a metaglotismeno listing page.

    url -- listing page URL. Adds one entry per <article id="post-N"> with
    plot, title and poster, then a next-page entry when the right-chevron
    pagination link exists.
    """
    data = client.request(url)
    posts = client.parseDOM(data, 'div', attrs={'class': 'items'})[0]
    posts = client.parseDOM(posts, 'article', attrs={'id': r'post-\d+'})
    for post in posts:
        try:
            plot = client.parseDOM(post, 'div', attrs={'class': 'texto'})[0]
        except IndexError:
            plot = 'N/A'
        desc = client.replaceHTMLCodes(plot)
        desc = desc.encode('utf-8')
        try:
            title = client.parseDOM(post, 'h3')[0]
        except BaseException:
            # fall back to the poster's alt text when no <h3> is present
            title = client.parseDOM(post, 'img', ret='alt')[0]
        # try:
        #     year = client.parseDOM(data, 'div', {'class': 'metadata'})[0]
        #     year = client.parseDOM(year, 'span')[0]
        #     year = '[COLOR lime]({0})[/COLOR]'.format(year)
        # except IndexError:
        #     year = '(N/A)'
        title = clear_Title(title)
        title = '[B][COLOR white]{}[/COLOR][/B]'.format(title)
        link = client.parseDOM(post, 'a', ret='href')[0]
        link = client.replaceHTMLCodes(link).encode('utf-8', 'ignore')
        poster = client.parseDOM(post, 'img', ret='src')[0]
        poster = client.replaceHTMLCodes(poster).encode('utf-8', 'ignore')
        addDir(title, link, 33, poster, FANART, desc)
    try:
        # pagination: pick the anchor carrying the right-chevron icon
        np = client.parseDOM(data, 'div', attrs={'class': 'resppages'})[0]
        np = dom.parse_dom(np, 'a', req='href')
        np = [
            i.attrs['href'] for i in np if 'icon-chevron-right' in i.content
        ][0]
        page = re.findall(r'page/(\d+)/', np)[0]
        title = '[B][COLORgold]>>>' + Lang(32011).encode('utf-8') +\
            ' [COLORwhite]([COLORlime]{}[/COLOR])[/COLOR][/B]'.format(page)
        addDir(title, np.encode('utf-8'), 34, ART + 'next.jpg', FANART, '')
    except BaseException:
        pass
    views.selectView('movies', 'movie-view')
def gamatokids():
    """GAMATO kids menu: dubbed genre, all genres, Top-250, and search."""
    dubbed = GAMATO + 'genre/%ce%bc%ce%b5%cf%84%ce%b1%ce%b3%ce%bb%cf%89%cf%84%ce%b9%cf%83%ce%bc%ce%ad%ce%bd%ce%b1/'
    addDir('[B][COLOR yellow]' + Lang(32004).encode('utf-8') + '[/COLOR][/B]', dubbed, 4, ART + 'dub.jpg', FANART, '')
    addDir('[B][COLOR yellow]' + Lang(32006).encode('utf-8') + '[/COLOR][/B]', GAMATO, 3, ART + 'genre.jpg', FANART, '')
    addDir('[B][COLOR yellow]TOP 250[/COLOR][/B]', GAMATO + 'top-imdb/', 21, ART + 'top.png', FANART, '')
    addDir('[B][COLOR gold]' + Lang(32002).encode('utf-8') + '[/COLOR][/B]', GAMATO, 18, ICON, FANART, '')
    views.selectView('menu', 'menu-view')
def get_links(name, url, iconimage, description):  #
    """Resolve hoster links for a movie page via the dooplay AJAX endpoint.

    name -- display title; url -- movie page; iconimage -- poster;
    description -- plot text. Adds one entry per player option (Trailer
    entries for YouTube iframes) or a 'NO LINKS' placeholder on failure.
    """
    # try:
    headers = {'Referer': url}
    data = client.request(url)
    try:
        # gallery image used as fanart backdrop, when present
        back = client.parseDOM(data, 'div', {'id': 'dt_galery'})[0]  # dt_galery
        back = client.parseDOM(back, 'a', ret='href')[0]
    except IndexError:
        back = FANART
    try:
        frames = client.parseDOM(data, 'div', {'id': 'playeroptions'})[0]
        frames = dom.parse_dom(frames, 'li', attrs={'class': 'dooplay_player_option'}, req=['data-post', 'data-nume', 'data-type'])
        for frame in frames:
            # each option is resolved through the WP admin-ajax endpoint
            post = 'action=doo_player_ajax&post=%s&nume=%s&type=%s' % \
                (frame.attrs['data-post'], frame.attrs['data-nume'], frame.attrs['data-type'])
            p_link = 'https://metaglotismeno.online/wp-admin/admin-ajax.php'
            flink = client.request(p_link, post=post, headers=headers)
            flink = client.parseDOM(flink, 'iframe', ret='src')[0]
            if '=trailer' in post and 'youtu' in flink:
                addDir('[B][COLOR white]%s | [B][COLOR lime]Trailer[/COLOR][/B]' % name, flink, 100, iconimage, FANART, '')
            else:
                host = __top_domain(flink)
                title = '{0} [B][COLOR white]| {1}[/COLOR][/B]'.format(name, host.capitalize())
                addDir(title, flink, 100, iconimage, back, str(description))
    except BaseException:
        # any failure above yields a single placeholder entry
        title = '[B][COLOR white]NO LINKS[/COLOR][/B]'
        addDir(title, '', 'bug', iconimage, back, str(description))
    # except BaseException:
    #     pass
    views.selectView('movies', 'movie-view')
def search_menu():  # 6
    """Search menu: 'new search' entry plus the saved searches from the DB.

    Fixes: the sqlite connection was never closed (leak) — now closed in a
    ``finally``; the unused ``lst`` accumulator is removed.
    """
    addDir(Lang(32024), 'new', 26, ICON, FANART, '')
    dbcon = database.connect(control.searchFile)
    try:
        dbcur = dbcon.cursor()
        try:
            dbcur.execute(
                """CREATE TABLE IF NOT EXISTS Search (url text, search text)""")
        except Exception:
            pass
        dbcur.execute("SELECT * FROM Search ORDER BY search")
        delete_option = False
        for (url, search) in dbcur.fetchall():
            url = quote_plus(url)
            domain = 'GAMATOKIDS' if 'gamato' in url else 'TENIES-ONLINE'
            title = '[B]%s[/B] - [COLORgold][B]%s[/COLOR][/B]' % (six.ensure_text(
                search, encoding='utf-8'), domain)
            delete_option = True
            addDir(title, url, 26, ICON, FANART, '')
        dbcur.close()
    finally:
        dbcon.close()  # previously leaked
    if delete_option:
        addDir(Lang(32039), '', 29, ICON, FANART, '')
    views.selectView('movies', 'movie-view')
def search_menu():  # 6
    """Search menu (py2 variant): 'new search' plus saved searches.

    Fix: the sqlite connection was never closed (leak) — now closed in a
    ``finally``; the unused ``lst`` accumulator is removed.
    """
    addDir(Lang(32024).encode('utf-8'), 'new', 26, ICON, FANART, '')
    try:
        from sqlite3 import dbapi2 as database
    except BaseException:
        from pysqlite2 import dbapi2 as database
    dbcon = database.connect(control.searchFile)
    try:
        dbcur = dbcon.cursor()
        try:
            dbcur.execute(
                """CREATE TABLE IF NOT EXISTS Search (url text, search text)""")
        except Exception:
            pass
        dbcur.execute("SELECT * FROM Search ORDER BY search")
        delete_option = False
        for (url, search) in dbcur.fetchall():
            domain = 'GAMATOKIDS' if 'gamato' in url else 'PAIDIKESTAINIES'
            title = '[B]%s[/B] - [COLORgold][B]%s[/COLOR][/B]' % (
                search.encode('utf-8'), domain)
            delete_option = True
            addDir(title, url, 26, ICON, FANART, '')
        dbcur.close()
    finally:
        dbcon.close()  # previously leaked
    if delete_option:
        addDir(Lang(32039).encode('utf-8'), '', 29, ICON, FANART, '')
    views.selectView('movies', 'movie-view')
def get(self):
    """Root docstorm menu: a Top-100 entry followed by every category link.

    Fix: bare ``except:`` narrowed to ``except Exception``.
    """
    control.addDir('Top 100', self.base_link + '/top-100-documentary-films/', 'docstorm_cat', control.fanart, control.fanart)
    r = client.request(self.base_link)
    r = BeautifulSoup(r)
    r = r.findAll('li', attrs={'class': re.compile('cat-item')})
    for items in r:
        try:
            u = items.findAll('a')
            for s in u:
                url = s['href'].encode('utf-8')
                title = s.string
                print("DOCSTORM ITEMS", u)  # debug trace kept from original
                url = urlparse.urljoin(self.base_link, url)
                control.addDir(title, url, 'docstorm_cat', control.fanart, control.fanart)
        except Exception:
            pass
def Peliculas():
    """Movies submenu: full listing, genres, and years."""
    header = '[B][COLOR orangered]%s[/COLOR][/B]'
    addDir(header % Lang(32008), BASEURL, 5, ART + 'movies.jpg', FANART, '')
    addDir(header % Lang(32006), BASEURL, 3, ART + 'genre.jpg', FANART, '')
    addDir(header % Lang(32007), BASEURL, 15, ART + 'etos.jpg', FANART, '')
    views.selectView('menu', 'menu-view')
def gamato_kids(url):  # 4
    """List kids' movies (py2 variant) with plot and next-page entry.

    Bug fixed: when the pagination href ended with '/', the page number was
    taken as ``np[-2]`` — a single character ('page/12/' -> '1') — instead of
    the number after 'page/'.
    """
    data = client.request(url)
    posts = client.parseDOM(data, 'article', attrs={'class': 'item movies'})
    for post in posts:
        try:
            plot = re.findall('''texto["']>(.+?)</div> <div''', post, re.DOTALL)[0]
        except IndexError:
            plot = 'N/A'
        desc = client.replaceHTMLCodes(plot)
        desc = desc.encode('utf-8')
        try:
            title = client.parseDOM(post, 'h4')[0]
        except BaseException:
            # fall back to the poster's alt text when no <h4> is present
            title = client.parseDOM(post, 'img', ret='alt')[0]
        title = clear_Title(title).encode('utf-8', 'ignore')
        link = client.parseDOM(post, 'a', ret='href')[0]
        link = client.replaceHTMLCodes(link).encode('utf-8', 'ignore')
        poster = client.parseDOM(post, 'img', ret='src')[0]
        poster = client.replaceHTMLCodes(poster).encode('utf-8', 'ignore')
        addDir('[B][COLOR white]%s[/COLOR][/B]' % (title), link, 12, poster, FANART, desc)
    try:
        np = client.parseDOM(data, 'a', ret='href', attrs={'class': 'arrow_pag'})[-1]
        page = re.findall(r'page/(\d+)', np)[0]  # works with or without trailing slash
        title = '[B][COLORgold]>>>' + Lang(32011).encode(
            'utf-8'
        ) + ' [COLORwhite]([COLORlime]%s[/COLOR])[/COLOR][/B]' % page
        addDir(title, np.encode('utf-8'), 4, ART + 'next.jpg', FANART, '')
    except BaseException:
        pass
    views.selectView('movies', 'movie-view')
def Get_epis_links(name, url):  # 11
    """List hoster links for one episode page (login-gated site).

    name -- display title; url -- episode page. Logs in via cached cookie,
    scrapes links out of 'easySpoilerRow' cells, resolves adf.ly shorteners,
    and filters out blocked hosts before listing.
    """
    lcookie = cache.get(_Login, 4, BASEURL)
    # NOTE(review): positional client.request args — presumably (close, redirect,
    # error, proxy, post, headers, mobile, XHR, limit, referer, cookie); confirm
    # against the client module.
    OPEN = cache.get(client.request, 4, url, True, True, False, None, None, None, False, None, None, lcookie)
    #Regex2 = re.compile('<a href="(http[s]?://adf.ly.+?|http[s]?://vidlox.+?|http[s]?://openload.+?|http[s]?://vidto.+?|http[s]?://streamin.+?|http[s]?://flashx.+?)".+?target="_blank".*?>(.*?)</a>', re.DOTALL).findall(OPEN)
    data = client.parseDOM(OPEN, 'td', attrs={'class': 'easySpoilerRow'})
    links = []
    for i in data:
        # pair each anchor's href with its label text
        links += zip(
            client.parseDOM(i, 'a', ret='href', attrs={'target': '_blank'}),
            client.parseDOM(i, 'a'))
    description = Sinopsis(url)
    trailer = Trailer(url)
    # NOTE(review): iconimage is presumably a module-level global — confirm.
    addDir('[B][COLOR white]%s | [B][COLOR lime]Trailer[/COLOR][/B]' % name,
           trailer, 100, iconimage, FANART, '')
    for url, title in links:
        title = re.sub('\d{4}', '', title)
        title = clear_Title(title)
        # unlabeled links get a localized default caption
        title = Lang(32018).encode('utf-8') if title == "" else title.encode(
            'utf-8')
        url = re.sub('http://adf.ly/\d+/', '', url)
        if 'buck' in url:
            continue
        elif 'adf.ly' in url:
            # unshorten returns (resolved_url, http_status)
            url = unshortenit.unshorten(url)
            if not url[1] == 200:
                continue
            else:
                url = url[0]
        if 'easybytez' in url:
            continue
        if 'zippy' in url:
            continue
        addDir('[B][COLOR white]%s[/COLOR][/B]' % title, url, 100, iconimage, FANART, str(description))
    views.selectView('movies', 'movie-view')
def gamatokids():
    """GAMATO kids root menu: dubbed genre, animation genre, and search."""
    label = '[B][COLOR %s]%s[/COLOR][/B]'
    addDir(label % ('yellow', Lang(32004)), GAMATO + 'genre/gamato/', 4, ART + 'dub.jpg', FANART, '')
    addDir(label % ('yellow', Lang(32010)), GAMATO + 'genre/κινούμενα-σχέδια/', 4, ART + 'genre.jpg', FANART, '')
    # addDir('[B][COLOR yellow]Προτεινόμενα[/COLOR][/B]', GAMATO, 21, ART + 'top.png', FANART, '')
    addDir(label % ('gold', Lang(32002)), GAMATO, 18, ICON, FANART, '')
    views.selectView('menu', 'menu-view')