def cat(self, url):
    try:
        r = urlparse.urljoin(self.base_link, url)
        r = client.request(r)
        r = BeautifulSoup(r)
        r = r.findAll('div', attrs={'class': re.compile(r'snag-slider-item\s*')})
        for items in r:
            try:
                href = items['data-permalink'].encode('utf-8')
                title = items['data-title'].encode('utf-8')
                img = items.findAll('img')[0]['src'].encode('utf-8')
                # Fall back to the lazy-load attribute when src is empty.
                if not img:
                    img = items.findAll('img')[0]['data-src'].encode('utf-8')
                href = urlparse.urljoin(self.base_link, href)
                meta = {"poster": img, "title": title}
                meta = urllib.quote_plus(json.dumps(meta))
                control.addDirMeta(title, href, 'snagfilms_resolve', img, control.fanart, meta)
            except:
                pass
    except:
        pass

def cat(self, url):
    try:
        r = client.request(url)
        query = BeautifulSoup(r)
        r = query.findAll('div', attrs={'class': 'post-thumbnail'})
        for items in r:
            href = items.findAll('a')[0]['href'].encode('utf-8')
            img = items.findAll('img')[0]['src'].encode('utf-8')
            title = items.findAll('a')[0]['title'].encode('utf-8')
            title = cleantitle.get2(title)
            href = urlparse.urljoin(self.base_link, href)
            img = urlparse.urljoin(self.base_link, img)
            meta = {"poster": img, "title": title}
            meta = urllib.quote_plus(json.dumps(meta))
            control.addDirMeta(title, href, 'docheaven_resolve', img, control.fanart, meta)
    except:
        pass

    # Pagination: list every numeric-nav page link except the page being shown.
    try:
        n = query.findAll('div', attrs={'class': 'numeric-nav'})
        for x in n:
            pages = x.findAll('a')
            for p in pages:
                page = p['href'].encode('utf-8')
                page = urlparse.urljoin(self.base_link, page)
                page_title = p.string
                if page != url:
                    control.addDir("[COLOR yellow]PAGE:[/COLOR] " + page_title, page, 'docheaven_cat', control.fanart, control.fanart)
    except:
        pass

def cat(self, url):
    try:
        r = client.request(url)
        query = BeautifulSoup(r)
        r = query.findAll('div', attrs={'class': 'item'})
        for items in r:
            href = items.findAll('a')[0]['href'].encode('utf-8')
            img = items.findAll('img')[0]['src'].encode('utf-8')
            title = items.findAll('img')[0]['alt'].encode('utf-8')
            title = cleantitle.get2(title)
            href = urlparse.urljoin(self.base_link, href)
            img = urlparse.urljoin(self.base_link, img)
            meta = {"poster": img, "title": title}
            meta = urllib.quote_plus(json.dumps(meta))
            control.addDirMeta(title, href, 'docstorm_resolve', img, control.fanart, meta)
    except:
        pass

    # Pagination: the <link rel="next"> and <link rel="prev"> tags carry the
    # neighbouring page URLs; the two identical blocks are folded into one loop.
    for rel in ('next', 'prev'):
        try:
            n = query.findAll('link', attrs={'rel': rel})
            for p in n:
                page = p['href'].encode('utf-8')
                page_title = page.split('/')[-1]
                if page_title == '':
                    page_title = '1'
                page = urlparse.urljoin(self.base_link, page)
                if page != url:
                    control.addDir("[COLOR yellow]PAGE:[/COLOR] " + page_title, page, 'docstorm_cat', control.fanart, control.fanart)
        except:
            pass

def popular(self, url):
    r = client.request(url)
    r = client.parseDOM(r, 'div', attrs={'class': 'doc'})
    for items in r:
        try:
            href = client.parseDOM(items, 'a', ret='href')[0].encode('utf-8')
            title = client.parseDOM(items, 'a', ret='title')[0].encode('utf-8')
            img = client.parseDOM(items, 'img', ret='src')[0].encode('utf-8')
            href = urlparse.urljoin(self.base_link, href)
            img = urlparse.urljoin(self.base_link, img)
            meta = {"poster": img, "title": title}
            meta = urllib.quote_plus(json.dumps(meta))
            control.addDirMeta(title, href, 'topdocs_resolve', img, control.fanart, meta)
        except:
            pass

def cat(self, url):
    r = client.request(url)
    r = BeautifulSoup(r)
    r = r.findAll('div', attrs={'class': 'film'})
    for items in r:
        # Use a local name for each item link so the page URL passed in as
        # `url` is not clobbered on the first loop iteration.
        href = items.findAll('a')[0]['href'].encode('utf-8')
        img = items.findAll('img')[0]['src'].encode('utf-8')
        title = href.split('/')[-1]
        title = cleantitle.get2(title)
        href = urlparse.urljoin(self.base_link, href)
        img = urlparse.urljoin(self.base_link, img)
        meta = {"poster": img, "title": title}
        meta = urllib.quote_plus(json.dumps(meta))
        control.addDirMeta(title, href, 'freedoc_resolve', img, control.fanart, meta)

def search(self, title):
    progressDialog = control.progressDialog
    progressDialog.create(control.addonInfo('name'), '')
    progressDialog.update(0, 'Searching...')

    # Collect every non-package module in this package; each module is a
    # scraper exposing a source() class. The original imported each module
    # here only to discard the result, so that wasted pass is dropped.
    sourceDict = []
    for pkg, name, is_pkg in pkgutil.walk_packages(__path__):
        sourceDict.append((name, is_pkg))
    sourceDict = [i[0] for i in sourceDict if not i[1]]
    sourceLabel = [i for i in sourceDict]

    # One worker thread per scraper; each appends its hits to self.sources.
    self.sources = []
    threads = []
    for source in sourceDict:
        threads.append(workers.Thread(self.getMovieSource, title,
                                      __import__(source, globals(), locals(), [], -1).source()))
    [i.start() for i in threads]

    # Poll the workers every half second for up to 30 seconds, showing the
    # names of the scrapers still running in the progress dialog.
    timeout = 30
    string1 = "Time Elapsed %s / 30"
    for i in range(0, timeout * 2):
        try:
            try:
                info = [sourceLabel[int(re.sub('[^0-9]', '', str(x.getName()))) - 1]
                        for x in threads if x.is_alive()]
            except:
                info = []
            timerange = int(i * 0.5)
            try:
                if progressDialog.iscanceled():
                    break
                string4 = string1 % str(timerange)
                string5 = str(info).translate(None, "[]'")
                progressDialog.update(
                    int((100 / float(len(threads))) * len([x for x in threads if not x.is_alive()])),
                    str(string4), str(string5))
            except:
                pass
            if not [x for x in threads if x.is_alive()]:
                break
            time.sleep(0.5)
        except:
            pass

    # Render everything the workers collected as directory items.
    for item in self.sources:
        title = item['title']
        url = item['url'].encode('utf-8')
        mode = item['action']
        poster = item['poster']
        meta = item['meta']
        provider = item['provider']
        if url.startswith('/'):
            url = "https:" + url
        label = "[I]%s[/I] | [B]%s[/B]" % (provider, title)
        label = label.upper()
        control.addDirMeta(label, url, mode, poster, control.fanart, meta)
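
# A minimal usage sketch (assumption: this method lives on the addon's main
# navigation class, here called `navigator`, reached from a Kodi keyboard
# prompt; the class name and query are illustrative, not from this file):
#
#     navigator().search('night and fog')
#
# Each worker thread runs getMovieSource() against one scraper module and
# appends its hits to self.sources, which the final loop above renders as
# "[PROVIDER] | TITLE" directory entries.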