def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        langMap = {'hi': 'hindi', 'ta': 'tamil', 'te': 'telugu', 'ml': 'malayalam',
                   'kn': 'kannada', 'bn': 'bengali', 'mr': 'marathi', 'pa': 'punjabi'}

        # Resolve the movie's primary language from its IMDb page.
        lang = 'http://www.imdb.com/title/%s/' % imdb
        lang = client.request(lang)
        lang = re.findall('href\s*=\s*[\'|\"](.+?)[\'|\"]', lang)
        lang = [i for i in lang if 'primary_language' in i]
        lang = [urlparse.parse_qs(urlparse.urlparse(i).query) for i in lang]
        lang = [i['primary_language'] for i in lang if 'primary_language' in i]
        lang = langMap[lang[0][0]]

        q = self.search_link % (lang, urllib.quote_plus(title))
        q = urlparse.urljoin(self.base_link, q)

        t = cleantitle.get(title)

        r = self.request(q)
        r = client.parseDOM(r, 'li')
        r = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'h3'),
              client.parseDOM(i, 'div', attrs={'class': 'info'})) for i in r]
        r = [(i[0][0], i[1][0], i[2][0]) for i in r if i[0] and i[1] and i[2]]
        r = [(re.findall('(\d+)', i[0]), i[1], re.findall('(\d{4})', i[2])) for i in r]
        r = [(i[0][0], i[1], i[2][0]) for i in r if i[0] and i[2]]
        r = [i[0] for i in r if t == cleantitle.get(i[1]) and year == i[2]][0]

        url = str(r)
        return self.sources(client.replaceHTMLCodes(url))
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def sources(self, url):
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        if url == None:
            return self.srcs

        try:
            result = client.request(url, referer=self.base_link)
        except:
            result = ''

        items = client.parseDOM(result, "source", ret="src")
        for item in items:
            try:
                url = item
                host = client.host(url)
                self.srcs.append({'source': host, 'parts': '1', 'quality': 'HD',
                                  'scraper': self.name, 'url': url, 'direct': False})
            except:
                pass

        logger.debug('SOURCES [%s]' % self.srcs, __name__)
        return self.srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return self.srcs
def sources(self, url):
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs

        url = urlparse.urljoin(self.base_link, url)
        try:
            result = client.request(url, referer=self.base_link)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')

        items = client.parseDOM(result, "div", attrs={"class": "entry-content"})
        for item in items:
            try:
                url = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(item)[0][1]
                host = client.host(url)
                srcs.append({'source': host, 'parts': '1', 'quality': 'HD',
                             'scraper': self.name, 'url': url, 'direct': False})
            except:
                pass

        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return srcs
def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        query = cleantitle.get(title)
        query = '/watch?v=%s_%s' % (query.replace(' ', '_'), year)
        query = urlparse.urljoin(self.base_link, query)

        headers = {'User-Agent': self.userAgent}
        result = client.request(query, headers=headers)

        varid = re.compile('var frame_url = "(.+?)"', re.DOTALL).findall(result)[0].replace('/embed/', '/streamdrive/info/')
        res_chk = re.compile('class="title"><h1>(.+?)</h1>', re.DOTALL).findall(result)[0]
        varid = 'http:' + varid

        holder = client.request(varid, headers=headers).content
        links = re.compile('"src":"(.+?)"', re.DOTALL).findall(holder)

        for link in links:
            link = link.replace('\\/redirect?url=', '')
            link = urllib.unquote(link).decode('utf8')

            if '1080' in res_chk:
                res = '1080p'
            elif '720' in res_chk:
                res = '720p'
            else:
                res = 'DVD'

            self.srcs.append({'source': 'Googlelink', 'parts': '1', 'quality': res,
                              'scraper': self.name, 'url': link, 'direct': False})

        return self.srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def unRegPlay(self):
    playCount = 1  # default when the lookup below fails entirely
    try:
        now = datetime.datetime.now()
        currentDate = now.strftime("%Y-%m-%d")

        dbcon = database.connect(control.cacheFile)
        dbcur = dbcon.cursor()
        try:
            dbcur.execute("SELECT playCount FROM playCount where date = '%s'" % currentDate)
            playCount = dbcur.fetchone()[0]
            if playCount == None:
                raise Exception()
            playCount = playCount + 1
            dbcur.execute("DELETE FROM playCount where date = '%s'" % currentDate)
            dbcur.execute("INSERT INTO playCount Values (?, ?)", (playCount, currentDate))
            dbcon.commit()
        except Exception as e:
            logger.error(e, __name__)
            playCount = 1
            dbcur.execute("CREATE TABLE IF NOT EXISTS playCount ("
                          "playCount INTEGER, "
                          "date TEXT)")
            dbcur.execute("INSERT INTO playCount Values (?, ?)", (playCount, currentDate))
            dbcon.commit()
    except Exception as e:
        logger.error(e, __name__)
    return playCount
def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        t = cleantitle.get(title)
        try:
            query = '%s %s' % (title, year)
            query = base64.b64decode(self.search_link) % urllib.quote_plus(query)

            result = client.request(query)
            result = json.loads(result)['items']

            r = [(i['link'], i['title']) for i in result]
            r = [(i[0], re.compile('(.+?) [\d{4}|(\d{4})]').findall(i[1])) for i in r]
            r = [(i[0], i[1][0]) for i in r if len(i[1]) > 0]
            r = [x for y, x in enumerate(r) if x not in r[:y]]
            r = [i for i in r if t == cleantitle.get(i[1])]

            if r == None:
                raise Exception()
        except:
            return

        return self.sources(r)
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def get(self, tvshowtitle, year, imdb, tvdb, season=None, episode=None, idx=True, provider=None, url=None):
    try:
        if idx == True:
            if not provider == None:
                call = __import__('resources.lib.sources.%s' % provider, globals(), locals(), ['source'], -1).source()
                self.list = call.episodes(tvshowtitle, url)

            if self.list == []:
                raise Exception(control.lang(30516).encode('utf-8'))

            self.list = self.super_info(self.list)

            try:
                logger.debug('Before Episode Directory', __name__)
                self.episodeDirectory(self.list, provider)
                logger.debug('After Episode Directory', __name__)
            except Exception as e:
                logger.error(e)

            return self.list
    except Exception as e:
        logger.error(e)
        control.infoDialog(control.lang(30516).encode('utf-8'))
def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        query = '%s %s' % (title, year)
        query = self.search_link % (urllib.quote_plus(query))
        query = urlparse.urljoin(self.base_link, query)

        result = client.request(query, error=True)
        items = client.parseDOM(result, "item")

        cleanedTitle = cleantitle.get(title)
        for item in items:
            linkTitle = client.parseDOM(item, "title")[0]
            if cleanedTitle in cleantitle.get(linkTitle):
                url = client.parseDOM(item, "a", attrs={"rel": "nofollow"}, ret="href")[0]
                break

        return self.sources(client.replaceHTMLCodes(url))
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def desiTV(self):
    listItems = []
    provider = control.setting('tvshow.provider')

    if not provider == None:
        call = __import__('resources.lib.sources.%s' % provider, globals(), locals(), ['source'], -1).source()
        listItems = call.networks()
    else:
        from resources.lib.sources import desirulez
        listItems = desirulez.source().networks()

    listItems.sort()

    try:
        for item in listItems:
            self.addDirectoryItem(
                item['name'],
                '%s&provider=%s&url=%s' % (item['action'], item['provider'], item['url']),
                os.path.join(control.logoPath(), item['image']),
                'DefaultMovies.png')
    except Exception as e:
        logger.error(e)

    self.endDirectory()
def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        query = '%s %s' % (title, year)
        query = self.search_link % (urllib.quote_plus(query))
        query = urlparse.urljoin(self.base_link, query)

        result = client.request(query)
        result = result.decode('iso-8859-1').encode('utf-8')
        result = client.parseDOM(result, "item")

        cleanedTitle = cleantitle.get(title)
        for item in result:
            linkTitle = client.parseDOM(item, "title")[0]
            if cleanedTitle == cleantitle.get(linkTitle):
                url = client.parseDOM(item, "link")[0]
                break

        return self.sources(client.replaceHTMLCodes(url))
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        query = '%s' % (title)
        query = self.search_link % (urllib.quote_plus(query))
        query = urlparse.urljoin(self.base_link, query)

        cleanedTitle = cleantitle.get(title)

        result = client.request(query)
        result = result.decode('iso-8859-1').encode('utf-8')
        items = client.parseDOM(result, "item")

        for item in items:
            linkTitle = client.parseDOM(item, 'title')[0]
            try:
                parsed = re.compile('(.+?) \((\d{4})\) ').findall(linkTitle)[0]
                parsedTitle = parsed[0]
            except:
                parsedTitle = ''

            if cleanedTitle == cleantitle.get(parsedTitle):
                url = client.parseDOM(item, "link")[0]
                return self.sources(client.replaceHTMLCodes(url))
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        query = self.moviesearch_link % urllib.quote_plus(cleantitle.query(title))
        query = urlparse.urljoin(self.base_link, query)

        result = str(proxy.request(query, 'item'))
        if 'page=2' in result or 'page%3D2' in result:
            result += str(proxy.request(query + '&page=2', 'item'))
        result = client.parseDOM(result, 'div', attrs={'class': 'item'})

        title = 'watchputlocker' + cleantitle.get(title)
        # Accept the requested year plus the adjacent years to tolerate metadata drift.
        years = ['(%s)' % str(year), '(%s)' % str(int(year) + 1), '(%s)' % str(int(year) - 1)]

        result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
        result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
        result = [i for i in result if any(x in i[1] for x in years)]

        r = [(proxy.parse(i[0]), i[1]) for i in result]

        match = [i[0] for i in r if title == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]

        match2 = [i[0] for i in r]
        match2 = [x for y, x in enumerate(match2) if x not in match2[:y]]
        if match2 == []:
            return

        for i in match2[:5]:
            try:
                if len(match) > 0:
                    url = match[0]
                    break
                r = proxy.request(urlparse.urljoin(self.base_link, i), 'link_ite')
                r = re.findall('(tt\d+)', r)
                if imdb in r:
                    url = i
                    break
            except:
                pass

        url = re.findall('(?://.+?|)(/.+)', url)[0]
        url = client.replaceHTMLCodes(url)
        url = url.encode('utf-8')

        return self.sources(client.replaceHTMLCodes(url))
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def scrape_movie(self, title, year, imdb, debrid=False):
    try:
        movies = cache.get(self.desiRulezCache, 168)
        url = [i['url'] for i in movies
               if cleantitle.get(i['title'].decode('UTF-8')) == cleantitle.get(title)][0]
        return self.sources(client.replaceHTMLCodes(url))
    except Exception as e:
        logger.error(e)
    return []
def source(self, item):
    try:
        try:
            if '720p' in item:
                quality = 'HD'
            else:
                quality = 'SD'

            urls = client.parseDOM(item, "a", ret="href")
            for j in range(0, len(urls)):
                videoID = self.getVideoID(urls[j])
                result = client.request(self.info_link % videoID)
                result = result.decode('iso-8859-1').encode('utf-8')

                item = client.parseDOM(result, name="div",
                                       attrs={"style": "float:none;height:700px;margin-left:200px"})[0]
                rUrl = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(item)[0][1]
                if not rUrl.startswith('http:') and not rUrl.startswith('https:'):
                    rUrl = '%s%s' % ('http:', rUrl)
                urls[j] = rUrl

            host = client.host(urls[0])
            url = "##".join(urls)

            self.srcs.append({'source': host, 'parts': str(len(urls)), 'quality': quality,
                              'scraper': self.name, 'url': url, 'direct': False})
        except Exception as e:
            logger.error(e)
    except:
        pass
    return self.srcs
def shows(self, url, provider=None, network=None):
    try:
        # TODO: change the implementation to get shows from all providers and save them to the DB
        if not provider == None:
            call = __import__('resources.lib.sources.%s' % provider, globals(), locals(), ['source'], -1).source()
            self.list = call.tvshows(network, url)
        self.worker()
        return self.list
    except Exception as e:
        logger.error(e, __name__)
def addDirectory(self, items):
    if items == None or len(items) == 0:
        return

    addonFanart = control.addonFanart()
    addonThumb = control.addonThumb()
    artPath = control.artPath()

    for i in items:
        try:
            try:
                name = control.lang(i['name']).encode('utf-8')
            except:
                name = i['name']

            if i['image'].startswith('http://'):
                thumb = i['image']
            elif not artPath == None:
                thumb = os.path.join(artPath, i['image'])
            else:
                thumb = addonThumb

            url = '%s?action=%s' % (sysaddon, i['action'])
            try:
                url += '&url=%s' % urllib.quote_plus(i['url'])
            except:
                pass

            cm = []
            item = control.item(label=name, iconImage=thumb, thumbnailImage=thumb)
            item.addContextMenuItems(cm)
            if not addonFanart == None:
                item.setProperty('Fanart_Image', addonFanart)

            control.addItem(handle=syshandle, url=url, listitem=item, isFolder=True)
        except Exception as e:
            logger.error(e, __name__)

    viewMode = 'mediainfo1'
    views.setView('tvshows', {
        'skin.confluence': control.viewMode['confluence'][viewMode],
        'skin.estuary': control.viewMode['esturary'][viewMode]
    })
    control.directory(syshandle, cacheToDisc=True)
def get(self, url, idx=True, provider=None, network=None):
    try:
        self.list = cache.get(self.shows, 168, url, provider, network, table='rel_shows')
        self.list = sorted(self.list, key=lambda k: k['name'])
        if idx == True:
            self.tvshowDirectory(self.list)
        return self.list
    except Exception as e:
        logger.error(e, __name__)
def scrape_episode(self, title, show_year, year, season, episode, imdb, tvdb, debrid=False):
    try:
        return self.sources(client.replaceHTMLCodes(imdb))
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return []
def sources(self, url):
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs

        try:
            result = client.request(url, referer=self.base_link)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')

        result = client.parseDOM(result, "div",
                                 attrs={"class": "entry-content clearfix single-post-content"})
        result = client.parseDOM(result, "p", attrs={"style": "text-align: center;"})
        items = client.parseDOM(result, "a", ret="href")

        for item in items:
            try:
                url = item
                if 'digibolly.se' in url:
                    result = client.request(url)
                    url = re.findall('<iframe src="(.+?)"', result, re.IGNORECASE)[0]
                host = client.host(url)
                srcs.append({'source': host, 'parts': '1', 'quality': 'HD',
                             'scraper': self.name, 'url': url, 'direct': False})
            except:
                pass

        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return srcs
def source(self, item):
    title = item[0]
    links = item[1]
    urls = []
    srcs = []

    if '720p' in title:
        quality = 'HD'
    else:
        quality = 'SD'

    parts = client.parseDOM(links, "a", ret="href")
    for part in parts:
        try:
            part = client.request(part)
            part = part.decode('iso-8859-1').encode('utf-8')
            part = client.parseDOM(part, "td",
                                   attrs={"style": "vertical-align:middle;text-align:center;"})[0]
            tUrl = re.compile('(SRC|src|data-config)=[\'|\"](.+?)[\'|\"]').findall(part)[0][1]
            host = client.host(tUrl)
            urls.append(tUrl)
        except Exception as e:
            logger.error(e)

    # Only build a source entry when at least one part resolved; otherwise host/url are undefined.
    if urls:
        url = "##".join(urls)
        srcs.append({'source': host, 'parts': len(urls), 'quality': quality,
                     'scraper': self.name, 'url': url, 'direct': False})
    return srcs
def get(self, url, idx=True, provider=None, lang=None):
    logger.debug('url [%s] provider [%s] lang [%s] ' % (url, provider, lang), self.__class__)
    self.lang = lang
    try:
        try:
            u = urlparse.urlparse(url).netloc.lower()
        except:
            pass

        if u in self.imdb_link:
            self.list = cache.get(self.imdb_list, 48, url)
            if idx == True:
                self.worker()

        if idx == True:
            self.movieDirectory(self.list, lang=lang)
        return self.list
    except Exception as e:
        logger.error(e, __name__)
def sources(self, url):
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs

        result = client.request(url)
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '')
        result = client.parseDOM(result, "div", attrs={"class": "post-content bottom"})[0]

        items = client.parseDOM(result, "p")
        hosts = client.parseDOM(result, "span", attrs={"style": "color: red;"})

        links = []
        for item in items:
            if 'a href' in item:
                links.append(item)

        items = zip(hosts, links)
        for item in items:
            self.srcs.extend(self.source(item))

        logger.debug('SOURCES [%s]' % self.srcs, __name__)
        return self.srcs
    except Exception as e:
        logger.error(e)
        return self.srcs
def sources(self, url):
    logger.debug('SOURCES URL %s' % url, __name__)
    try:
        srcs = []
        if url == None:
            return srcs

        url = urlparse.urljoin(self.base_link, url)
        try:
            result = client.request(url, referer=self.base_link)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')

        result = client.parseDOM(result, "div", attrs={"id": "list-dl"})
        items = client.parseDOM(result, "a", ret="href")

        for item in items:
            try:
                url = item
                host = client.host(url)
                srcs.append({'source': host, 'parts': '1', 'quality': 'HD',
                             'scraper': self.name, 'url': url, 'direct': False})
            except:
                pass

        logger.debug('SOURCES [%s]' % srcs, __name__)
        return srcs
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
        return srcs
def scrape_episode(self, title, show_year, year, season, episode, imdb, tvdb, debrid=False):
    try:
        query = '%s %s' % (title, episode)
        query = self.search_link % (urllib.quote_plus(query))

        try:
            result = client.request(self.base_link + query)
        except:
            result = ''
        result = result.decode('iso-8859-1').encode('utf-8')
        result = result.replace('\n', '').replace('\t', '')

        result = client.parseDOM(result, 'content:encoded')[0]
        url = client.parseDOM(result, "a", attrs={"rel": "nofollow"}, ret="href")[0]

        if not url == None:
            return self.sources(client.replaceHTMLCodes(url))
    except Exception as e:
        logger.error('[%s] Exception : %s' % (self.__class__, e))
    return []
def clearCache(self, url=None):
    try:
        if url == None:
            self.addDirectoryItem(90124, 'clearCache&url=main', 'clearcache.png', 'DefaultMovies.png')
            self.addDirectoryItem(90125, 'clearCache&url=providers', 'clearcache.png', 'DefaultMovies.png')
            self.addDirectoryItem(90127, 'clearCache&url=meta', 'clearcache.png', 'DefaultMovies.png')
            self.endDirectory()
        elif url == 'main':
            cache.clear()
        elif url == 'providers':
            cache.clear(['rel_src', 'rel_url'])
        elif url == 'live':
            control.deleteAll('.json')
            control.delete('user.db')
            cache.clear(['rel_live', 'rel_logo', 'live_meta'])
            cache.clear(['live_cache'])
        elif url == 'meta':
            cache.clear(['meta', 'meta_imdb'], control.metacacheFile)
    except Exception as e:
        logger.error(e)
def super_info(self, items):
    logger.debug('INSIDE SUPER_INFO', __name__)
    try:
        for i in range(0, len(items)):
            season = '0' if items[i].get('season') == None else items[i].get('season')
            self.list[i].update({
                'season': season,
                'episode': self.list[i]['name'],
                'imdb': '0', 'tvdb': '0', 'year': '0',
                'poster': '0', 'banner': '0', 'fanart': '0', 'thumb': '0',
                'premiered': '0', 'duration': '30'
            })
        logger.debug('COMPLETE SUPER_INFO', __name__)
        return self.list
    except Exception as e:
        logger.error(e)
def episodes(self, title, url):
    try:
        episodes = []
        links = [self.base_link_1, self.base_link_2, self.base_link_3]
        tvshowurl = url

        # Try each mirror until one returns a usable thread listing.
        for base_link in links:
            try:
                url = url.replace(base_link, '')
                result = client.request(base_link + '/' + url)
                if result == None:
                    raise Exception()
            except:
                result = ''
            if 'threadtitle' in result:
                break

        rawResult = result.decode('windows-1252').encode('utf-8')
        result = client.parseDOM(rawResult, "h3", attrs={"class": "title threadtitle_unread"})
        result += client.parseDOM(rawResult, "h3", attrs={"class": "threadtitle"})

        for item in result:
            name = client.parseDOM(item, "a", attrs={"class": "title"})
            name += client.parseDOM(item, "a", attrs={"class": "title threadtitle_unread"})
            if type(name) is list:
                name = name[0]

            url = client.parseDOM(item, "a", ret="href")
            if type(url) is list:
                url = url[0]

            if "Online" not in name:
                continue

            name = name.replace(title, '')
            if not title == 'awards':
                try:
                    name = re.compile('([\d{1}|\d{2}]\w.+\d{4})').findall(name)[0]
                except:
                    pass
            name = name.strip()

            try:
                season = title.lower()
                season = re.compile('[0-9]+').findall(season)[0]
            except:
                season = '0'

            episodes.append({
                'season': season, 'tvshowtitle': title, 'title': name, 'name': name,
                'url': url, 'provider': 'desirulez', 'tvshowurl': tvshowurl
            })

        next = client.parseDOM(rawResult, "span", attrs={"class": "prev_next"})
        next = client.parseDOM(next, "a", attrs={"rel": "next"}, ret="href")[0]
        episodes[0].update({'next': next})
    except Exception as e:
        logger.error(e)

    logger.debug(episodes, __name__)
    return episodes
def tvshows(self, name, url):
    try:
        result = ''
        shows = []
        links = [self.base_link_1, self.base_link_2, self.base_link_3]

        # Try each mirror until one returns the forum listing.
        for base_link in links:
            try:
                result, response_code, response_headers, headers, cookie = client.request(
                    '%s/%s' % (base_link, url), output='extended')
                if result == None:
                    raise Exception()
            except:
                result = ''
            if 'forumtitle' in result:
                break

        result = result.decode('windows-1252').encode('utf-8')
        result = client.parseDOM(result, "h2", attrs={"class": "forumtitle"})

        for item in result:
            title = ''
            url = ''
            try:
                title = client.parseDOM(item, "a", attrs={"class": "title threadtitle_unread"})[0]
            except:
                title = client.parseDOM(item, "a", attrs={"class": "title"})
                title = title[0] if title else client.parseDOM(item, "a")[0]

            title = client.replaceHTMLCodes(title)
            if title == 'Naamkarann':
                title = 'Naamkaran'

            url = client.parseDOM(item, "a", ret="href")
            if not url:
                url = client.parseDOM(item, "a", attrs={"class": "title"}, ret="href")
            if type(url) is list and len(url) > 0:
                url = str(url[0])

            if not 'Past Shows' in title:
                # name, title, url, poster, banner, fanart, next, year, duration, provider
                shows.append({
                    'name': title, 'title': title, 'url': url,
                    'poster': '0', 'banner': '0', 'fanart': '0', 'next': '0',
                    'year': '0', 'duration': '0', 'provider': 'desirulez'
                })

        return shows
    except Exception as e:
        logger.error(e)
        return
def addItem(self, title, content):
    try:
        control.playlist.clear()

        items = control.window.getProperty(self.itemProperty)
        items = json.loads(items)
        if items == []:
            raise Exception()

        meta = control.window.getProperty(self.metaProperty)
        meta = json.loads(meta)

        infoMenu = control.lang(30502).encode('utf-8')
        downloads = True if control.setting('downloads') == 'true' and not control.setting('movie.download.path') == '' else False

        if 'year' in meta:
            name = '%s (%s)' % (title, meta['year'])
        else:
            name = title

        systitle = urllib.quote_plus(title.encode('utf-8'))
        sysname = urllib.quote_plus(name.encode('utf-8'))

        poster = meta['poster'] if 'poster' in meta else '0'
        banner = meta['banner'] if 'banner' in meta else '0'
        thumb = meta['thumb'] if 'thumb' in meta else poster
        fanart = meta['fanart'] if 'fanart' in meta else '0'

        if poster == '0':
            poster = control.addonPoster()
        if banner == '0' and poster == '0':
            banner = control.addonBanner()
        elif banner == '0':
            banner = poster
        if thumb == '0' and fanart == '0':
            thumb = control.addonFanart()
        elif thumb == '0':
            thumb = fanart
        if control.setting('fanart') == 'true' and not fanart == '0':
            pass
        else:
            fanart = control.addonFanart()

        for i in range(len(items)):
            try:
                parts = int(items[i]['parts'])
            except:
                parts = 1

            label = items[i]['label']
            syssource = urllib.quote_plus(json.dumps([items[i]]))
            sysurl = '%s?action=playItem&title=%s&source=%s&content=%s' % (sysaddon, systitle, syssource, content)

            item = control.item(label=label)

            cm = []
            cm.append((control.lang(30504).encode('utf-8'), 'RunPlugin(%s?action=queueItem)' % sysaddon))
            if content != 'live':
                if downloads == True and parts <= 1:
                    sysimage = urllib.quote_plus(poster.encode('utf-8'))
                    cm.append((control.lang(30505).encode('utf-8'),
                               'RunPlugin(%s?action=download&name=%s&image=%s&source=%s)' % (sysaddon, systitle, sysimage, syssource)))

            item.setArt({
                'icon': thumb, 'thumb': thumb, 'poster': poster,
                'tvshow.poster': poster, 'season.poster': poster,
                'banner': banner, 'tvshow.banner': banner, 'season.banner': banner
            })
            if not fanart == None:
                item.setProperty('Fanart_Image', fanart)
            item.addContextMenuItems(cm)
            item.setInfo(type='Video', infoLabels=meta)

            control.addItem(handle=syshandle, url=sysurl, listitem=item, isFolder=False)

        control.directory(syshandle, cacheToDisc=True)
    except Exception as e:
        logger.error(e.message)
        control.infoDialog(control.lang(30501).encode('utf-8'))
def sourcesDialog(self, items):
    try:
        labels = [i['label'] for i in items]
        select = control.selectDialog(labels)
        if select == -1:
            return 'close://'

        self.isRegistered()

        # Start from the selected source, then fall back to the ones after it, then the ones before it.
        next = [y for x, y in enumerate(items) if x >= select]
        prev = [y for x, y in enumerate(items) if x < select][::-1]
        items = [items[select]]
        items = [i for i in items + next + prev][:40]

        header = control.addonInfo('name')
        header2 = header.upper()

        progressDialog = control.progressDialog
        progressDialog.create(control.addonInfo('name'), '')
        progressDialog.update(0)

        block = None
        for i in range(len(items)):
            try:
                if items[i]['source'] == block:
                    raise Exception()

                w = workers.Thread(self.sourcesResolve, items[i])
                w.start()

                try:
                    if progressDialog.iscanceled():
                        break
                    progressDialog.update(int((100 / float(len(items))) * i),
                                          str(items[i]['label']), str(' '))
                except:
                    progressDialog.update(int((100 / float(len(items))) * i),
                                          str(header2), str(items[i]['label']))

                m = ''
                for x in range(3600):
                    try:
                        if xbmc.abortRequested == True:
                            return sys.exit()
                        if progressDialog.iscanceled():
                            return progressDialog.close()
                    except:
                        pass

                    k = control.condVisibility('Window.IsActive(virtualkeyboard)')
                    if k:
                        m += '1'
                        m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k:
                        break

                    k = control.condVisibility('Window.IsActive(yesnoDialog)')
                    if k:
                        m += '1'
                        m = m[-1]
                    if (w.is_alive() == False or x > 30) and not k:
                        break

                    time.sleep(0.5)

                for x in range(30):
                    try:
                        if xbmc.abortRequested == True:
                            return sys.exit()
                        if progressDialog.iscanceled():
                            return progressDialog.close()
                    except:
                        pass
                    if m == '':
                        break
                    if w.is_alive() == False:
                        break
                    time.sleep(0.5)

                if w.is_alive() == True:
                    block = items[i]['source']

                if self.url == None:
                    raise Exception()

                self.selectedSource = items[i]['label']

                try:
                    progressDialog.close()
                except:
                    pass

                control.execute('Dialog.Close(virtualkeyboard)')
                control.execute('Dialog.Close(yesnoDialog)')
                return self.url
            except:
                pass

        try:
            progressDialog.close()
        except:
            pass
    except Exception as e:
        logger.error(e.message)
        try:
            progressDialog.close()
        except:
            pass