def news_episodes_listing(self, query):

    threads = []

    for i in list(range(1, 101)):
        threads.append(workers.Thread(self.thread, i, self.newsgr_link_ajax.format(page=str(i), category=query)))
        self.data.append('')

    [i.start() for i in threads]
    [i.join() for i in threads]

    html = '\n'.join(self.data)

    items = client.parseDOM(html, 'div', attrs={'class': 'newsItem'})

    for item in items:

        label = client.replaceHTMLCodes(client.parseDOM(item, 'a')[1])
        title = u'[CR]'.join([label, client.parseDOM(item, 'time')[0]])
        image = client.parseDOM(item, 'img', ret='src')[0]
        url = client.parseDOM(item, 'a', ret='href')[1]

        self.list.append({'title': title, 'image': image, 'url': url})

    return self.list
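# All of the listing methods in this section dispatch work through a small
# workers.Thread wrapper (a target callable plus positional arguments, then
# start()/join()). The wrapper itself is not part of this section; the sketch
# below is only an assumption of the minimal interface that calls such as
# workers.Thread(self.thread, i, link) appear to expect, not the add-on's
# actual implementation.
import threading


class Thread(threading.Thread):

    def __init__(self, target, *args):
        threading.Thread.__init__(self)
        # Store the callable and its positional arguments for run()
        self._target_func = target
        self._target_args = args

    def run(self):
        # Execute the stored callable on the worker thread
        self._target_func(*self._target_args)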
def items_list(self, url):

    page = url
    result = client.request(page)

    # Initialise up front so a failed request does not raise NameError in the loop below
    items = []

    try:

        if "contentContainer_totalpages" in result:

            totalPages = int(re.search(r'contentContainer_totalpages = (\d+);', result).group(1))
            seriesId = re.search(r'/templates/data/morevideos\?aid=(\d+)', result).group(1)

            threads = []

            for i in list(range(1, totalPages + 1)):
                threads.append(workers.Thread(self.thread, self.more_videos + seriesId + "&p=" + str(i), i - 1))
                self.data.append('')

            [i.start() for i in threads]
            [i.join() for i in threads]

            for i in self.data:
                items.extend(client.parseDOM(i, "article"))

        else:

            items = client.parseDOM(result, "article")

    except:
        pass

    for item in items:

        try:

            title = client.parseDOM(item, "h2")[0]
            title = client.replaceHTMLCodes(title)
            title = title.encode('utf-8')

            link = client.parseDOM(item, "a", ret="href")[0]

            if re.match(r'/.+/(\d+)/.+', link) is not None:

                # Episode links carry a numeric id; resolve it through the episodes endpoint
                episodeId = re.search(r'/.+/(\d+)/.+', link).group(1)
                episodeJSON = client.request(self.episodes_link + episodeId)
                episodeJSON = json.loads(episodeJSON)

                url = episodeJSON['url']
                url = client.replaceHTMLCodes(url)
                url = url.encode('utf-8')

            else:

                url = self.base_link + link + '/videos'

            image = client.parseDOM(item, "img", ret="src")[0]
            image = client.replaceHTMLCodes(image)
            image = image.encode('utf-8')

            self.list.append({'title': title, 'url': url, 'image': image})

        except:
            pass

    return self.list
def videos_list(self, url, lang):

    try:

        request = urlencode({'request': self.post_link % (url, lang)})
        result = client.request(self.api_link, post=request)
        result = json.loads(result)

        items = []

        if 'themedetailslist' in result:
            items = result['themedetailslist']
        elif 'programDetailsList' in result:
            items = result['programDetailsList']
        elif 'homelist' in result:
            items = result['homelist']

    except:
        return

    for item in items:

        try:

            title = item['title']
            title = client.replaceHTMLCodes(title)
            title = title.encode('utf-8')

            url = str(item['id'])
            url = url.encode('utf-8')

            image = self.img2_link % (url, url)
            image = image.encode('utf-8')

            self.list.append({'title': title, 'url': url, 'image': image})

        except:
            pass

    threads = []

    for i in list(range(0, len(self.list))):
        threads.append(workers.Thread(self.list_worker, i, lang))

    [i.start() for i in threads]
    [i.join() for i in threads]

    self.list = [
        i for i in self.list if 'check' in i and not (i['check'] == '' or i['check'] is None)
    ]

    return self.list
def run(self, query=None):

    if not 'Greek' in str(langs).split(','):

        control.directory(syshandle)
        control.infoDialog(control.lang(32002).encode('utf-8'))

        return

    if query is None:

        title = control.infoLabel('VideoPlayer.Title')

        if re.search(r'[^\x00-\x7F]+', title) is not None:
            title = control.infoLabel('VideoPlayer.OriginalTitle')

        year = control.infoLabel('VideoPlayer.Year')

        tvshowtitle = control.infoLabel('VideoPlayer.TVshowtitle')
        season = control.infoLabel('VideoPlayer.Season')
        episode = control.infoLabel('VideoPlayer.Episode')

        if 's' in episode.lower():
            season, episode = '0', episode[-1:]

        if not tvshowtitle == '':  # episode
            query = '%s S%02dE%02d' % (tvshowtitle, int(season), int(episode))
        elif not year == '':  # movie
            query = '%s (%s)' % (title, year)
        else:  # file
            query, year = getCleanMovieTitle(title)
            if not year == '':
                query = '%s (%s)' % (query, year)

    self.query = query

    threads = []
    threads.append(workers.Thread(self.xsubstv))
    threads.append(workers.Thread(self.subzxyz))
    threads.append(workers.Thread(self.subtitlesgr))

    [i.start() for i in threads]

    for i in range(0, 10 * 2):

        try:

            is_alive = [x.is_alive() for x in threads]

            if all(x == False for x in is_alive):
                break
            if control.aborted is True:
                break

            control.sleep(500)

        except:
            pass

    if len(self.list) == 0:
        control.directory(syshandle)
        return

    f = []
    f += [i for i in self.list if i['source'] == 'xsubstv']
    f += [i for i in self.list if i['source'] == 'subzxyz']
    f += [i for i in self.list if i['source'] == 'subtitlesgr']

    self.list = f

    for i in self.list:

        try:
            if i['source'] == 'subzxyz':
                i['name'] = '[subzxyz] %s' % i['name']
            elif i['source'] == 'xsubstv':
                i['name'] = '[xsubstv] %s' % i['name']
        except:
            pass

    for i in self.list:

        try:

            name, url, source, rating = i['name'], i['url'], i['source'], i['rating']

            u = {'action': 'download', 'url': url, 'source': source}
            u = '%s?%s' % (sysaddon, urllib.urlencode(u))

            item = control.item(label='Greek', label2=name, iconImage=str(rating), thumbnailImage='el')

            item.setProperty('sync', 'false')
            item.setProperty('hearing_imp', 'false')

            control.addItem(handle=syshandle, url=u, listitem=item, isFolder=False)

        except:
            pass

    control.directory(syshandle)
def _video_list(self, cid, url, pagination, limit):

    try:
        result = client.request(url)
        result = json.loads(result)
        items = result['items']
    except Exception:
        pass

    for i in list(range(1, limit)):

        try:

            if pagination is True:
                raise Exception

            if not 'nextPageToken' in result:
                raise Exception

            page = url + '&pageToken=' + result['nextPageToken']

            result = client.request(page)
            result = json.loads(result)

            items += result['items']

        except Exception:
            pass

    try:
        if pagination is False:
            raise Exception
        next = cid + '&pageToken=' + result['nextPageToken']
    except Exception:
        next = ''

    for item in items:

        try:

            title = item['snippet']['title']

            try:
                title = title.encode('utf-8')
            except AttributeError:
                pass

            try:
                url = item['snippet']['resourceId']['videoId']
            except (KeyError, ValueError):
                url = item['id']['videoId']

            try:
                url = url.encode('utf-8')
            except AttributeError:
                pass

            image = item['snippet']['thumbnails']['high']['url']

            if '/default.jpg' in image:
                raise Exception

            try:
                image = image.encode('utf-8')
            except AttributeError:
                pass

            try:
                dateadded = item['snippet']['publishedAt']
                dateadded = str(iso8601.parse_date(dateadded).strftime('%Y-%m-%d %H:%M:%S'))
            except Exception:
                dateadded = str(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))

            date = '.'.join(dateadded.split()[0].split('-')[::-1])

            data = {
                'title': title, 'url': url, 'image': image, 'dateadded': dateadded, 'date': date,
                'premiered': dateadded.split()[0], 'aired': dateadded.split()[0], 'year': int(dateadded[:4])
            }

            if next != '':
                data['next'] = next

            self.list.append(data)

        except Exception:
            pass

    try:

        # Chunk the collected video ids into groups of 50 for the contentDetails requests
        u = [list(range(len(self.list)))[i:i + 50] for i in range(0, len(self.list), 50)]
        u = [','.join([self.list[x]['url'] for x in i]) for i in u]
        u = [self.content_link % i + self.key_link for i in u]

        threads = []

        for i in list(range(0, len(u))):
            threads.append(workers.Thread(self.thread, u[i], i))
            self.data.append('')

        [i.start() for i in threads]
        [i.join() for i in threads]

        items = []

        for i in self.data:
            items += json.loads(i)['items']

    except Exception:
        pass

    for item in list(range(0, len(self.list))):

        try:

            vid = self.list[item]['url']

            self.list[item]['url'] = self.play_link.format(vid)

            d = [(i['id'], i['contentDetails']) for i in items]
            d = [i for i in d if i[0] == vid]
            d = d[0][1]['duration']

            # Convert the ISO 8601 duration (e.g. 'PT1H2M10S') to total seconds
            duration = 0

            try:
                duration += 60 * 60 * int(re.findall(r'(\d*)H', d)[0])
            except Exception:
                pass
            try:
                duration += 60 * int(re.findall(r'(\d*)M', d)[0])
            except Exception:
                pass
            try:
                duration += int(re.findall(r'(\d*)S', d)[0])
            except Exception:
                pass

            duration = str(duration)

            self.list[item]['duration'] = duration

        except Exception:
            pass

    return self.list
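# The duration handling at the end of _video_list unpacks YouTube's ISO 8601
# duration strings (contentDetails.duration, e.g. 'PT1H2M10S') with three
# separate regexes. A minimal standalone sketch of the same conversion, for
# illustration only; the helper name is not part of the add-on:
import re


def iso8601_duration_to_seconds(value):

    seconds = 0

    hours = re.findall(r'(\d+)H', value)
    minutes = re.findall(r'(\d+)M', value)
    secs = re.findall(r'(\d+)S', value)

    if hours:
        seconds += 3600 * int(hours[0])
    if minutes:
        seconds += 60 * int(minutes[0])
    if secs:
        seconds += int(secs[0])

    return seconds


# iso8601_duration_to_seconds('PT1H2M10S') -> 3730
# iso8601_duration_to_seconds('PT15M33S') -> 933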
def episodes_list(self, url):

    try:

        base_link = re.findall('(http(?:s|)://.+?)/', url)

        if base_link:
            base_link = base_link[0]
        else:
            base_link = self.base_link

        if not '/webtv/' in url:
            result = client.request(url + '/webtv/')
            result = re.findall('"actual_args"\s*:\s*\["(.+?)"', result)[0]
        else:
            url, result = url.split('/webtv/')

        url = '%s/webtv/%s?page=%s' % (url, result.lower(), '%s')

        self.data.append('')
        self.thread(0, url % '0', None)

        try:

            result = client.parseDOM(self.data[0], 'div', attrs={'role': 'main'})[0]
            result = client.parseDOM(result, 'div', attrs={'class': 'view.+?'})[0]

            num = client.parseDOM(result, 'li', attrs={'class': 'pager__item pager__item--last'})[0]
            num = int(re.findall('page=(\d+)', num)[0])

            # Cap at ten pages; the comparison needs an int, not the string from the regex
            if num > 9:
                num = 9

            num = num + 1

            threads = []

            for i in range(1, num):
                self.data.append('')
                threads.append(workers.Thread(self.thread, i, url % str(i), None))

            [i.start() for i in threads]
            [i.join() for i in threads]

        except:
            pass

        items = ''

        for i in self.data:
            items += i

        items = client.parseDOM(items, 'div', attrs={'role': 'main'})
        items = [client.parseDOM(i, 'div', attrs={'class': 'view.+?'}) for i in items]
        items = [i[0] for i in items if len(i) > 0]
        items = client.parseDOM(items, 'article')

    except:
        return

    for item in items:

        try:

            t = client.parseDOM(item, 'div', attrs={'class': 'itemtitle'})[0]
            title = client.parseDOM(t, 'span')

            if title:
                title = title[0]
            else:
                title = t

            if title == '' or 'sneak preview' in title.lower():
                raise Exception()

            tvshowtitle = client.parseDOM(item, 'figcaption', attrs={'class': 'showtitle'})
            tvshowtitle += client.parseDOM(item, 'div', attrs={'class': 'showtitle'})

            if tvshowtitle:
                tvshowtitle = tvshowtitle[0]
            else:
                tvshowtitle = title

            title = client.replaceHTMLCodes(title)
            title = title.encode('utf-8')

            tvshowtitle = client.replaceHTMLCodes(tvshowtitle)
            tvshowtitle = tvshowtitle.encode('utf-8')

            url = client.parseDOM(item, 'a', ret='href')[0]
            url = urlparse.urljoin(base_link, url)
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')

            image = client.parseDOM(item, 'img', ret='src')[0]
            image = urlparse.urljoin(base_link, image)
            image = client.replaceHTMLCodes(image)
            image = image.encode('utf-8')

            self.list.append({'title': title, 'url': url, 'image': image, 'tvshowtitle': tvshowtitle})

        except:
            pass

    return self.list
def item_list(self, url):

    try:

        base_link = re.findall('(http(?:s|)://.+?)/', url)

        if base_link:
            base_link = base_link[0]
        else:
            base_link = self.base_link

        ajax_link = urlparse.urljoin(base_link, self.ajax_link)

        result = client.request(url)

        filtered = client.parseDOM(result, 'div', attrs={'class': 'panel-row row-.+?'})[0]
        filtered = client.parseDOM(filtered, 'div', attrs={'class': 'views.+?limit-'})
        filtered = client.parseDOM(filtered, 'a', ret='href')
        filtered = [x for y, x in enumerate(filtered) if x not in filtered[:y]]

        threads = []

        for i in range(0, 7):
            threads.append(workers.Thread(self.thread, i, ajax_link, self.view_name_link.format(str(i))))
            self.data.append('')

        [i.start() for i in threads]
        [i.join() for i in threads]

        items = ''

        for i in self.data:
            items += json.loads(i)[1]['data']

        items = client.parseDOM(items, 'li')

    except:
        return

    for item in items:

        try:

            title = client.parseDOM(item, 'div', attrs={'class': 'views-field-title'})[0]
            title = client.parseDOM(title, 'a')[0]
            title = client.replaceHTMLCodes(title)
            title = title.encode('utf-8')

            url = client.parseDOM(item, 'a', ret='href')[0]

            flt = url in filtered

            url = urlparse.urljoin(base_link, url)
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')

            image = client.parseDOM(item, "img", ret="src")[0]
            image = urlparse.urljoin(base_link, image)
            image = client.replaceHTMLCodes(image)
            image = image.encode('utf-8')

            self.list.append({'title': title, 'url': url, 'image': image, 'filter': flt})

        except:
            pass

    return self.list
def _listing(self, url):

    if self.ajax_url in url:
        result = client.request(url.partition('?')[0], post=url.partition('?')[2])
    else:
        result = client.request(url)

    try:
        header = parseDOM(result, 'h2')[0]
    except IndexError:
        header = None

    next_url = None
    override = False

    if self.base_link + '/?s=' in url or control.setting('pagination') == 'true':
        override = True

    threads_1 = []
    threads_2 = []

    # Nest the function to work on either of the two cases
    def _exec(_items, _next_url=None):

        if control.setting('threading') == 'true':

            for count, _item in list(enumerate(_items, start=1)):
                # Pass the callable and its arguments to the Thread wrapper instead of
                # calling self.loop in place (which runs it serially and hands the
                # Thread its return value)
                threads_2.append(workers.Thread(self.loop, _item, header, count, _next_url))

            [i.start() for i in threads_2]
            [i.join() for i in threads_2]

        else:

            for count, _item in list(enumerate(_items, start=1)):
                self.loop(_item, header, count, _next_url)

    if 'enimerosi-24' not in url and self.ajax_url not in url:

        ajaxes = [i for i in parseDOM(result, 'script', attrs={'type': 'text/javascript'}) if 'ajaxurl' in i]

        ajax1 = json.loads(re.search(r'var loadmore_params = ({.+})', ajaxes[-1]).group(1))
        ajax2 = json.loads(re.search(r'var cactus = ({.+})', ajaxes[0]).group(1))

        ajax = self._ajax_merge(ajax1, ajax2)

        pages = int(ajax['max_page'])
        posts = ajax['posts']

        try:
            posts = posts.encode('utf-8')
        except Exception:
            pass

        if control.setting('threading') == 'true' and not override:

            for i in range(0, pages + 1):
                # Same fix as above: hand the worker callable and its arguments to the
                # Thread wrapper (self.thread is assumed to take the url and post body)
                threads_1.append(
                    workers.Thread(self.thread, self.ajax_url, self.load_more.format(query=quote(posts), page=str(i)))
                )

            [i.start() for i in threads_1]
            [i.join() for i in threads_1]

        else:

            for i in range(0, pages + 1):

                a = client.request(self.ajax_url, post=self.load_more.format(query=quote(posts), page=str(i)))
                self.data.append(a)

                if i == 0 and override:
                    next_url = '?'.join([self.ajax_url, self.load_more.format(query=quote(posts), page='1')])
                    break

        html = '\n'.join(self.data)

        items = itertags_wrapper(html, 'div', attrs={'class': 'item item-\d+'})

        if len(items) < 20:
            next_url = None

        _exec(items, next_url)

    elif self.ajax_url in url:

        items = itertags_wrapper(result, 'div', attrs={'class': 'item item-\d+'})

        parsed = dict(parse_qsl(url.partition('?')[2]))
        next_page = int(parsed['page']) + 1
        parsed['page'] = next_page

        if len(items) >= 20:
            next_url = '?'.join([url.partition('?')[0], urlencode(parsed)])

        _exec(items, next_url)

    else:

        items = itertags_wrapper(result, 'div', attrs={'class': 'item item-\d+'})

        for item in items:

            text = item.text
            img = item.attributes['style']

            image = re.search(r'url\((.+)\)', img).group(1)
            title = client.replaceHTMLCodes(parseDOM(text, 'a')[0].strip())
            url = parseDOM(text, 'a', ret='href')[0]

            self.list.append({'title': title, 'image': image, 'url': url})

    return self.list
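# In the self.ajax_url branch of _listing, the next-page link is produced by
# decoding the current query string, bumping 'page' and re-encoding it. A small
# illustration of that round trip using Python 3's urllib.parse (the module
# itself imports equivalent helpers); the URL below is hypothetical:
from urllib.parse import parse_qsl, urlencode

current = 'https://example.org/ajax?action=load_more&query=abc&page=1'  # hypothetical URL

parsed = dict(parse_qsl(current.partition('?')[2]))
parsed['page'] = int(parsed['page']) + 1

next_url = '?'.join([current.partition('?')[0], urlencode(parsed)])
# next_url == 'https://example.org/ajax?action=load_more&query=abc&page=2'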
def run(self, query=None):

    if 'Greek' not in str(langs).split(','):

        control.directory(syshandle)
        control.infoDialog(control.lang(32002))

        return

    if not control.conditional_visibility('System.HasAddon(vfs.libarchive)') and float(
            control.addon('xbmc.addon').getAddonInfo('version')[:4]) >= 18.0:
        control.execute('InstallAddon(vfs.libarchive)')

    threads = [
        workers.Thread(self.xsubstv), workers.Thread(self.subzxyz), workers.Thread(self.subtitlesgr)
    ]

    dup_removal = False

    if not query:

        if control.condVisibility('Player.HasVideo'):
            infolabel_prefix = 'VideoPlayer'
        else:
            infolabel_prefix = 'ListItem'

        title = control.infoLabel('{0}.Title'.format(infolabel_prefix))

        if re.search(r'[^\x00-\x7F]+', title) is not None:
            title = control.infoLabel('{0}.OriginalTitle'.format(infolabel_prefix))

        year = control.infoLabel('{0}.Year'.format(infolabel_prefix))

        tvshowtitle = control.infoLabel('{0}.TVshowtitle'.format(infolabel_prefix))

        season = control.infoLabel('{0}.Season'.format(infolabel_prefix))

        if len(season) == 1:
            season = '0' + season

        episode = control.infoLabel('{0}.Episode'.format(infolabel_prefix))

        if len(episode) == 1:
            episode = '0' + episode

        if 's' in episode.lower():
            season, episode = '0', episode[-1:]

        if tvshowtitle != '':  # episode

            title_query = '{0} {1}'.format(tvshowtitle, title)
            season_episode_query = '{0} S{1} E{2}'.format(tvshowtitle, season, episode)

            threads = [
                workers.Thread(self.xsubstv, title_query),
                workers.Thread(self.subzxyz, title_query),
                workers.Thread(self.subtitlesgr, title_query),
                workers.Thread(self.xsubstv, season_episode_query),
                workers.Thread(self.subzxyz, season_episode_query),
                workers.Thread(self.subtitlesgr, season_episode_query)
            ]

            dup_removal = True

            log.log('Dual query used for subtitles search: ' + title_query + ' / ' + season_episode_query)

        elif year != '':  # movie

            query = '{0} ({1})'.format(title, year)

        else:  # file

            query, year = getCleanMovieTitle(title)

            if year != '':
                query = '{0} ({1})'.format(query, year)

    if not dup_removal:

        log.log('Query used for subtitles search: ' + query)

        self.query = query

    [i.start() for i in threads]

    for c, i in list(enumerate(range(0, 40))):

        is_alive = [x.is_alive() for x in threads]

        if all(x is False for x in is_alive):
            log.log('Reached count : ' + str(c))
            break

        if control.aborted is True:
            log.log('Aborted, reached count : ' + str(c))
            break

        control.sleep(750)

    if len(self.list) == 0:
        control.directory(syshandle)
        return

    f = []
    # noinspection PyUnresolvedReferences
    f += [i for i in self.list if i['source'] == 'xsubstv']
    f += [i for i in self.list if i['source'] == 'subzxyz']
    f += [i for i in self.list if i['source'] == 'subtitlesgr']

    self.list = f

    if dup_removal:
        self.list = [dict(t) for t in {tuple(d.items()) for d in self.list}]

    for i in self.list:

        try:
            if i['source'] == 'subzxyz':
                i['name'] = '[subzxyz] {0}'.format(i['name'])
            elif i['source'] == 'xsubstv':
                i['name'] = '[xsubstv] {0}'.format(i['name'])
        except Exception:
            pass

    for i in self.list:

        u = {'action': 'download', 'url': i['url'], 'source': i['source']}
        u = '{0}?{1}'.format(sysaddon, urlencode(u))

        item = control.item(label='Greek', label2=i['name'], iconImage=str(i['rating']), thumbnailImage='el')

        item.setProperty('sync', 'false')
        item.setProperty('hearing_imp', 'false')

        control.addItem(handle=syshandle, url=u, listitem=item, isFolder=False)

    control.directory(syshandle)