def cookie(self):
    """Log in to xsubs.tv and return the session cookie string.

    Scrapes the CSRF token from the login page, then posts the stored
    credentials (self.user / self.password) together with that token.
    On any failure the reason is logged and None is returned, matching
    the best-effort contract of the other scraper helpers.
    """
    try:
        login = '******'
        page = client.request(login)
        csrf = client.parseDOM(
            page, 'input', ret='value',
            attrs={'name': 'csrfmiddlewaretoken'})[0]
        form = urlencode({
            'username': self.user,
            'password': self.password,
            'csrfmiddlewaretoken': csrf,
            'next': ''
        })
        # The csrftoken cookie must accompany the POST for Django's CSRF
        # check; 'output=cookie' makes client.request hand back the session
        # cookie instead of the page body.
        return client.request(
            login, post=form,
            headers={'Cookie': 'csrftoken={0}'.format(csrf)},
            output='cookie')
    except Exception as e:
        log.log('Xsubs.tv failed at cookie function, reason: ' + str(e))
        return
def download(self, path, url):
    """Download one subtitle (.srt) from xsubs.tv into *path*.

    Returns the final file path ('subtitles.<ext>' inside *path*) on
    success, or None after logging on any failure.
    """
    try:
        cookie = None
        # Treat the session as anonymous when either credential is missing.
        anonymous = (self.user == '' or self.password == '')
        code, result = client.request(url, output='response', error=True)
        # Rate-limited (HTTP 429) anonymous users get the site's message in
        # a dialog and no download.
        if code == '429' and anonymous is True:
            control.dialog.ok(str('xsubs.tv'), str(result), str(''))
            return
        elif anonymous is False:
            # Reuse a cached login cookie from self.cookie; 12 is presumably
            # the cache lifetime in hours — TODO confirm against cache.get.
            cookie = cache.get(self.cookie, 12)
        result, headers, content, cookie = client.request(
            url, cookie=cookie, output='extended')
        # The real filename is quoted inside the Content-Disposition header.
        subtitle = content['Content-Disposition']
        subtitle = re.findall('"(.+?)"', subtitle)[0]
        try:
            subtitle = subtitle.decode('utf-8')  # bytes on Py2; no-op raises on Py3 str
        except Exception:
            pass
        subtitle = control.join(path, subtitle)
        # Only plain .srt payloads are handled here; anything else aborts.
        if not subtitle.endswith('.srt'):
            raise Exception()
        with open(subtitle, 'wb') as subFile:
            subFile.write(result)
        # Rename to subtitles.<ext> so the caller/player finds a predictable name.
        fileparts = os_split(subtitle)[1].split('.')
        result = control.join(
            os_split(subtitle)[0],
            'subtitles.' + fileparts[len(fileparts) - 1])
        rename(subtitle, result)
        return result
    except Exception as e:
        log.log(
            'Xsubstv subtitle download failed for the following reason: '
            + str(e))
        return
def cache(self, url):
    """Return [(srsid, cleaned_title), ...] for every series listed in the
    all-series XML at *url*; logs and returns None on failure."""
    try:
        xml = client.request(url)
        # Drop non-ASCII runs so downstream regex/DOM parsing stays simple.
        xml = re.sub(r'[^\x00-\x7F]+', ' ', xml)
        ids = client.parseDOM(xml, 'series', ret='srsid')
        titles = client.parseDOM(xml, 'series')
        return [(sid, cleantitle.get(t)) for sid, t in zip(ids, titles)]
    except Exception as e:
        log.log('Xsubs.tv failed at cache function, reason: ' + str(e))
        return
def cache(self, i):
    """Fetch candidate page *i*, keep the sanitized body on self.r (the
    caller reuses it), and return the (original_title, year) pair scraped
    from the page's embedded JSON.  Logs and returns None on failure."""
    try:
        self.r = client.request(i)
        # Strip non-ASCII runs before regex scraping; the sanitized body is
        # deliberately kept on the instance.
        self.r = re.sub(r'[^\x00-\x7F]+', ' ', self.r)
        title_pat = r'(?:\"|\')original_title(?:\"|\')\s*:\s*(?:\"|\')(.+?)(?:\"|\')'
        year_pat = r'(?:\"|\')year(?:\"|\')\s*:\s*(?:\"|\'|)(\d{4})'
        title = re.findall(title_pat, self.r)[0]
        year = re.findall(year_pat, self.r)[0]
        return title, year
    except Exception as e:
        log.log('Subzxyz failed at cache function, reason: ' + str(e))
        return
def get(self, query):
    """Search subz.xyz for *query* (movie 'Title (Year)' or episode
    'Show SxxEyy') and fill self.list with result dicts of the form
    {'name', 'url', 'source': 'subzxyz', 'rating': 5}.

    Returns self.list, or None after logging on failure.
    """
    try:
        # Episode form: "Title S01 E02" etc.; the lookahead rejects a
        # 4-digit year being mistaken for a season/episode pair.
        try:
            match = re.findall(
                r'(.+?) (?!\d{4})S?(\d{1,2}) ?X?E?(\d{1,2})$',
                query,
                flags=re.IGNORECASE)[0]
        except Exception:
            match = None
        if not match:
            # ---- movie branch: "Title (Year)" or bare "Title" ----
            match = re.findall(r'(.+?) *?\(?(\d{4})?\)?$', query)[0]
            if len(match[1]) == 4:
                title, year = match[0], match[1]
            else:
                title = match[0]
            # Normalize whitespace / percent-escapes in the title.
            query = ' '.join(
                unquote_plus(re.sub('%\w\w', ' ',
                                    quote_plus(title))).split())
            url = 'https://subz.xyz/search?q={0}'.format(quote_plus(query))
            result = client.request(url)
            result = re.sub(r'[^\x00-\x7F]+', ' ', result)
            url = client.parseDOM(result, 'section',
                                  attrs={'class': 'movies'})[0]
            url = re.findall('(/movies/\d+)', url)
            # De-duplicate while preserving order, then cap at 20 and
            # reverse so the best candidates are probed last.
            url = [x for y, x in enumerate(url) if x not in url[:y]]
            url = [urljoin('https://subz.xyz', i) for i in url]
            url = url[:20][::-1]
            for i in url:
                # self.cache fetches page i and leaves its body on self.r.
                c = cache.get(self.cache, 2200, i)
                if c is not None:
                    if len(match[1]) == 4:
                        year_check = c[1] == year
                    else:
                        year_check = True
                    if cleantitle.get(
                            c[0]) == cleantitle.get(title) and year_check:
                        # Prefer the page self.cache just fetched; if the
                        # result came from cache (self.r unset), refetch.
                        try:
                            item = self.r
                        except Exception:
                            item = client.request(i)
                        break
                    else:
                        # Keep non-matching candidate pages; they are used
                        # as a combined fallback below.
                        self.data.append(self.r)
        else:
            # ---- episode branch ----
            title, season, episode = match
            season, episode = '{0}'.format(season), '{0}'.format(episode)
            query = ' '.join(
                unquote_plus(re.sub('%\w\w', ' ',
                                    quote_plus(title))).split())
            url = 'https://subz.xyz/search?q={0}'.format(quote_plus(query))
            result = client.request(url)
            result = re.sub(r'[^\x00-\x7F]+', ' ', result)
            url = client.parseDOM(result, 'section',
                                  attrs={'class': 'tvshows'})[0]
            url = re.findall('(/series/\d+)', url)
            url = [x for y, x in enumerate(url) if x not in url[:y]]
            url = [urljoin('https://subz.xyz', i) for i in url]
            url = url[:20][::-1]
            for i in url:
                c = cache.get(self.cache, 2200, i)
                if c is not None:
                    if cleantitle.get(c[0]) == cleantitle.get(title):
                        item = i
                        break
            # NOTE(review): if no candidate matched, `item` is unbound here
            # and the NameError is absorbed by the outer except.
            item = '{0}/seasons/{1}/episodes/{2}'.format(
                item, season, episode)
            item = client.request(item)
        # Fallback: parse every collected near-miss page at once.
        if self.data:
            item = '\n\n'.join(self.data)
        item = re.sub(r'[^\x00-\x7F]+', ' ', item)
        items = client.parseDOM(item, 'tr',
                                attrs={'data-id': '.+?'})
    except Exception as e:
        log.log('Subzxyz failed at get function, reason: ' + str(e))
        return
    for item in items:
        try:
            # Last cell of the row holds the download anchor.
            r = client.parseDOM(item, 'td', attrs={'class': '.+?'})[-1]
            url = client.parseDOM(r, 'a', ret='href')[0]
            url = client.replaceHTMLCodes(url)
            url = url.replace("'", "").encode('utf-8')
            # Display name is derived from the last URL path segment.
            name = url.split('/')[-1].strip()
            name = re.sub('\s\s+', ' ', name)
            name = name.replace('_', '').replace('%20', '.')
            name = client.replaceHTMLCodes(name)
            name = name.encode('utf-8')
            self.list.append({
                'name': name,
                'url': url,
                'source': 'subzxyz',
                'rating': 5
            })
        except Exception as e:
            # NOTE(review): a single bad row aborts the whole loop (returns
            # None) rather than skipping the row — kept as-is.
            log.log(
                'Subzxyz failed at self.list formation function, reason: '
                + str(e))
            return
    return self.list
def download(path, url):
    """Download a subtitle archive from *url* into *path*, extract it, and
    return the path of the extracted subtitle renamed to 'subtitles.<ext>'.

    Handles zip archives via zipfile (falling back to Kodi's Extract
    builtin) and rar archives via Kodi's rar:// VFS.  When several .srt/.sub
    files are found, multichoice() asks the user to pick one.  Returns None
    after logging on any failure.
    """
    try:
        result = client.request(url)
        f = control.join(path, os_split(url)[1])
        with open(f, 'wb') as subFile:
            subFile.write(result)
        dirs, files = control.listDir(path)
        if len(files) == 0:
            return
        if not f.lower().endswith('.rar'):
            try:
                zipped = zipfile.ZipFile(f)
                zipped.extractall(path)
            except Exception:
                # FIX: the original format string was
                # 'Extract("{0}","{0}")'.format(f, path), which repeated the
                # archive as its own destination and ignored *path*.  Kodi's
                # builtin is Extract(file, destination).
                control.execute('Extract("{0}","{1}")'.format(f, path))
        if f.lower().endswith('.rar'):
            # Browse inside the rar through Kodi's VFS; Windows needs
            # percent-encoding without '+' for spaces.
            if control.infoLabel('System.Platform.Windows'):
                uri = "rar://{0}/".format(quote(f))
            else:
                uri = "rar://{0}/".format(quote_plus(f))
            dirs, files = control.listDir(uri)
        else:
            dirs, files = control.listDir(path)
        if dirs and f.lower().endswith('.rar'):
            # Walk two directory levels inside the archive, collecting
            # relative file paths.
            for dir in dirs:
                _dirs, _files = control.listDir(control.join(uri, dir))
                files.extend(control.join(dir, i) for i in _files)
                if _dirs:
                    for _dir in _dirs:
                        _dir = control.join(_dir, dir)
                        __dirs, __files = control.listDir(
                            control.join(uri, _dir))
                        files.extend(
                            control.join(_dir, i) for i in __files)
        filenames = [i for i in files if i.endswith(('.srt', '.sub'))]
        if len(filenames) == 1:
            filename = filenames[0]
        else:
            filename = multichoice(filenames)
        try:
            filename = filename.decode('utf-8')  # bytes on Py2; no-op raises on Py3
        except Exception:
            pass
        # Recreate the subtitle's directory tree outside the rar before copying.
        if not control.exists(control.join(
                path, os_split(filename)[0])) and f.lower().endswith('.rar'):
            control.makeFiles(control.join(path, os_split(filename)[0]))
        subtitle = control.join(path, filename)
        if f.lower().endswith('.rar'):
            content = openFile(uri + filename).read()
            with open(subtitle, 'wb') as subFile:
                subFile.write(content)
        # Rename to subtitles.<ext> so the player finds a predictable name.
        fileparts = os_split(subtitle)[1].split('.')
        result = control.join(
            os_split(subtitle)[0],
            'subtitles.' + fileparts[len(fileparts) - 1])
        rename(subtitle, result)
        return result
    except Exception as e:
        log.log(
            'Subzxyz subtitle download failed for the following reason: '
            + str(e))
        return
def run(self, query=None):
    """Entry point of the subtitle service: build the search query (from
    *query* or the playing/selected item's infolabels), run the three
    scrapers in worker threads, and publish results as Kodi directory items.
    """
    # Bail out (with an empty directory) unless Greek is among the
    # languages Kodi asked for.
    if 'Greek' not in str(langs).split(','):
        control.directory(syshandle)
        control.infoDialog(control.lang(32002))
        return
    # Kodi 18+ needs vfs.libarchive for rar handling; install it if missing.
    if not control.conditional_visibility(
            'System.HasAddon(vfs.libarchive)') and float(
            control.addon('xbmc.addon').getAddonInfo('version')
            [:4]) >= 18.0:
        control.execute('InstallAddon(vfs.libarchive)')
    # Default thread set: one per scraper, no explicit query argument
    # (presumably each scraper falls back to self.query — TODO confirm).
    threads = [
        workers.Thread(self.xsubstv),
        workers.Thread(self.subzxyz),
        workers.Thread(self.subtitlesgr)
    ]
    dup_removal = False
    if not query:
        # Derive the query from the player (if playing) or the list item.
        if control.condVisibility('Player.HasVideo'):
            infolabel_prefix = 'VideoPlayer'
        else:
            infolabel_prefix = 'ListItem'
        title = control.infoLabel('{0}.Title'.format(infolabel_prefix))
        # Non-ASCII (e.g. Greek) titles: prefer the original title.
        if re.search(r'[^\x00-\x7F]+', title) is not None:
            title = control.infoLabel(
                '{0}.OriginalTitle'.format(infolabel_prefix))
        year = control.infoLabel('{0}.Year'.format(infolabel_prefix))
        tvshowtitle = control.infoLabel(
            '{0}.TVshowtitle'.format(infolabel_prefix))
        season = control.infoLabel('{0}.Season'.format(infolabel_prefix))
        if len(season) == 1:
            season = '0' + season
        episode = control.infoLabel('{0}.Episode'.format(infolabel_prefix))
        if len(episode) == 1:
            episode = '0' + episode
        # Specials like 'S2' report season 0; keep the trailing digit.
        if 's' in episode.lower():
            season, episode = '0', episode[-1:]
        if tvshowtitle != '':  # episode
            # Search with both the episode title and the SxxEyy form; the
            # doubled scraper threads make duplicates likely, so flag them
            # for removal later.
            title_query = '{0} {1}'.format(tvshowtitle, title)
            season_episode_query = '{0} S{1} E{2}'.format(
                tvshowtitle, season, episode)
            threads = [
                workers.Thread(self.xsubstv, title_query),
                workers.Thread(self.subzxyz, title_query),
                workers.Thread(self.subtitlesgr, title_query),
                workers.Thread(self.xsubstv, season_episode_query),
                workers.Thread(self.subzxyz, season_episode_query),
                workers.Thread(self.subtitlesgr, season_episode_query)
            ]
            dup_removal = True
            log.log('Dual query used for subtitles search: ' + title_query
                    + ' / ' + season_episode_query)
        elif year != '':  # movie
            query = '{0} ({1})'.format(title, year)
        else:  # file
            query, year = getCleanMovieTitle(title)
            if year != '':
                query = '{0} ({1})'.format(query, year)
        if not dup_removal:
            log.log('Query used for subtitles search: ' + query)
    # NOTE(review): placement inferred — self.query is set in all paths so
    # the no-argument scraper threads above can read it.
    self.query = query
    [i.start() for i in threads]
    # Poll up to 40 times (~30s at 750ms) for the scrapers to finish.
    for c, i in list(enumerate(range(0, 40))):
        is_alive = [x.is_alive() for x in threads]
        if all(x is False for x in is_alive):
            log.log('Reached count : ' + str(c))
            break
        if control.aborted is True:
            log.log('Aborted, reached count : ' + str(c))
            break
        control.sleep(750)
    if len(self.list) == 0:
        control.directory(syshandle)
        return
    # Present results grouped by source, in this fixed order.
    f = []
    # noinspection PyUnresolvedReferences
    f += [i for i in self.list if i['source'] == 'xsubstv']
    f += [i for i in self.list if i['source'] == 'subzxyz']
    f += [i for i in self.list if i['source'] == 'subtitlesgr']
    self.list = f
    if dup_removal:
        # Deduplicate identical result dicts (hashable key/value tuples).
        self.list = [
            dict(t) for t in {tuple(d.items()) for d in self.list}
        ]
    # Tag names with their source for the two non-default scrapers.
    for i in self.list:
        try:
            if i['source'] == 'subzxyz':
                i['name'] = '[subzxyz] {0}'.format(i['name'])
            elif i['source'] == 'xsubstv':
                i['name'] = '[xsubstv] {0}'.format(i['name'])
        except Exception:
            pass
    for i in self.list:
        # Each entry links back into this addon with a download action.
        u = {'action': 'download', 'url': i['url'], 'source': i['source']}
        u = '{0}?{1}'.format(sysaddon, urlencode(u))
        item = control.item(label='Greek',
                            label2=i['name'],
                            iconImage=str(i['rating']),
                            thumbnailImage='el')
        item.setProperty('sync', 'false')
        item.setProperty('hearing_imp', 'false')
        control.addItem(handle=syshandle,
                        url=u,
                        listitem=item,
                        isFolder=False)
    control.directory(syshandle)
def get(self, query):
    """Search xsubs.tv for an episode query of the form 'Show SxxEyy'.

    Walks the site's XML tree (all series -> series -> season) to the
    requested episode and fills self.list with result dicts
    {'name', 'url', 'source': 'xsubstv', 'rating': 5}.  Returns self.list,
    or None after logging on failure.
    """
    try:
        title, season, episode = re.findall('(.+?) S?(\d+) ?X?E?(\d+)$',
                                            query,
                                            flags=re.IGNORECASE)[0]
        season, episode = '{0}'.format(season), '{0}'.format(episode)
        # Drop a leading article before cleaning, to match the site's index.
        title = re.sub('^THE\s+|^A\s+', '', title.strip().upper())
        title = cleantitle.get(title)
        # Cached list of (srsid, cleaned_title) pairs; 48 is presumably the
        # cache lifetime in hours — TODO confirm against cache.get.
        url = 'http://www.xsubs.tv/series/all.xml'
        srsid = cache.get(self.cache, 48, url)
        srsid = [i[0] for i in srsid if title == i[1]][0]
        url = 'http://www.xsubs.tv/series/{0}/main.xml'.format(srsid)
        result = client.request(url)
        # Season number -> season id, then fetch that season's episode XML.
        ssnid = client.parseDOM(result,
                                'series_group',
                                ret='ssnid',
                                attrs={'ssnnum': season})[0]
        url = 'http://www.xsubs.tv/series/{0}/{1}.xml'.format(srsid, ssnid)
        result = client.request(url)
        # Keep the subtitle group whose <etitle number=...> matches the episode.
        items = client.parseDOM(result, 'subg')
        items = [(client.parseDOM(i, 'etitle', ret='number'), i)
                 for i in items]
        items = [
            i[1] for i in items if len(i[0]) > 0 and i[0][0] == episode
        ][0]
        items = re.findall('(<sr .+?</sr>)', items)
    except Exception as e:
        log.log('Xsubs.tv failed at get function, reason: ' + str(e))
        return
    for item in items:
        try:
            p = client.parseDOM(item, 'sr', ret='published_on')[0]
            if p == '':
                raise Exception(
                    'Parsedom found no match, line 71 @ xsubztv.py')
            # Release name: strip markup and the trailing <hits> block.
            name = client.parseDOM(item, 'sr')[0]
            name = name.rsplit('<hits>', 1)[0]
            name = re.sub('</.+?><.+?>|<.+?>', ' ', name).strip()
            name = '{0} {1}'.format(query, name)
            name = client.replaceHTMLCodes(name)
            name = name.encode('utf-8')
            url = client.parseDOM(item, 'sr', ret='rlsid')[0]
            url = 'http://www.xsubs.tv/xthru/getsub/{0}'.format(url)
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            self.list.append({
                'name': name,
                'url': url,
                'source': 'xsubstv',
                'rating': 5
            })
        except Exception as e:
            # NOTE(review): a single bad entry aborts the whole loop
            # (returns None) rather than skipping it — kept as-is.
            log.log(
                'Xsubs.tv failed at self.list formation function, reason: '
                + str(e))
            return
    return self.list
def get(self, query):
    """Search subtitles.gr (or findsubtitles.eu) for Greek subtitles.

    Fills self.list with result dicts {'name', 'url',
    'source': 'subtitlesgr', 'rating'} sorted by rating (descending) and
    returns it; returns None after logging on failure.
    """
    try:
        # Uploaders whose entries are dropped from the results.
        filtered = ['freeprojectx', 'subs4series', u'Εργαστήρι Υποτίτλων']
        # Normalize whitespace / percent-escapes in the incoming query.
        query = ' '.join(
            unquote_plus(re.sub('%\w\w', ' ', quote_plus(query))).split())
        if control.setting('subtitlesgr') == 'true':
            url = 'http://www.subtitles.gr/search.php?name={0}'.format(
                quote_plus(query))
        else:
            url = 'http://www.findsubtitles.eu/search.php?text={0}&lang=Greek&button=Search'.format(
                quote_plus(query))
        # Resolve redirects first, then fetch via the 'gr' mirror host.
        url = client.request(url, output='geturl')
        result = client.request(url.replace('www', 'gr'))
        try:
            result = result.decode('utf-8', errors='replace')
        except AttributeError:
            pass  # already a str (Python 3)
        items = client.parseDOM(result, 'tr', attrs={'on.+?': '.+?'})
    except Exception as e:
        log.log('Subtitles.gr failed at get function, reason: ' + str(e))
        return
    for item in items:
        try:
            # Keep only Greek-flagged rows.
            if u'flags/el.gif' not in item:
                continue
            try:
                uploader = client.parseDOM(item,
                                           'a',
                                           attrs={'class':
                                                  'link_from'})[0].strip()
                try:
                    uploader = uploader.decode('utf-8')
                except AttributeError:
                    pass
                if uploader == '':
                    raise Exception
                if uploader in filtered:
                    continue
            except Exception:
                uploader = 'other'
            # FIX: was a bare `except:`; narrowed so SystemExit /
            # KeyboardInterrupt are not swallowed.
            try:
                downloads = client.parseDOM(
                    item, 'td',
                    attrs={'class': 'latest_downloads'})[0].strip()
            except Exception:
                downloads = '0'
            downloads = re.sub('[^0-9]', '', downloads)
            name = client.parseDOM(item, 'a',
                                   attrs={'onclick': 'runme.+?'})[0]
            name = ' '.join(re.sub('<.+?>', '', name).split())
            name = u'[{0}] {1} [{2} DLs]'.format(uploader, name, downloads)
            name = client.replaceHTMLCodes(name)
            # name = name.encode('utf-8')
            url = client.parseDOM(item,
                                  'a',
                                  ret='href',
                                  attrs={'onclick': 'runme.+?'})[0]
            url = url.split('"')[0].split('\'')[0].split(' ')[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            rating = self._rating(downloads)
            self.list.append({
                'name': name,
                'url': url,
                'source': 'subtitlesgr',
                'rating': rating
            })
        except Exception as e:
            log.log(
                'Subtitles.gr failed at self.list formation function, reason: '
                + str(e))
            # Preserve the original's "list is sorted on every exit" behavior.
            self.list.sort(key=lambda k: k['rating'], reverse=True)
            return
    # FIX: the original re-sorted self.list after every append inside the
    # loop (accidental O(n^2 log n)).  Because list.sort is stable, one sort
    # at the end yields the identical final ordering.
    self.list.sort(key=lambda k: k['rating'], reverse=True)
    return self.list