def _run(self):
    """Thread entry point: build the bottle control app, wire every RPC
    route to the corresponding self.http handler and serve until stopped.

    All routes are GET callbacks except /popen, /can_bind and
    /find_free_port, which carry form data and therefore use POST.
    """
    debug('Running in thread')
    app = Bottle()
    app.route('/test', callback=self.http.test)
    app.route('/stat', callback=self.http.stat)
    app.route('/popen', method='POST', callback=self.http.do_popen)
    app.route('/can_bind', method='POST', callback=self.http.do_can_bind)
    app.route('/find_free_port', method='POST', callback=self.http.do_find_free_port)
    app.route('/poll', callback=self.http.poll)
    app.route('/wait', callback=self.http.wait)
    app.route('/terminate', callback=self.http.terminate)
    app.route('/kill', callback=self.http.kill)
    app.route('/close', callback=self.http.close)
    app.route('/close_me', callback=self.http.close_me)
    #debug('host: ' + s.remote_host)
    #debug('port: ' + s.remote_port)
    # Keep a reference so the server can be shut down from another thread.
    self.server = BottleServer(host=s.remote_host, port=s.remote_port)
    try:
        app.run(server=self.server)
    except Exception as e:  # FIX: was legacy py2-only `except Exception, e`
        print_tb(e)
def process_page(self, url):
    # Fetch one tracker search page and append every highlighted torrent
    # row to self._items as a dict with link/title/dl_link/seeds/cat keys.
    request = self._s.get(real_url(url))
    self.soup = BeautifulSoup(clean_html(request.text), 'html.parser')
    debug(url)
    # item = {}
    # item['category'] = cat_a['href']
    # item['a'] = topic_a
    # item['dl_link'] = dl_a['href']
    # item['seeds'] = seeds_td.get_text()
    # self._items.append(item.copy())
    tbody = self.soup.find('tbody', attrs={'id': 'highlighted'})
    if tbody:
        for tr in tbody:
            try:
                from bs4 import NavigableString
                # Skip bare text nodes between <tr> elements.
                if isinstance(tr, NavigableString):
                    continue
                item = {}
                TDs = tr.find_all('td', recursive=False)
                item['a'] = TDs[2].find('a')['href']
                item['title'] = TDs[2].find('a').get_text().strip(' \n\r\t')
                # The download link is the details link with the script swapped.
                item['dl_link'] = item['a'].replace('details.php', 'download.php')
                item['seeds'] = TDs[4].get_text().strip(' \n\r\t')
                item['cat'] = TDs[0].find('a')['href'].split('cat=')[-1]
                self._items.append(item.copy())
            except BaseException as e:
                # Best effort per row: a malformed row must not stop the scan.
                log.print_tb(e)
def add_media_case():
    # Consume the queued 'add_media' request file (alternating title/imdb
    # line pairs) and dispatch each pair to the background
    # 'add_media_process' handler. Clients never process the queue.
    if _addon.getSetting('role').decode('utf-8') == u'клиент':
        return
    path = filesystem.join(addon_data_path(), 'add_media')
    if filesystem.exists(path):
        try:
            with filesystem.fopen(path, 'r') as f:
                while True:
                    try:
                        # File format: a title line followed by an imdb line.
                        title = f.readline().strip(' \n\t\r').decode('utf-8')
                        imdb = f.readline().strip(' \n\t\r')
                        log.debug('add_media_case: ' + imdb)
                        log.debug(title)
                        if title and imdb:
                            call_bg('add_media_process', {
                                'title': title,
                                'imdb': imdb
                            })
                        else:
                            break
                    except BaseException as e:
                        log.print_tb(e)
        finally:
            # Remove the queue file even on failure so it is not reprocessed.
            filesystem.remove(path)
def process_page(self, url):
    # Fetch a search results page and append each highlighted torrent row
    # to self._items as a dict (link, title, dl_link, seeds, cat).
    request = self._s.get(url)
    self.soup = BeautifulSoup(clean_html(request.text), "html.parser")
    debug(url)
    # item = {}
    # item['category'] = cat_a['href']
    # item['a'] = topic_a
    # item['dl_link'] = dl_a['href']
    # item['seeds'] = seeds_td.get_text()
    # self._items.append(item.copy())
    tbody = self.soup.find("tbody", attrs={"id": "highlighted"})
    if tbody:
        for tr in tbody:
            try:
                from bs4 import NavigableString
                # Skip bare text nodes between <tr> elements.
                if isinstance(tr, NavigableString):
                    continue
                item = {}
                TDs = tr.find_all("td", recursive=False)
                item["a"] = TDs[2].find("a")["href"]
                item["title"] = TDs[2].find("a").get_text().strip(" \n\r\t")
                # Download link = details link with the script name swapped.
                item["dl_link"] = item["a"].replace("details.php", "download.php")
                item["seeds"] = TDs[4].get_text().strip(" \n\r\t")
                item["cat"] = TDs[0].find("a")["href"].split("cat=")[-1]
                self._items.append(item.copy())
            except BaseException as e:
                # A broken row is logged and skipped, not fatal.
                log.print_tb(e)
def update_case():
    # Periodic library-update dispatcher. Persistent state between service
    # ticks is stored as attributes on the function object itself.
    # Init
    if not hasattr(update_case, 'first_start'):
        update_case.first_start = True
        update_case.first_start_time = time()
        update_case.prev_generate_time = update_case.first_start_time
    try:
        every = int(_addon.getSetting(
            'service_generate_persistent_every')) * 3600  # seconds
        delay_startup = int(_addon.getSetting('delay_startup')) * 60
    except ValueError:
        # Fall back to defaults when settings are absent or malformed.
        every = 8 * 3600
        delay_startup = 0
    # User action: a 'start_generate' marker file forces an update now
    # (with a progress dialog), unless running in client role.
    path = filesystem.join(addon_data_path(), 'start_generate')
    if filesystem.exists(
            path) and _addon.getSetting('role').decode('utf-8') != u'клиент':
        log.debug('User action!!!')
        filesystem.remove(path)
        call_bg('update_service', {'show_progress': True})
        update_case.first_start = False
        return
    # Startup: one delayed update after service start, if enabled.
    if time(
    ) > update_case.first_start_time + delay_startup and update_case.first_start:
        if _addon.getSetting('service_startup') == 'true':
            try:
                log.debug("Persistent Update Service starting...")
                log.debug(_addon.getSetting('service_startup'))
                #update_service(show_progress=False)
                call_bg('update_service', {'show_progress': False})
            except BaseException as e:
                log.print_tb(e)
            finally:
                update_case.first_start = False
    # Persistent: regular re-generation every `every` seconds.
    if time() >= update_case.prev_generate_time + every:
        # verification
        if _addon.getSetting('service_generate_persistent') == 'true':
            try:
                update_case.prev_generate_time = time()
                #update_service(show_progress=False)
                call_bg('update_service', {'show_progress': False})
                log.debug('Update List at %s' %
                          asctime(localtime(update_case.prev_generate_time)))
                log.debug('Next Update in %s' %
                          strftime("%H:%M:%S", gmtime(every)))
            except BaseException as e:
                log.print_tb(e)
            finally:
                update_case.first_start = False
def do_find_free_port(self):
    """HTTP handler: pick a free TCP port on all interfaces.

    Returns the port number as a string, or "0" on any failure.
    """
    try:
        from ..util import find_free_port
        any_host = ''
        debug('find_free_port: ')
        port = find_free_port(any_host)
        return str(port)
    except BaseException as e:
        print_tb(e)
        return "0"
def downloaded(self):
    """Return download progress in percent, as a string.

    Falls back to the integer 0 when no torrent info is available or the
    info dict is malformed (note: the fallback type intentionally matches
    the original implementation, not the success type).
    """
    info = self.torrent_info
    if info is None:
        return 0
    try:
        percent = info['downloaded'] * 100 / info['size']
        return str(round(percent))
    except BaseException as e:
        log.print_tb(e)
        return 0
def add_video(self, path, label, content):
    """Register `path` as a Kodi video source named `label`.

    Also writes the source into the video database; recursive scanning is
    enabled only for movie content.
    """
    path = filesystem.join(path, '')  # ensure trailing separator
    try:
        self.add('video', path, label)
    except SourceAlreadyExists as e:
        # Re-adding the same source is harmless; just log it.
        log.print_tb(e)
    db = VideoDB()
    # BUG FIX: was `bool(content is 'movies')` — identity comparison on str
    # literals, which only worked by CPython interning accident.
    scan_recursive = content == 'movies'
    db.update_path(path, content, scan_recursive, 0, 0)
def do_find_free_port(self):
    """HTTP handler: find a free TCP port within the 5001-5099 range.

    Returns the port as a string, or "0" when none could be found.
    """
    try:
        from ..util import find_free_port
        any_host = ''
        debug('find_free_port: ')
        free = find_free_port(any_host, xrange(5001, 5100))
        return str(free)
    except BaseException as e:
        print_tb(e)
        return "0"
def save_make_chdir(new_path):
    """Create `new_path` if needed and chdir into it.

    Returns the previous working directory so the caller can restore it.
    Raises MakeCHDirException (carrying the previous directory) when the
    directory cannot be created or entered.
    """
    current = getcwd()
    try:
        if not exists(new_path):
            makedirs(new_path)
        chdir(new_path)
    except BaseException as e:
        log.print_tb(e)
        # BUG FIX: the original had `finally: return current`, which silently
        # swallowed this raise so callers could never observe the failure.
        raise MakeCHDirException(current)
    return current
def do_can_bind(self):
    """HTTP handler: report whether the POSTed form 'port' can be bound on
    all interfaces. Returns 'True'/'False' as a string."""
    try:
        from ..util import can_bind
        any_host = ''
        port = request.forms.get('port')
        debug('can_bind: ' + port)
        result = can_bind(any_host, int(port))
        return str(result)
    except BaseException as e:
        print_tb(e)
        return "False"
def get_addon_path():
    """Return the addon install path as unicode.

    Outside of Kodi (xbmcaddon missing or reporting 'Unavailable') the
    current working directory is returned instead.
    """
    try:
        import xbmcaddon
        addon = xbmcaddon.Addon(id='script.media.aggregator')
        addon_path = addon.getAddonInfo('path').decode('utf-8')
        if addon_path == 'Unavailable':
            raise Exception('Not in Kodi')
        return addon_path
    except BaseException as e:
        log.print_tb(e)
        return filesystem.getcwd()
def update_case():
    # Synchronous variant of the periodic update dispatcher; persistent
    # state between ticks lives as attributes on the function object.
    # Init
    if not hasattr(update_case, 'first_start'):
        update_case.first_start = True
        update_case.first_start_time = time()
        update_case.prev_generate_time = update_case.first_start_time
    try:
        every = int(_addon.getSetting('service_generate_persistent_every')) * 3600  # seconds
        delay_startup = int(_addon.getSetting('delay_startup')) * 60
    except ValueError:
        # Defaults when settings are missing or malformed.
        every = 8 * 3600
        delay_startup = 0
    # User action: a 'start_generate' marker file forces an immediate
    # update with the progress UI (non-client roles only).
    path = filesystem.join(addon_data_path(), 'start_generate')
    if filesystem.exists(path) and _addon.getSetting('role').decode('utf-8') != u'клиент':
        log.debug('User action!!!')
        filesystem.remove(path)
        update_service(show_progress=True)
        update_case.first_start = False
        return
    # Startup: one delayed update after the service starts, if enabled.
    if time() > update_case.first_start_time + delay_startup and update_case.first_start:
        if _addon.getSetting('service_startup') == 'true':
            try:
                log.debug("Persistent Update Service starting...")
                log.debug(_addon.getSetting('service_startup'))
                update_service(show_progress=False)
            except BaseException as e:
                log.print_tb(e)
            finally:
                update_case.first_start = False
    # Persistent: re-generate every `every` seconds.
    if time() >= update_case.prev_generate_time + every:
        # verification
        if _addon.getSetting('service_generate_persistent') == 'true':
            try:
                update_case.prev_generate_time = time()
                update_service(show_progress=False)
                log.debug('Update List at %s' % asctime(localtime(update_case.prev_generate_time)))
                log.debug('Next Update in %s' % strftime("%H:%M:%S", gmtime(every)))
            except BaseException as e:
                log.print_tb(e)
            finally:
                update_case.first_start = False
def Name(name):
    """Decode a raw byte-string name to unicode.

    Tries UTF-8 first, then falls back to chardet detection. Always returns
    a value: on low detection confidence the name is returned unchanged
    (the sibling implementation of Name() in this project does the same).
    """
    try:
        return name.decode('utf-8')
    except UnicodeDecodeError:
        import chardet
        enc = chardet.detect(name)
        debug('UnicodeDecodeError detected', log.lineno())
        # debug(enc['confidence'])
        # debug(enc['encoding'])
        if enc['confidence'] > 0.7:
            name = name.decode(enc['encoding'])
            debug(name)
        else:
            # BUG FIX: this branch previously fell through without a return,
            # so callers received None instead of the (undecoded) name.
            log.print_tb()
        return name
def download_torrent(url, path, settings):
    """Fetch a torrent file from the tracker into `path`.

    Rewrites the details URL into a download URL and appends the user's
    passkey when missing. Returns True on success, False otherwise.
    """
    url = url.replace("details.php", "download.php")
    if "passkey" not in url:
        url += "&passkey=" + settings.hdclub_passkey
    try:
        import shutil
        resp = urllib2.urlopen(url)
        with filesystem.fopen(path, "wb") as out:
            shutil.copyfileobj(resp, out)
        return True
    except BaseException as e:
        print_tb(e)
        return False
def create_mark_file():
    """Create the 'version_latest' marker file once; on first creation also
    fetch the matching GitHub release asset (the response is only logged —
    purpose beyond that is not visible here). Errors are logged, never raised.
    """
    import urllib2  # NOTE: shutil was imported here but never used; removed
    path = filesystem.join(_addondir, 'version_latest')
    if not filesystem.exists(path):
        try:
            with filesystem.fopen(path, 'w') as f:
                f.write('test')
            if filesystem.exists(path):
                url = 'https://github.com/vadyur/script.media.aggregator/releases/download/ver_0.15.2/version_latest'
                response = urllib2.urlopen(url)
                log.debug(response.read())
        except BaseException as e:
            log.print_tb(e)
def get_links_with_ranks(strmFilename, settings, use_scrape_info=False):
    """Read '<strm>.alternative' and return its playback links as a list of
    dicts sorted by ascending rank."""
    #import vsdbg
    #vsdbg._bp()
    strmFilename_alt = strmFilename + '.alternative'
    items = []
    saved_dict = {}
    if filesystem.isfile(strmFilename_alt):
        with filesystem.fopen(strmFilename_alt, "r") as alternative:
            curr_rank = 1
            while True:
                line = alternative.readline()
                if not line:
                    break
                line = line.decode('utf-8')
                if line.startswith('#'):
                    # '#key=value' metadata lines accumulate into saved_dict
                    # until the next plugin:// link consumes them.
                    line = line.lstrip('#')
                    parts = line.split('=')
                    if len(parts) > 1:
                        saved_dict[parts[0]] = parts[1].strip(' \n\t\r')
                elif line.startswith('plugin://script.media.aggregator'):
                    try:
                        saved_dict['link'] = line.strip(u'\r\n\t ')
                        if use_scrape_info:
                            # Merge live seeds/peers info into the metadata.
                            sp = seeds_peers(saved_dict)
                            saved_dict = dict(saved_dict, **sp)
                        if 'rank' in saved_dict:
                            curr_rank = float(saved_dict['rank'])
                        else:
                            curr_rank = get_rank(
                                saved_dict.get('full_title', ''), saved_dict,
                                settings)
                    except BaseException as e:
                        import log
                        log.print_tb(e)
                        curr_rank = 1
                    item = {
                        'rank': curr_rank,
                        'link': line.strip(u'\r\n\t ')
                    }
                    items.append(dict(item, **saved_dict))
                    saved_dict.clear()
    # Lower rank sorts first (best variant first).
    items.sort(key=operator.itemgetter('rank'))
    #debug('Sorded items')
    #debug(items)
    return items
def get_post_index(self):
    """Extract the tracker-specific numeric post/topic id from self.url.

    Returns the id as a string, or None for unknown trackers or on any
    parsing failure.
    """
    try:
        patterns = [
            ('nnm-club', r'\.php.+?t=(\d+)'),
            ('hdclub', r'\.php.+?id=(\d+)'),
            ('anidub', r'/(\d+)-'),
            ('rutor', r'torrent/(\d+)/'),
        ]
        for marker, pattern in patterns:
            if marker in self.url:
                return re.search(pattern, self.url).group(1)
        return None
    except BaseException as e:
        from log import debug, print_tb
        print_tb(e)
        return None
def update_service(show_progress=False):
    # Run every enabled tracker scanner once, then trigger a Kodi video
    # library update; optionally shows a background progress dialog.
    anidub_enable = _addon.getSetting('anidub_enable') == 'true'
    hdclub_enable = _addon.getSetting('hdclub_enable') == 'true'
    nnmclub_enable = _addon.getSetting('nnmclub_enable') == 'true'
    rutor_enable = _addon.getSetting('rutor_enable') == 'true'
    settings = player.load_settings()
    if show_progress:
        info_dialog = xbmcgui.DialogProgressBG()
        info_dialog.create('Media Aggregator')
        settings.progress_dialog = info_dialog
    if anidub_enable:
        anidub.run(settings)
    if hdclub_enable:
        hdclub.run(settings)
    if nnmclub_enable:
        addon = Addon('settings2.xml')
        try:
            # Only scan the hours elapsed since the last NNM generation.
            settings.nnmclub_hours = int(
                math.ceil((time() -
                           float(addon.getSetting('nnm_last_generate'))) /
                          3600.0))
        except BaseException as e:
            settings.nnmclub_hours = 168
            log.print_tb(e)
        # Cap at one week's worth of hours.
        if settings.nnmclub_hours > 168:
            settings.nnmclub_hours = 168
        log.debug('NNM hours: ' + str(settings.nnmclub_hours))
        addon.setSetting('nnm_last_generate', str(time()))
        nnmclub.run(settings)
    if rutor_enable:
        import rutor
        rutor.run(settings)
    if show_progress:
        info_dialog.update(0, '', '')
        info_dialog.close()
    if anidub_enable or hdclub_enable or nnmclub_enable or rutor_enable:
        # Ask Kodi to rescan unless a scan is already in progress.
        if not xbmc.getCondVisibility('Library.IsScanningVideo'):
            xbmc.executebuiltin('UpdateLibrary("video")')
def download_torrent(url, path, settings):
    """Download a torrent file to `path`, recording content hashes before
    and after the write. Returns True on success, False otherwise."""
    from base import save_hashes
    save_hashes(path)
    url = url.replace('details.php', 'download.php')
    if 'passkey' not in url:
        url += '&passkey=' + settings.hdclub_passkey
    try:
        import shutil
        resp = urllib2.urlopen(real_url(url))
        with filesystem.fopen(path, 'wb') as out:
            shutil.copyfileobj(resp, out)
        save_hashes(path)
        return True
    except BaseException as e:
        print_tb(e)
        return False
def scrape_case():
    """Run the NNM scraper on first invocation and then at most once per
    half hour; persistent state lives on the function object."""
    def _run_scrape():
        # Best-effort: failures are logged, never raised.
        try:
            scrape_nnm()
            log.debug('scrape_nnm at %s' % asctime())
        except BaseException as e:
            log.print_tb(e)

    if not hasattr(scrape_case, 'prev_scrape_time'):
        _run_scrape()
        scrape_case.prev_scrape_time = time()

    scrape_every = 30 * 60
    if time() >= scrape_case.prev_scrape_time + scrape_every:
        scrape_case.prev_scrape_time = time()
        _run_scrape()
def do_popen(self):
    """HTTP handler: spawn a torrent2http engine from POSTed form fields
    and return '<pid>.<bind_port>' (or 'None' on failure)."""
    try:
        debug('do_popen')
        forms = request.forms
        argv = [
            'args=' + forms.get('args'),
            'torrent_data=' + forms.get('torrent_data'),
            'dict=' + forms.get("dict"),
        ]
        engn = parse(argv, s)
        self.engines.append(engn)
        return str(engn.pid()) + '.' + str(engn.bind_port)
    except BaseException as e:
        print_tb(e)
        return "None"
def getInfoFromTitle(fulltitle):
    """Parse a '/'-separated release title into its components.

    Expected layout: original title / season-episode / episode name /
    'озвучка: X, качество: Y'. Any part that cannot be parsed is left as
    None in the returned dict.
    """
    debug(fulltitle)
    info = dict.fromkeys(['originaltitle', 'season', 'episode',
                          'episode_name', 'ozvuchka', 'quality'])
    parts = fulltitle.split('/')
    try:
        title = parts[0].strip()
        if title.startswith('['):
            # Drop a leading '[tag]' prefix.
            title = title.split(']')[-1].strip()
        info['originaltitle'] = title

        import re
        m = re.match(r'.+?(\d+).+?(\d+)', parts[1].strip())
        if m:
            info['season'] = int(m.group(1))
            info['episode'] = int(m.group(2))

        info['episode_name'] = parts[2].strip()

        detail_parts = parts[3].strip().split(',')
        info['ozvuchka'] = detail_parts[0].split(': ')[1]
        info['quality'] = detail_parts[1].split(': ')[1]
    except BaseException as e:
        from log import print_tb
        print_tb(e)
    return info
def do_popen(self):
    """HTTP handler: build an engine from the POSTed 'args',
    'torrent_data' and 'dict' fields; answer '<pid>.<bind_port>', or
    'None' when spawning fails."""
    try:
        debug('do_popen')
        args_str = request.forms.get('args')
        tdata = request.forms.get('torrent_data')
        dict_str = request.forms.get("dict")
        engn = parse(['args=' + args_str,
                      'torrent_data=' + tdata,
                      'dict=' + dict_str], s)
        self.engines.append(engn)
        return str(engn.pid()) + '.' + str(engn.bind_port)
    except BaseException as e:
        print_tb(e)
        return "None"
def videotype(self):
    """Determine the item's video type ('episode', 'movie', Kodi DBTYPE
    label, or '') by inspecting its NFO file first and the current Kodi
    list item second."""
    base = self.settings.base_path().encode('utf-8')
    rel = urllib.unquote(self.params.get('path', ''))
    nfo_name = urllib.unquote(self.params.get('nfo', ''))
    from nforeader import NFOReader
    nfo_path = NFOReader.make_path(base, rel, nfo_name)
    if filesystem.exists(nfo_path):
        with filesystem.fopen(nfo_path, 'r') as nfo_file:
            content = nfo_file.read()
            if '<episodedetails>' in content:
                return 'episode'
            if '<movie>' in content:
                return 'movie'
    try:
        import xbmc
        return xbmc.getInfoLabel('ListItem.DBTYPE')
    except BaseException as e:
        log.print_tb(e)
    return ''
def get_by(imdb_id=None,
           kinopoisk_url=None,
           orig=None,
           year=None,
           imdbRaiting=None,
           settings=None):
    # Resolve a MovieAPI instance (cached in MovieAPI.APIs) from whichever
    # identifiers are available; returns the tuple (api, imdb_id).
    # NOTE: imdbRaiting is accepted but never referenced in this body.
    if not imdb_id:
        imdb_id = IDs.get_by_kp(kinopoisk_url) if kinopoisk_url else None
    if not imdb_id:
        try:
            # Fall back to a TMDB title/year search, preferring title and
            # year scraped from the kinopoisk page when a URL is known.
            _orig = orig
            _year = year
            if kinopoisk_url:
                kp = KinopoiskAPI(kinopoisk_url, settings)
                orig = kp.originaltitle()
                if not orig:
                    orig = kp.title()
                year = kp.year()
            imdb_id = TMDB_API.imdb_by_tmdb_search(
                orig if orig else _orig, year if year else _year)
        except BaseException as e:
            from log import print_tb
            print_tb(e)
    if imdb_id and kinopoisk_url:
        # Remember the kp-url -> imdb-id mapping for next time.
        IDs.set(imdb_id, kinopoisk_url)
    # Serve a cached API object when one exists for either identifier.
    if imdb_id and imdb_id in MovieAPI.APIs:
        return MovieAPI.APIs[imdb_id], imdb_id
    elif kinopoisk_url and kinopoisk_url in MovieAPI.APIs:
        return MovieAPI.APIs[kinopoisk_url], imdb_id
    api = MovieAPI(imdb_id, kinopoisk_url, settings, orig, year)
    if imdb_id:
        MovieAPI.APIs[imdb_id] = api
    elif kinopoisk_url:
        MovieAPI.APIs[kinopoisk_url] = api
    return api, imdb_id
def get_post_index(self):
    """Extract the numeric post/topic id from self.url for each supported
    tracker; returns None for unknown hosts or on parse failure."""
    try:
        if 'nnm-club' in self.url:
            return re.search(r'\.php.+?t=(\d+)', self.url).group(1)
        elif 'hdclub' in self.url:
            return re.search(r'\.php.+?id=(\d+)', self.url).group(1)
        elif 'bluebird' in self.url:
            return re.search(r'\.php.+?id=(\d+)', self.url).group(1)
        elif 'anidub' in self.url:
            return re.search(r'/(\d+)-', self.url).group(1)
        elif 'rutor' in self.url:
            return re.search(r'torrent/(\d+)/', self.url).group(1)
        elif 'soap4' in self.url:
            # BUG FIX: the dot before 'torrent' was unescaped and matched
            # any character; anchor it to a literal '.'.
            return re.search(r'/(\d+)\.torrent', self.url).group(1)
        else:
            return None
    except BaseException as e:
        from log import debug, print_tb
        print_tb(e)
        return None
def imdb_by_tmdb_search(orig, year):
    """Search TMDB for `orig` and return the imdb id of the first result
    whose release year and title both agree; None when nothing matches."""
    try:
        for candidate in TMDB_API.search(orig):
            data = candidate.json_data_
            release_date = data.get('release_date')
            # Skip results from a different year when a year is known.
            if year and release_date and year not in release_date:
                continue
            titles = (data.get('title'), data.get('original_title'))
            if orig and orig in titles:
                return data['imdb_id']
    except BaseException as e:
        from log import print_tb
        print_tb(e)
    return None
def scrape_case():
    """Kick off a background NNM scrape on the first call and then at most
    once every 30 minutes; state is kept on the function object."""
    def _scrape():
        # Best-effort: log and swallow any failure.
        try:
            #scrape_nnm()
            call_bg('scrape_nnm')
            log.debug('scrape_nnm at %s' % asctime())
        except BaseException as e:
            log.print_tb(e)

    if not hasattr(scrape_case, 'prev_scrape_time'):
        _scrape()
        scrape_case.prev_scrape_time = time()

    scrape_every = 30 * 60
    if time() >= scrape_case.prev_scrape_time + scrape_every:
        scrape_case.prev_scrape_time = time()
        _scrape()
def scrape_nnm():
    # Scrape seed/peer statistics for every saved nnmclub .torrent and
    # write the results into matching .stat files via process_chunk.
    settings = player.load_settings()
    data_path = settings.torrents_path()
    hashes = []
    for torr in filesystem.listdir(filesystem.join(data_path, 'nnmclub')):
        if torr.endswith('.torrent'):
            try:
                from base import TorrentPlayer
                tp = TorrentPlayer()
                tp.AddTorrent(filesystem.join(data_path, 'nnmclub', torr))
                data = tp.GetLastTorrentData()
                if data:
                    hashes.append((data['announce'], data['info_hash'],
                                   torr.replace('.torrent', '.stat')))
            except BaseException as e:
                log.print_tb(e)
    # Trackers limit URL length, so query in chunks of 32 hashes.
    for chunk in chunks(hashes, 32):
        import scraper
        try:
            seeds_peers = scraper.scrape(chunk[0][0], [i[1] for i in chunk])
        except RuntimeError as RunE:
            # URL still too long for this tracker: retry in chunks of 16.
            if '414 status code returned' in RunE.message:
                for c in chunks(chunk, 16):
                    try:
                        seeds_peers = scraper.scrape(c[0][0],
                                                     [i[1] for i in c])
                        process_chunk(c, data_path, seeds_peers)
                    except BaseException as e:
                        log.print_tb(e)
            continue
        except BaseException as e:
            log.print_tb(e)
            continue
        process_chunk(chunk, data_path, seeds_peers)
def get_uid(settings, session=None):
    """Return the logged-in nnm-club user id (as a string) by scanning the
    main page menu for a profile link, or None when it cannot be found."""
    if session is None:
        session = create_session(settings)
    try:
        page = session.get('http://nnm-club.me/')
        if page.status_code != requests.codes.ok:
            debug('page.status_code: ' + str(page.status_code))
        else:
            soup = BeautifulSoup(clean_html(page.text), 'html.parser')
            '''
            a = soup.select_one('a[href*="profile.php"]')
            if a is None:
                return None
            '''
            for anchor in soup.select('a.mainmenu'):
                match = re.search('profile.php.+?u=(\d+)', anchor['href'])
                if match:
                    return match.group(1)
    except BaseException as e:
        log.print_tb(e)
    return None
def add_media_case():
    # Process the queued 'add_media' file: alternating (title, imdb) line
    # pairs are fed synchronously to add_media_process; clients skip the
    # queue entirely.
    if _addon.getSetting('role').decode('utf-8') == u'клиент':
        return
    path = filesystem.join(addon_data_path(), 'add_media')
    if filesystem.exists(path):
        with filesystem.fopen(path, 'r') as f:
            while True:
                try:
                    # A title line followed by an imdb-id line.
                    title = f.readline().strip(' \n\t\r').decode('utf-8')
                    imdb = f.readline().strip(' \n\t\r')
                    log.debug('add_media_case: ' + imdb)
                    log.debug(title)
                    if title and imdb:
                        add_media_process(title, imdb, player.load_settings())
                    else:
                        break
                except BaseException as e:
                    log.print_tb(e)
        # Consume the queue file so it is not reprocessed next tick.
        filesystem.remove(path)
def get_uid(settings, session=None):
    """Scan the nnm-club main page (via real_url redirection) for the
    current user's profile link and return the uid string, or None."""
    if session is None:
        session = create_session(settings)
    try:
        main_page = session.get(real_url('http://nnm-club.me/'))
        if main_page.status_code == requests.codes.ok:
            menu = BeautifulSoup(clean_html(main_page.text), 'html.parser')
            '''
            a = soup.select_one('a[href*="profile.php"]')
            if a is None:
                return None
            '''
            for anchor in menu.select('a.mainmenu'):
                match = re.search('profile.php.+?u=(\d+)', anchor['href'])
                if match:
                    return match.group(1)
        else:
            debug('page.status_code: ' + str(main_page.status_code))
    except BaseException as e:
        log.print_tb(e)
    return None
def Name(name):
    # Decode a byte-string name to unicode: UTF-8 first, then chardet
    # detection; on total failure the name is returned as-is (still bytes).
    try:
        return name.decode('utf-8')
    except UnicodeDecodeError:
        try:
            import chardet
            enc = chardet.detect(name)
            log.debug('confidence: {0}'.format(enc['confidence']))
            log.debug('encoding: {0}'.format(enc['encoding']))
            if enc['confidence'] > 0.5:
                try:
                    name = name.decode(enc['encoding'])
                except UnicodeDecodeError:
                    # Detection was wrong; keep the raw bytes.
                    pass
            else:
                import vsdbg
                #vsdbg._bp()
                log.print_tb()
        except BaseException as e:
            # chardet missing or detection itself failed.
            import vsdbg
            #vsdbg._bp()
            log.print_tb()
        return name
def download_torrent(url, path, settings):
    """Download a bluebird torrent into `path`, fetching the passkey on
    demand, and validate the payload looks like bencoded data ('d8:'
    prefix). Returns True on success, False otherwise."""
    if not settings.bluebird_passkey:
        settings.bluebird_passkey = get_passkey(settings)
    if not settings.bluebird_passkey:
        return False
    from base import save_hashes
    save_hashes(path)
    url = url.replace('details.php', 'download.php')
    if 'passkey' not in url:
        url += '&passkey=' + settings.bluebird_passkey
    try:
        payload = urllib2.urlopen(real_url(url)).read()
        # Well-formed torrents are bencoded dicts: 'd8:announce...'
        if not payload.startswith('d8:'):
            return False
        with filesystem.fopen(path, 'wb') as out:
            out.write(payload)
        save_hashes(path)
        return True
    except BaseException as e:
        print_tb(e)
        return False
def get_post_index(self):
    """Extract the numeric post/topic id from self.url for each supported
    tracker; returns None for unknown hosts or on parse failure."""
    try:
        if 'nnm-club' in self.url:
            return re.search(r'\.php.+?t=(\d+)', self.url).group(1)
        elif 'hdclub' in self.url:
            return re.search(r'\.php.+?id=(\d+)', self.url).group(1)
        elif 'bluebird' in self.url:
            return re.search(r'\.php.+?id=(\d+)', self.url).group(1)
        elif 'anidub' in self.url:
            return re.search(r'/(\d+)-', self.url).group(1)
        elif 'rutor' in self.url:
            return re.search(r'torrent/(\d+)/', self.url).group(1)
        elif 'soap4' in self.url:
            # BUG FIX: the dot before 'torrent' was unescaped and matched
            # any character; anchor it to a literal '.'.
            return re.search(r'/(\d+)\.torrent', self.url).group(1)
        elif 'kinohd' in self.url:
            # http://kinohd.net/1080p/8279-tohya-928pot886b-bcex-itonya-2017.html
            part = self.url.split('/')[-1]
            return re.search(r'^(\d+)', part).group(1)
        else:
            return None
    except BaseException as e:
        from log import debug, print_tb
        print_tb(e)
        return None
def scrape_nnm():
    # Scrape seed/peer statistics (10s timeout per request) for every saved
    # nnmclub .torrent and write results to .stat files via process_chunk.
    from player import load_settings
    settings = load_settings()
    data_path = settings.torrents_path()
    # Nothing to do when no nnmclub torrents have been stored yet.
    if not filesystem.exists(filesystem.join(data_path, 'nnmclub')):
        return
    hashes = []
    for torr in filesystem.listdir(filesystem.join(data_path, 'nnmclub')):
        if torr.endswith('.torrent'):
            try:
                from base import TorrentPlayer
                tp = TorrentPlayer()
                tp.AddTorrent(filesystem.join(data_path, 'nnmclub', torr))
                data = tp.GetLastTorrentData()
                if data:
                    hashes.append((data['announce'], data['info_hash'],
                                   torr.replace('.torrent', '.stat')))
            except BaseException as e:
                log.print_tb(e)
    # Trackers limit URL length, so query in chunks of 32 hashes.
    for chunk in chunks(hashes, 32):
        import scraper
        try:
            seeds_peers = scraper.scrape(chunk[0][0], [i[1] for i in chunk],
                                         10)
        except RuntimeError as RunE:
            # URL still too long for this tracker: retry in chunks of 16.
            if '414 status code returned' in RunE.message:
                for c in chunks(chunk, 16):
                    try:
                        seeds_peers = scraper.scrape(c[0][0],
                                                     [i[1] for i in c], 10)
                        process_chunk(c, data_path, seeds_peers)
                    except BaseException as e:
                        log.print_tb(e)
            continue
        except BaseException as e:
            log.print_tb(e)
            continue
        process_chunk(chunk, data_path, seeds_peers)
# ------------------------------------------------------------------------------------------------------------------- #
def create_mark_file():
    """Create the 'version_latest' marker file once; on first creation also
    fetch the matching GitHub release asset (the response is only logged —
    purpose beyond that is not visible here). Errors are logged, never raised.
    """
    import urllib2  # NOTE: shutil was imported here but never used; removed
    path = filesystem.join(_addondir, 'version_latest')
    if not filesystem.exists(path):
        try:
            with filesystem.fopen(path, 'w') as f:
                f.write('test')
            if filesystem.exists(path):
                url = 'https://github.com/vadyur/script.media.aggregator/releases/download/ver_0.15.2/version_latest'
                response = urllib2.urlopen(url)
                log.debug(response.read())
        except BaseException as e:
            log.print_tb(e)
# ------------------------------------------------------------------------------------------------------------------- #
if __name__ == '__main__':
    # Best-effort maintenance before the main entry point; failures here
    # must not prevent main() from running.
    try:
        create_mark_file()
        save_dbs()
    except BaseException as e:
        log.print_tb(e)
    main()
def kill(self):
    """HTTP handler: hard-kill the engine's child process.

    Returns whatever process.kill() returns, or "Fail" on any error.
    """
    try:
        engine = self.engine()
        return engine.process.kill()
    except BaseException as e:
        print_tb(e)
        return "Fail"
def parse_description(self, html_text):
    """Parse a torrent 'details' page into self._dict (title, year, genre,
    plot, imdb/kp ids, thumbnail, ...).

    Returns False when the page must be skipped (parse failure, SD video
    with rutor_nosd enabled, ambiguous or missing IMDB id), True otherwise.
    """
    from HTMLParser import HTMLParseError
    html_text = clean_html(html_text)
    try:
        self.soup = BeautifulSoup(html_text, 'html.parser')
    except HTMLParseError as e:
        log.print_tb(e)
        log.debug(html_text)
        return False
    # Pass 1: '<b>Label:</b> value' pairs inside the #details block.
    tag = u''
    for b in self.soup.select('#details b'):
        try:
            text = b.get_text()
            tag = self.get_tag(text)
            if tag == 'plot':
                plot = base.striphtml(
                    unicode(b.next_sibling.next_sibling).strip())
                if plot:
                    self._dict[tag] = plot
                    debug('%s (%s): %s' %
                          (text.encode('utf-8'), tag.encode('utf-8'),
                           self._dict[tag].encode('utf-8')))
            elif tag == 'genre':
                genres = []
                elements = b.findNextSiblings('a')
                for a in elements:
                    if '/tag/' in a['href']:
                        genres.append(a.get_text())
                self._dict[tag] = u', '.join(genres)
            elif tag != '':
                self._dict[tag] = base.striphtml(
                    unicode(b.next_sibling).strip())
                debug('%s (%s): %s' %
                      (text.encode('utf-8'), tag.encode('utf-8'),
                       self._dict[tag].encode('utf-8')))
        except:
            pass
    # Pass 2: recover still-missing tags from the plain text of #details,
    # splitting 'key: value' lines.
    tags = []
    for tag in [u'title', u'year', u'genre', u'director', u'actor', u'plot']:
        if tag not in self._dict:
            tags.append(tag)
    if tags:
        try:
            details = self.soup.select_one('#details').get_text()
            lines = details.split('\n')
            for l in lines:
                if ':' in l:
                    key, desc = l.split(':', 1)
                    key = key.strip(u' \r\n\t✦═')
                    desc = desc.strip(u' \r\n\t')
                    tag = self.get_tag(key + ':')
                    if tag and desc and tag not in self._dict:
                        self._dict[tag] = desc
        except BaseException as e:
            debug('No parse #details')
            debug(e)
            pass
    if 'genre' in self._dict:
        self._dict['genre'] = self._dict['genre'].lower().replace('.', '')
    if 'video' in self._dict:
        self._dict['video'] = self._dict['video'].replace('|', ',')
        if self.settings.rutor_nosd:
            # Reject SD releases: XviD codec or width below 1280.
            video = self._dict['video']
            parts = video.split(',')
            for part in parts:
                part = part.strip()
                if 'XviD' in part:
                    return False
                m = re.search(ur'(\d+)[xXхХ](\d+)', part)
                if m:
                    w = int(m.group(1))
                    #h = int(m.group(2))
                    if w < 1280:
                        return False
                else:
                    pass
    # Collect the IMDB id from links; more than one distinct link rejects
    # the page, and absence rejects it unless no_skip_by_imdb is set.
    count_id = 0
    for a in self.soup.select('a[href*="www.imdb.com/title/"]'):
        try:
            href = a['href']
            components = href.split('/')
            if components[2] == u'www.imdb.com' and components[
                    3] == u'title':
                self._dict['imdb_id'] = components[4]
                count_id += 1
        except:
            pass
    if count_id == 0:
        # No direct link: follow the first torrent link in #index and look
        # for an IMDB link on that page instead.
        div_index = self.soup.select('#index')
        if div_index:
            for a in div_index[0].findAll('a', recursive=True):
                if '/torrent/' in a['href']:
                    parts = a['href'].split('/')
                    href = parts[0] + '/' + parts[1] + '/' + parts[2]
                    html = urllib2.urlopen(real_url(href, self.settings))
                    soup = BeautifulSoup(clean_html(html.read()),
                                         'html.parser')
                    for a in soup.select('a[href*="www.imdb.com/title/"]'):
                        try:
                            href = a['href']
                            components = href.split('/')
                            if components[
                                    2] == u'www.imdb.com' and components[
                                        3] == u'title':
                                self._dict['imdb_id'] = components[4]
                                count_id += 1
                        except:
                            pass
                    if 'imdb_id' in self._dict:
                        break
    if count_id > 1:
        return False
    if 'imdb_id' not in self._dict:
        if not hasattr(self.settings, 'no_skip_by_imdb'):
            return False
    # Thumbnail: image in the second cell of the first #details row.
    for det in self.soup.select('#details'):
        tr = det.find('tr', recursive=False)
        if tr:
            tds = tr.findAll('td', recursive=False)
            if len(tds) > 1:
                td = tds[1]
                img = td.find('img')
                try:
                    self._dict['thumbnail'] = img['src']
                    debug('!!!!!!!!!!!!!!thumbnail: ' +
                          self._dict['thumbnail'])
                    break
                except:
                    pass
    for kp_id in self.soup.select('a[href*="www.kinopoisk.ru/"]'):
        self._dict['kp_id'] = kp_id['href']
    self.make_movie_api(self.get_value('imdb_id'), self.get_value('kp_id'),
                        self.settings)
    return True
def _AddTorrent(self, path):
    # Resolve the torrent location into a URI, gather all engine options
    # from the addon configuration and construct the torrent2http Engine.
    if filesystem.exists(path):
        if path.startswith(r'\\') or '://' in path:
            # Network/UNC paths are copied to a local temp file first,
            # since the engine needs a local file URI.
            tempPath = xbmc.translatePath('special://temp').decode('utf-8')
            destPath = filesystem.join(tempPath, 't2h.torrent')
            filesystem.copyfile(path, destPath)
            path = destPath
        uri = path2url(path)
    else:
        # Not a local file — pass the value through as an URI verbatim.
        uri = path
    self.debug('AddTorrent: ' + uri)
    add_trackers = []
    if getSetting('add_tracker'):
        add_trackers.append(getSetting('add_tracker'))
    download_path = self.settings.storage_path
    if download_path == '':
        download_path = xbmc.translatePath('special://temp')
    self.debug('download_path: %s' % download_path)
    self.download_path = download_path
    encryption = self.debug_assignment(
        Encryption.ENABLED
        if getSetting('encryption') == 'true' else Encryption.DISABLED,
        'encryption')
    # Limits are configured in KB/s; 0 / None means unlimited.
    upload_limit = self.debug_assignment(
        int(getSetting("upload_limit")) *
        1024 if getSetting("upload_limit") != "" else 0, "upload_limit")
    download_limit = self.debug_assignment(
        int(getSetting("download_limit")) *
        1024 if getSetting("download_limit") != "" else 0, "download_limit")
    if getSetting("connections_limit") not in ["", 0, "0"]:
        connections_limit = self.debug_assignment(
            int(getSetting("connections_limit")), "connections_limit")
    else:
        connections_limit = None
    use_random_port = self.debug_assignment(
        True if getSetting('use_random_port') == 'true' else False,
        'use_random_port')
    listen_port = self.debug_assignment(
        int(getSetting("listen_port"))
        if getSetting("listen_port") != "" else 62881, "listen_port")
    if listen_port == 6881:
        # 6881 is the default BT port; forced to a random port instead.
        use_random_port = True
    keep_files = getSetting('action_files').decode('utf-8') != u'удалить'
    args = {
        'uri': uri,
        'download_path': download_path,
        'user_agent': user_agent,
        'encryption': encryption,
        'upload_kbps': upload_limit,
        'download_kbps': download_limit,
        'connections_limit': connections_limit,
        'keep_incomplete': False,
        'keep_complete': keep_files,
        'keep_files': keep_files,
        'dht_routers': dht_routers,
        'use_random_port': use_random_port,
        'listen_port': listen_port,
        'log_files_progress': True,
        'trackers': add_trackers,
        'startup_timeout': 1000
    }
    try:
        # Resume data lives next to the stored torrents when files are kept.
        if keep_files:
            args['resume_file'] = filesystem.join(
                self.settings.torrents_path(), self.info_hash + '.resume')
    except BaseException as e:
        log.print_tb(e)
        # Fallback: keep the resume file in the download directory.
        if keep_files:
            args['resume_file'] = filesystem.join(
                download_path, self.info_hash + '.resume')
    if args.get('resume_file'):
        self.debug('resume file is: ' + args['resume_file'])
    self.engine = Engine(**args)
def parse_description(self, html_text):
    """Parse a torrent details page into self._dict.

    Returns False when the page lacks any of the mandatory tags or a
    unique IMDB id, True otherwise.
    """
    from HTMLParser import HTMLParseError
    html_text = clean_html(html_text)
    try:
        self.soup = BeautifulSoup(html_text, 'html.parser')
    except HTMLParseError as e:
        log.print_tb(e)
        log.debug(html_text)
        return False
    # Pass over '<b>Label:</b> value' pairs inside #details.
    tag = u''
    for b in self.soup.select('#details b'):
        try:
            text = b.get_text()
            tag = self.get_tag(text)
            if tag == 'plot':
                self._dict[tag] = base.striphtml(
                    unicode(b.next_sibling.next_sibling).strip())
                debug('%s (%s): %s' % (text.encode('utf-8'),
                                       tag.encode('utf-8'),
                                       self._dict[tag].encode('utf-8')))
            elif tag == 'genre':
                genres = []
                elements = b.findNextSiblings('a')
                for a in elements:
                    if '/tag/' in a['href']:
                        genres.append(a.get_text())
                self._dict[tag] = u', '.join(genres)
            elif tag != '':
                self._dict[tag] = base.striphtml(
                    unicode(b.next_sibling).strip())
                debug('%s (%s): %s' % (text.encode('utf-8'),
                                       tag.encode('utf-8'),
                                       self._dict[tag].encode('utf-8')))
        except:
            pass
    if 'genre' in self._dict:
        self._dict['genre'] = self._dict['genre'].lower().replace('.', '')
    # All of these tags are mandatory for this tracker.
    for tag in [u'title', u'year', u'genre', u'director', u'actor', u'plot']:
        if tag not in self._dict:
            return False
    # Collect the IMDB id from links; ambiguity or absence rejects the page.
    count_id = 0
    for a in self.soup.select('a[href*="www.imdb.com/title/"]'):
        try:
            href = a['href']
            components = href.split('/')
            if components[2] == u'www.imdb.com' and components[3] == u'title':
                self._dict['imdb_id'] = components[4]
                count_id += 1
        except:
            pass
    if count_id == 0:
        # No direct link: follow the first torrent link in #index and look
        # for an IMDB link on that page instead.
        div_index = self.soup.select('#index')
        if div_index:
            for a in div_index[0].findAll('a', recursive=True):
                if '/torrent/' in a['href']:
                    parts = a['href'].split('/')
                    href = parts[0] + '/' + parts[1] + '/' + parts[2]
                    # NOTE(review): `settings` is not defined in this scope —
                    # the sibling implementation passes self.settings here;
                    # this line likely raises NameError unless a module-level
                    # `settings` exists. Verify against the full module.
                    html = urllib2.urlopen(real_url(href, settings))
                    soup = BeautifulSoup(clean_html(html.read()),
                                         'html.parser')
                    for a in soup.select('a[href*="www.imdb.com/title/"]'):
                        try:
                            href = a['href']
                            components = href.split('/')
                            if components[2] == u'www.imdb.com' and components[3] == u'title':
                                self._dict['imdb_id'] = components[4]
                                count_id += 1
                        except:
                            pass
                    if 'imdb_id' in self._dict:
                        break
    if count_id > 1:
        return False
    if 'imdb_id' not in self._dict:
        return False
    # Thumbnail: image in the second cell of the first #details row.
    for det in self.soup.select('#details'):
        tr = det.find('tr', recursive=False)
        if tr:
            tds = tr.findAll('td', recursive=False)
            if len(tds) > 1:
                td = tds[1]
                img = td.find('img')
                try:
                    self._dict['thumbnail'] = img['src']
                    debug('!!!!!!!!!!!!!!thumbnail: ' +
                          self._dict['thumbnail'])
                    break
                except:
                    pass
    if self.settings:
        if self.settings.use_kinopoisk:
            for kp_id in self.soup.select('a[href*="www.kinopoisk.ru/"]'):
                self._dict['kp_id'] = kp_id['href']
    self.make_movie_api(self.get_value('imdb_id'), self.get_value('kp_id'))
    return True
def play_torrent_variant(path, info_dialog, episodeNumber, nfoReader, settings, params, downloader):
    """Open a torrent with the configured player, buffer it and play it in Kodi.

    Parameters
    ----------
    path : str -- path to the .torrent file to play.
    info_dialog -- Kodi progress dialog used to report checking/buffering state.
    episodeNumber : int/str/None -- 0-based index into the (name-sorted) file
        list when playing an episode of a show.
    nfoReader -- optional object that builds the ListItem from .nfo metadata.
    settings -- addon settings object; selects the torrent player backend.
    params : dict -- request parameters ('path', 'nfo', optional 'index'/'cutName').
    downloader -- optional background torrent downloader; when it finishes with
        a different info-hash than the currently playing torrent, the fresher
        file is swapped in and resultTryAgain is returned.

    Returns one of the sentinel strings stored as function attributes:
    resultOK / resultCancel / resultTryNext / resultTryAgain.
    """
    import filecmp
    def _debug(msg):
        # Prefix all log lines from this routine for easier grepping.
        debug(u'play_torrent_variant: {}'.format(msg))
    # Result sentinels are exposed as attributes so callers can compare
    # against play_torrent_variant.resultXXX.
    play_torrent_variant.resultOK = 'OK'
    play_torrent_variant.resultCancel = 'Cancel'
    play_torrent_variant.resultTryNext = 'TryNext'
    play_torrent_variant.resultTryAgain = 'TryAgain'
    start_time = time.time()
    start_play_max_time = int(_addon.getSetting('start_play_max_time'))  # default 60 seconds
    search_seed_max_time = int(_addon.getSetting('search_seed_max_time'))  # default 15 seconds
    if episodeNumber != None:
        episodeNumber = int(episodeNumber)
    if settings == None:
        return play_torrent_variant.resultCancel
    if downloader:
        # Best-effort start of the background download; failure is only logged.
        try:
            downloader.start(True)
        except:
            print_tb()
    torrent_info = None
    torrent_path = path
    from torrent2http import Error as TPError
    try:
        # Select the player backend from settings.
        # NOTE(review): if settings.torrent_player matches none of these,
        # 'player' stays unbound and the finally-block below raises NameError
        # — confirm a valid value is always guaranteed by the settings UI.
        if settings.torrent_player == 'YATP':
            from yatpplayer import YATPPlayer
            player = YATPPlayer()
        elif settings.torrent_player == 'torrent2http':
            from torrent2httpplayer import Torrent2HTTPPlayer
            player = Torrent2HTTPPlayer(settings)
        elif settings.torrent_player == 'Ace Stream':
            import aceplayer
            player = aceplayer.AcePlayer(settings)
        elif settings.torrent_player == 'Elementum':
            import elementumplayer
            player = elementumplayer.ElementumPlayer()
        _debug('------------ Open torrent: ' + path)
        player.AddTorrent(path)
        # Wait (up to start_play_max_time seconds) for the player to accept
        # the torrent, polling once per second.
        added = False
        for i in range(start_play_max_time):
            if player.CheckTorrentAdded():
                added = True
                break
            if xbmc.abortRequested:
                return play_torrent_variant.resultCancel
            info_dialog.update(i, u'Проверяем файлы', ' ', ' ')
            if downloader and downloader.is_finished():
                #if not filecmp.cmp(path, downloader.get_filename()):
                # A finished download with a different info-hash replaces the
                # torrent file and asks the caller to retry with it.
                if downloader.info_hash() and downloader.info_hash() != player.info_hash:
                    downloader.move_file_to(path)
                    _debug('play_torrent_variant.resultTryAgain')
                    return play_torrent_variant.resultTryAgain
                else:
                    _debug('Torrents are equal')
                downloader = None
            xbmc.sleep(1000)
        if not added:
            _debug('Torrent not added')
            return play_torrent_variant.resultTryNext
        files = player.GetLastTorrentData()['files']
        _debug(files)
        # Default ordering: by name when picking an episode by number,
        # by size (largest first) when auto-picking a movie file.
        if 'cutName' not in params:
            if 'index' not in params:
                if episodeNumber is not None:
                    files.sort(key=operator.itemgetter('name'))
                else:
                    files.sort(key=operator.itemgetter('size'), reverse=True)
        _debug('sorted_files:')
        _debug(files)
        try:
            # Pick the file to play: by episode number, by explicit 'index'
            # param, or by fuzzy 'cutName' match.
            if 'cutName' not in params:
                if 'index' not in params:
                    if episodeNumber is None:
                        index = 0
                        playable_item = files[0]
                    else:
                        playable_item = files[episodeNumber]
                        index = playable_item.get('index')
                else:
                    index = -1
                    for item in files:
                        if int(params['index']) == item['index']:
                            playable_item = item
                            index = playable_item.get('index')
            else:
                cutName = urllib.unquote(params['cutName']).decode('utf-8').lower()
                index = -1
                for item in files:
                    name = item['name'].lower()
                    from tvshowapi import cutStr
                    if cutName in unicode(cutStr(name)):
                        playable_item = item
                        index = playable_item.get('index')
                        break
            if index == -1:
                return play_torrent_variant.resultTryNext
        except IndexError:
            # episodeNumber out of range: give the background downloader up to
            # 10 seconds to finish and swap in a fresher torrent.
            # NOTE(review): if this loop ends without returning, execution
            # falls through with 'playable_item'/'index' unbound — verify.
            for i in range(10):
                if downloader and downloader.is_finished():
                    #if not filecmp.cmp(path, downloader.get_filename()):
                    if downloader.info_hash() and downloader.info_hash() != player.info_hash:
                        downloader.move_file_to(path)
                        print 'play_torrent_variant.resultTryAgain'
                        return play_torrent_variant.resultTryAgain
                xbmc.sleep(1000)
        _debug(playable_item)
        player.StartBufferFile(index)
        # Phase 1: wait for the torrent metadata/files check to finish.
        if not player.CheckTorrentAdded():
            info_dialog.update(0, 'Media Aggregator: проверка файлов')
            while not info_dialog.iscanceled() and not player.CheckTorrentAdded():
                xbmc.sleep(1000)
                # Restart the buffering timeout after the check completes.
                start_time = time.time()
                player.updateCheckingProgress(info_dialog)
        # Phase 2: buffering loop with two timeouts — a hard start timeout
        # and a shorter seed-search timeout.
        info_dialog.update(0, 'Media Aggregator: буфферизация')
        while not info_dialog.iscanceled():
            if player.CheckBufferComplete():
                break
            percent = player.GetBufferingProgress()
            if percent >= 0:
                player.updateDialogInfo(percent, info_dialog)
            if time.time() > start_time + start_play_max_time:
                return play_torrent_variant.resultTryNext
            if time.time() > start_time + search_seed_max_time:
                # No seeds after the grace period: try the next variant.
                info = player.GetTorrentInfo()
                if 'num_seeds' in info:
                    if info['num_seeds'] == 0:
                        _debug('Seeds not found')
                        return play_torrent_variant.resultTryNext
            if downloader and downloader.is_finished():
                #if not filecmp.cmp(path, downloader.get_filename()):
                if downloader.info_hash() and downloader.info_hash() != player.info_hash:
                    downloader.move_file_to(path)
                    _debug('play_torrent_variant.resultTryAgain')
                    return play_torrent_variant.resultTryAgain
                else:
                    _debug('Torrents are equal')
                downloader = None
            xbmc.sleep(1000)
        canceled = info_dialog.iscanceled()
        info_dialog.update(0)
        info_dialog.close()
        if canceled:
            return play_torrent_variant.resultCancel
        playable_url = player.GetStreamURL(playable_item)
        _debug(playable_url)
        handle = int(sys.argv[1])
        # Build the ListItem, preferring the .nfo metadata when available.
        if nfoReader != None:
            list_item = nfoReader.make_list_item(playable_url)
        else:
            list_item = xbmcgui.ListItem(path=playable_url)
        _debug('ListItem created')
        rel_path = urllib.unquote(params['path']).decode('utf-8')
        filename = urllib.unquote(params['nfo']).decode('utf-8')
        from kodidb import KodiDB
        # Sync resume position/watched state with Kodi's video DB.
        k_db = KodiDB(filename.replace(u'.nfo', u'.strm'), \
                      rel_path, sys.argv[0] + sys.argv[2])
        k_db.PlayerPreProccessing()
        _debug('VideoDB PreProccessing: OK')
        class OurPlayer(xbmc.Player):
            # Player subclass that overlays download progress labels on the
            # fullscreen video window while playback is paused.
            def __init__(self):
                xbmc.Player.__init__(self)
                self.show_overlay = False
                # 12005 is Kodi's fullscreen video window id.
                self.fs_video = xbmcgui.Window(12005)
                x = 20
                y = int(getSetting('dnl_progress_offset', 120))
                w = self.fs_video.getWidth()
                h = 100
                # Foreground label plus a slightly offset shadow label.
                self.info_label = xbmcgui.ControlLabel(x, y, w, h, '', textColor='0xFF00EE00', font='font16')
                self.info_label_bg = xbmcgui.ControlLabel(x+2, y+2, w, h, '', textColor='0xAA000000', font='font16')
            def _show_progress(self):
                # Overlay is unsupported for Ace Stream / Elementum backends
                # and can be disabled via the 'show_dnl_progress' setting.
                if settings.torrent_player == 'Ace Stream':
                    return
                if settings.torrent_player == 'Elementum':
                    return
                if getSetting('show_dnl_progress', 'true') != 'true':
                    return
                if not self.show_overlay:
                    self.fs_video.addControls([self.info_label_bg, self.info_label])
                    self.show_overlay = True
            def _hide_progress(self):
                if self.show_overlay:
                    self.fs_video.removeControls([self.info_label_bg, self.info_label])
                    self.show_overlay = False
            def UpdateProgress(self):
                # Refresh the overlay text; 'player' is the enclosing
                # function's torrent player (closure).
                #debug('UpdateProgress')
                if self.show_overlay:
                    info = player.GetTorrentInfo()
                    #debug(info)
                    percent = float(info['downloaded']) * 100 / info['size'];
                    #debug(percent)
                    if percent >= 0:
                        heading = u"{} МB из {} МB - {}".format(info['downloaded'], info['size'], int(percent)) + r'%' + '\n'
                        if percent < 100:
                            heading += u"Скорость загрузки: {} KB/сек\n".format(info['dl_speed'])
                        heading += u"Сиды: {} Пиры: {}".format(info['num_seeds'], info['num_peers'])
                        #debug(heading)
                        self.info_label.setLabel(heading)
                        self.info_label_bg.setLabel(heading)
            def __del__(self):
                self._hide_progress()
            def onPlayBackPaused(self):
                self._show_progress()
            def onPlayBackResumed(self):
                self._hide_progress()
            def onPlayBackEnded(self):
                self._hide_progress()
            def onPlayBackStopped(self):
                self._hide_progress()
        xbmc_player = OurPlayer()
        _debug('OurPlayer creaded')
        xbmcplugin.setResolvedUrl(handle, True, list_item)
        _debug('setResolvedUrl')
        # Block until Kodi actually starts playback.
        while not xbmc_player.isPlaying():
            xbmc.sleep(300)
        _debug('!!!!!!!!!!!!!!!!! Start PLAYING !!!!!!!!!!!!!!!!!!!!!')
        if k_db.timeOffset != 0:
            # Resume from the position stored in Kodi's DB.
            _debug("Seek to time: " + str(k_db.timeOffset))
            xbmc.sleep(2000)
            xbmc_player.seekTime(int(k_db.timeOffset))
        # Wait until playing finished or abort requested
        while not xbmc.abortRequested and xbmc_player.isPlaying():
            player.loop()
            xbmc.sleep(1000)
            xbmc_player.UpdateProgress()
        _debug('!!!!!!!!!!!!!!!!! END PLAYING !!!!!!!!!!!!!!!!!!!!!')
        xbmc.sleep(1000)
        # Post-playback bookkeeping: persist watched state, capture torrent
        # info for after-actions, and trigger a library refresh/scan.
        k_db.PlayerPostProccessing()
        torrent_info = player.GetTorrentInfo()
        torrent_path = player.path
        info_hash = player.GetLastTorrentData()['info_hash']
        xbmc.executebuiltin('Container.Refresh')
        UpdateLibrary_path = filesystem.join(settings.base_path(), rel_path).encode('utf-8')
        _debug(UpdateLibrary_path)
        if not xbmc.getCondVisibility('Library.IsScanningVideo'):
            xbmc.executebuiltin('UpdateLibrary("video", "%s", "false")' % UpdateLibrary_path)
    except TPError as e:
        _debug(e)
        print_tb()
        return play_torrent_variant.resultTryNext
    except BaseException as e:
        _debug(e)
        print_tb()
        return play_torrent_variant.resultTryNext
    finally:
        _debug('FINALLY')
        player.close()
    # Run configured after-actions (scripts, file moves, cleanup) only after
    # a fully successful playback.
    if settings.run_script or settings.remove_files or settings.move_video or settings.copy_torrent:
        import afteractions
        afteractions.Runner(settings, params, playable_item, torrent_info, torrent_path, info_hash)
    return play_torrent_variant.resultOK
def terminate(self): try: return self.engine().process.terminate() except BaseException as e: print_tb(e) return "Fail"
def poll(self): try: return str(self.engine().process.poll()) except BaseException as e: print_tb(e) return "None"
def update_service(show_progress=False):
    """Run every enabled tracker scraper and refresh the Kodi video library.

    For each tracker whose '<name>_enable' setting is 'true', runs its
    ``run(settings)`` entry point inside a dump_context logging block.
    hdclub and soap4me are permanently disabled here. When show_progress is
    True, a background progress dialog is attached to the settings object
    for the scrapers to update. Finally triggers a library update (unless a
    scan is already running) and recheck_torrent_if_need().
    """
    import anidub, hdclub, nnmclub, rutor, soap4me, bluebird, kinohd
    from player import _addon
    anidub_enable = _addon.getSetting('anidub_enable') == 'true'
    hdclub_enable = False  # tracker retired; kept for the settings key only
    bluebird_enable = _addon.getSetting('bluebird_enable') == 'true'
    nnmclub_enable = _addon.getSetting('nnmclub_enable') == 'true'
    rutor_enable = _addon.getSetting('rutor_enable') == 'true'
    soap4me_enable = _addon.getSetting('soap4me_enable') == 'true'
    kinohd_enable = _addon.getSetting('kinohd_enable') == 'true'
    from player import load_settings
    settings = load_settings()
    import time
    # Remember when the update started so recheck_torrent_if_need() can
    # look at what changed since then.
    from_time = time.time()
    if show_progress:
        import xbmcgui
        info_dialog = xbmcgui.DialogProgressBG()
        info_dialog.create(settings.addon_name)
        # Scrapers read this to report their progress.
        settings.progress_dialog = info_dialog
    from log import dump_context
    if anidub_enable:
        with dump_context('anidub.run'):
            anidub.run(settings)
    #if hdclub_enable:
    #    hdclub.run(settings)
    if bluebird_enable:
        with dump_context('bluebird.run'):
            bluebird.run(settings)
    if rutor_enable:
        with dump_context('rutor.run'):
            rutor.run(settings)
    if kinohd_enable:
        with dump_context('kinohd.run'):
            kinohd.run(settings)
    if nnmclub_enable:
        from service import Addon
        addon = Addon('settings3.xml')
        try:
            # Hours since the last NNM generation, rounded up; clamped to
            # [8, 168] below. Falls back to a full week (168 h) on any error.
            import math
            from time import time
            # NOTE(review): 'from time import time' rebinds the module name
            # 'time' imported above to the function — later str(time()) calls
            # depend on this shadowing; confirm before reordering.
            settings.nnmclub_hours = int(math.ceil((time() - float(addon.getSetting('nnm_last_generate'))) / 3600.0))
        except BaseException as e:
            settings.nnmclub_hours = 168
            log.print_tb(e)
        if settings.nnmclub_hours > 168:
            settings.nnmclub_hours = 168
        if settings.nnmclub_hours < 8:
            settings.nnmclub_hours = 8
        log.debug('NNM hours: ' + str(settings.nnmclub_hours))
        addon.setSetting('nnm_last_generate', str(time()))
        with dump_context('nnmclub.run'):
            nnmclub.run(settings)
    #if soap4me_enable:
    #    import soap4me
    #    soap4me.run(settings)
    if show_progress:
        info_dialog.update(0, '', '')
        info_dialog.close()
    # Kick a library scan only if at least one scraper could have added
    # content and no scan is currently in progress.
    if anidub_enable or nnmclub_enable or rutor_enable or soap4me_enable or bluebird_enable or kinohd_enable:
        import xbmc
        if not xbmc.getCondVisibility('Library.IsScanningVideo'):
            xbmc.executebuiltin('UpdateLibrary("video")')
    recheck_torrent_if_need(from_time, settings)
def wait(self): try: return str(self.engine().process.wait()) except BaseException as e: print_tb(e) return "0"