def process_dir(_d):
    """Re-hash every stale '.hashes' file found directly inside *_d*.

    Each entry whose name ends in '.hashes' and whose mtime passes
    check_modify_time() is read; when the file stores more than one hash,
    the matching torrent (same path with '.hashes' stripped) is re-checked
    via rehash_torrent().
    """
    for entry in filesystem.listdir(_d):
        path = filesystem.join(_d, entry)
        # Skip anything that is not a recently-touched '.hashes' companion.
        if not entry.endswith('.hashes'):
            continue
        if not check_modify_time(path):
            continue
        stored = get_hashes(path)
        # A single hash means a single-file torrent that needs no recheck.
        if len(stored) > 1:
            rehash_torrent(stored, path.replace('.hashes', ''))
def scrape_nnm():
    """Scrape seed/peer statistics for all saved nnmclub torrents.

    Walks <torrents_path>/nnmclub for '*.torrent' files, extracts each
    torrent's announce URL and info-hash, then queries the tracker in
    chunks of 32 hashes.  When the tracker rejects the combined request
    with HTTP 414 (request URI too long), the chunk is retried in halves
    of 16.  Results are persisted through process_chunk().
    """
    settings = player.load_settings()
    data_path = settings.torrents_path()

    nnm_dir = filesystem.join(data_path, 'nnmclub')
    # Guard against a missing data directory (the sibling scrape_nnm
    # definition has this check; listdir() would raise otherwise).
    if not filesystem.exists(nnm_dir):
        return

    hashes = []
    for torr in filesystem.listdir(nnm_dir):
        if not torr.endswith('.torrent'):
            continue
        try:
            from base import TorrentPlayer
            tp = TorrentPlayer()
            tp.AddTorrent(filesystem.join(nnm_dir, torr))
            data = tp.GetLastTorrentData()
            if data:
                hashes.append((data['announce'], data['info_hash'],
                               torr.replace('.torrent', '.stat')))
        except BaseException as e:
            # Best effort: a single unreadable torrent must not stop the scan.
            log.print_tb(e)

    import scraper  # hoisted: importing once per call is enough
    for chunk in chunks(hashes, 32):
        try:
            # All hashes in a chunk share one announce URL (chunk[0][0]).
            seeds_peers = scraper.scrape(chunk[0][0], [i[1] for i in chunk])
        except RuntimeError as RunE:
            # str(e) equals the message in both Py2 and Py3; e.message is
            # Py2-only and deprecated.
            if '414 status code returned' in str(RunE):
                # URI too long: retry in smaller sub-chunks.
                for c in chunks(chunk, 16):
                    try:
                        seeds_peers = scraper.scrape(c[0][0], [i[1] for i in c])
                        process_chunk(c, data_path, seeds_peers)
                    except BaseException as e:
                        log.print_tb(e)
            continue
        except BaseException as e:
            log.print_tb(e)
            continue
        process_chunk(chunk, data_path, seeds_peers)
def recheck_torrent_if_need(from_time, settings):
    """Force-recheck recently modified torrents (torrent2http backend only).

    Scans every sub-directory of settings.torrents_path() for '*.hashes'
    files modified within one hour of *from_time*; each matching torrent
    is re-opened in the torrent2http engine and polled until the engine
    reports a post-checking state.

    :param from_time: reference timestamp (seconds) mtimes are compared to
    :param settings: addon settings; must expose torrent_player and
                     torrents_path()
    """
    # Only the torrent2http backend keeps '.hashes' companion files;
    # nothing to do for other players.
    if settings.torrent_player != 'torrent2http':
        return

    def check_modify_time(fn):
        # True when fn was modified within +/- one hour of from_time.
        import time, filesystem
        mt = filesystem.getmtime(fn)
        if abs(from_time - mt) < 3600:
            return True
        return False

    def get_hashes(fn):
        # Read one stored hash per line, stripping CR/LF terminators.
        with filesystem.fopen(fn, 'r') as hf:
            hashes = hf.readlines()
            return [h.strip('\r\n') for h in hashes]
        return []  # NOTE(review): unreachable — the with-block always returns

    def rehash_torrent(hashes, torrent_path):
        # Open torrent_path in the torrent2http engine and block until the
        # engine reports a state past checking.
        import time
        try:
            from torrent2httpplayer import Torrent2HTTPPlayer
            from torrent2http import State
        except ImportError:
            # torrent2http support not installed — silently skip.
            return
        player = Torrent2HTTPPlayer(settings)
        player.AddTorrent(torrent_path)
        player.GetLastTorrentData()
        #player.StartBufferFile(0)
        # NOTE(review): _AddTorrent is a private engine call made after the
        # public AddTorrent above — presumably what actually (re)starts the
        # hash check; confirm against Torrent2HTTPPlayer.
        player._AddTorrent(torrent_path)
        player.engine.start()
        f_status = player.engine.file_status(0)
        # Poll once per second until the engine leaves the checking phase
        # (downloading/seeding/finished all imply the recheck completed).
        while True:
            time.sleep(1.0)
            status = player.engine.status()
            if status.state in [ State.FINISHED, State.SEEDING, State.DOWNLOADING ]:
                break
        player.engine.wait_on_close()
        player.close()

    def process_dir(_d):
        # Re-hash every stale '.hashes' file directly inside _d.
        for fn in filesystem.listdir(_d):
            full_name = filesystem.join(_d, fn)
            if fn.endswith('.hashes') and check_modify_time(full_name):
                hashes = get_hashes(full_name)
                if len(hashes) > 1:
                    rehash_torrent(hashes, full_name.replace('.hashes', ''))

    # Walk each (tracker) sub-directory under the torrents path.
    for d in filesystem.listdir(settings.torrents_path()):
        dd = filesystem.join(settings.torrents_path(), d)
        if not filesystem.isfile(dd):
            process_dir(dd)
def save_dbs():
    """Record the Kodi database versions currently in use.

    Clears <addondir>/dbversions, then scans kodi.log for
    'Running database version' lines and creates one empty marker file per
    database name found (e.g. 'MyVideos116').
    """
    path = filesystem.join(_addondir, 'dbversions')
    # Presumably save_make_chdir_context creates `path` if needed and chdirs
    # into it, so the bare-name fopen(name, 'w') below lands inside the
    # dbversions folder — TODO confirm against the filesystem module.
    with filesystem.save_make_chdir_context(path):
        # Drop stale markers from a previous run.
        for fn in filesystem.listdir(path):
            filesystem.remove(fn)
        log_dir = xbmc.translatePath('special://logpath').decode('utf-8')
        log_path = filesystem.join(log_dir, 'kodi.log')
        log.debug(log_path)
        with filesystem.fopen(log_path, 'r') as lf:
            for line in lf.readlines():
                if 'Running database version' in line:
                    log.debug(line)
                    # Last space-separated token is the DB name (Py2: str
                    # is decoded to unicode here).
                    name = line.split(' ')[-1].strip('\r\n\t ').decode('utf-8')
                    # Touch an empty marker file named after the database.
                    with filesystem.fopen(name, 'w'):
                        pass
def scrape_nnm():
    """Scrape seed/peer statistics for all saved nnmclub torrents.

    Walks <torrents_path>/nnmclub for '*.torrent' files, extracts each
    torrent's announce URL and info-hash, then queries the tracker in
    chunks of 32 hashes with a 10-second timeout.  When the tracker
    rejects the combined request with HTTP 414 (request URI too long), the
    chunk is retried in halves of 16.  Results are persisted through
    process_chunk().
    """
    from player import load_settings
    settings = load_settings()
    data_path = settings.torrents_path()

    nnm_dir = filesystem.join(data_path, 'nnmclub')
    # Nothing saved yet — avoid listdir() raising on a missing directory.
    if not filesystem.exists(nnm_dir):
        return

    hashes = []
    for torr in filesystem.listdir(nnm_dir):
        if not torr.endswith('.torrent'):
            continue
        try:
            from base import TorrentPlayer
            tp = TorrentPlayer()
            tp.AddTorrent(filesystem.join(nnm_dir, torr))
            data = tp.GetLastTorrentData()
            if data:
                hashes.append((data['announce'], data['info_hash'],
                               torr.replace('.torrent', '.stat')))
        except BaseException as e:
            # Best effort: a single unreadable torrent must not stop the scan.
            log.print_tb(e)

    import scraper  # hoisted: importing once per call is enough
    for chunk in chunks(hashes, 32):
        try:
            # All hashes in a chunk share one announce URL; 10 s timeout.
            seeds_peers = scraper.scrape(chunk[0][0], [i[1] for i in chunk], 10)
        except RuntimeError as RunE:
            # str(e) equals the message in both Py2 and Py3; e.message is
            # Py2-only and deprecated.
            if '414 status code returned' in str(RunE):
                # URI too long: retry in smaller sub-chunks.
                for c in chunks(chunk, 16):
                    try:
                        seeds_peers = scraper.scrape(c[0][0], [i[1] for i in c], 10)
                        process_chunk(c, data_path, seeds_peers)
                    except BaseException as e:
                        log.print_tb(e)
            continue
        except BaseException as e:
            log.print_tb(e)
            continue
        process_chunk(chunk, data_path, seeds_peers)