def parse_terms(termlist):
    """Takes an array of terms, and sorts them out into 4 categories:
    * torrent URLs
    * movie URLs
    * targets (where to perform the search e.g. collages or bookmarks)
    * all other search parameters
    """
    torrents = []
    movies = []
    terms = {}
    target = 'torrents'
    # Commonly used aliases mapped onto the canonical parameter name.
    aliases = {
        'genre': 'taglist', 'genres': 'taglist', 'tags': 'taglist',
        'name': 'searchstr', 'title': 'searchstr',
    }
    for raw in termlist:
        parsed = urlparse(raw)
        query = parse_qs(parsed.query)
        if parsed.path == '/collages.php':
            target, terms = 'collage', query
        elif parsed.path == "/artist.php":
            target, terms = 'artist', query
        elif parsed.path == '/torrents.php':
            if 'torrentid' in query:
                torrents.append(ptpapi.Torrent(query['torrentid'][0]))
            elif 'id' in query:
                # A torrents.php id is a torrent only for download links;
                # otherwise it identifies a movie page.
                if query.get('action', [None])[0] == 'download':
                    torrents.append(ptpapi.Torrent(query['id'][0]))
                else:
                    movies.append(ptpapi.Movie(query['id'][0]))
            else:
                terms = query
        else:
            # Not a recognized URL: treat as `key=value` or a bare search string.
            key, _, value = raw.partition('=')
            if value:
                terms[aliases.get(key, key)] = value
            elif key == 'bookmarks':
                target = 'bookmarks'
            else:
                terms['searchstr'] = key
    return (target, movies, torrents, terms)
def load_torrent(proxy, ID, path):
    """Send a torrent to rtorrent and kick off the hash recheck.

    :param proxy: an rtorrent XML-RPC proxy
    :param ID: the PTP torrent ID to download and load
    :param path: the directory rtorrent should seed the data from
    :return: True once the torrent has been loaded, False if its info
        hash was already present in rtorrent (nothing was loaded).
    """
    logger = logging.getLogger(__name__)
    torrent = ptpapi.Torrent(ID=ID)
    torrent_data = torrent.download()
    data = bencode.bdecode(torrent_data)
    thash = metafile.info_hash(data)
    try:
        # If the hash lookup succeeds, the torrent is already in rtorrent;
        # a failed lookup raises and drops us into the load path below.
        logger.debug(u"Testing for hash {0}".format(
            proxy.d.hash(thash, fail_silently=True)))
        logger.error(
            u"Hash {0} already exists in rtorrent as {1}, cannot load.".format(
                thash, proxy.d.name(thash)))
        # Bug fix: this previously returned None on every path, so callers
        # branching on the return value could never distinguish "loaded"
        # from "already loaded". Report the duplicate explicitly.
        return False
    except (xmlrpc_client.Fault, xmlrpc.HashNotFound):
        pass
    proxy.load.raw('', xmlrpc_client.Binary(torrent_data))
    # Wait until the torrent is loaded and available
    while True:
        sleep(1)
        try:
            proxy.d.hash(thash, fail_silently=True)
            break
        except (xmlrpc_client.Fault, xmlrpc.HashNotFound):
            pass
    logger.info(u"Torrent loaded at {0}".format(path))
    # Record the completion time so other tooling can tell when it was added.
    proxy.d.custom.set(thash, 'tm_completed', str(int(time())))
    proxy.d.directory.set(thash, path)
    proxy.d.check_hash(thash)
    return True
def do_fields(api, args):
    """Print every metadata field known for movies and torrents.

    The original body duplicated the same print loop for Movie and
    Torrent; iterate over the two classes instead.

    :param api: unused, kept for the command-dispatch signature
    :param args: unused, kept for the command-dispatch signature
    """
    for label, cls in (("Movie", ptpapi.Movie), ("Torrent", ptpapi.Torrent)):
        print("{0}:".format(label))
        # ID=1 is a throwaway instance used only to read key_finder.
        obj = cls(ID=1)
        for values in obj.key_finder.values():
            for val in values:
                print("- {0}".format(val))
def main():
    """The entrypoint.

    Parses CLI args, connects to rtorrent via pyroscope, logs into PTP,
    then tries to reseed each input file by matching it to a PTP torrent
    and loading that torrent into rtorrent. Exits with a code reflecting
    how many files could not be matched or were already loaded.
    """
    parser = define_parser()
    args = parser.parse_args()
    logger = logging.getLogger('ptp-reseed')
    logging.basicConfig(level=args.loglevel)
    # Load pyroscope
    load_config.ConfigLoader().load()
    proxy = config.engine.open()
    # Futile attempt to impose our loglevel upon pyroscope
    logging.basicConfig(level=args.loglevel)
    # Load PTP API
    ptp = ptpapi.login()
    # Per-outcome buckets, reported in the summary and used for the exit code.
    loaded = []
    would_load = []
    already_loaded = []
    not_found = []
    # '-' or no files at all means read the file list from stdin.
    if args.files == ['-'] or args.files == []:
        filelist = sys.stdin
    else:
        filelist = args.files
    if args.compare_paths:
        logger.debug('Loading existing torrents for pre-matching')
        loaded_paths = find_existing_torrents(proxy)
    else:
        loaded_paths = []
    for filename in filelist:
        match = Match(None)
        # NOTE(review): str.decode is a Python 2 idiom — this assumes
        # filenames arrive as bytes; confirm against the runtime in use.
        filename = filename.strip("\n").decode('utf-8')
        logger.info(u'Starting reseed attempt on file {0}'.format(filename))
        if not os.path.exists(filename):
            logger.error(u"File/directory {0} does not exist".format(filename))
            continue
        if args.url:
            # An explicit URL overrides all other matching strategies.
            parsed_url = parse_qs(urlparse(args.url).query)
            if 'torrentid' in parsed_url:
                match = match_by_torrent(
                    ptpapi.Torrent(ID=parsed_url['torrentid'][0]),
                    filename.encode())
            elif 'id' in parsed_url:
                match = match_by_movie(ptpapi.Movie(ID=parsed_url['id'][0]),
                                       filename.encode())
        elif filename:
            # Try each configured strategy in order until one matches.
            for match_type in ptpapi.config.config.get('Reseed',
                                                       'findBy').split(','):
                try:
                    if match_type == 'filename':
                        if os.path.abspath(filename) in loaded_paths:
                            logger.info(
                                u'Path {0} already in rtorrent, skipping'.
                                format(os.path.abspath(filename)))
                        else:
                            logger.debug(u'Path {0} not in rtorrent'.format(
                                os.path.abspath(filename)))
                            match = match_against_file(ptp, filename,
                                                       args.limit)
                    elif match_type == 'title':
                        match = match_by_guessed_name(ptp, filename,
                                                      args.limit)
                    else:
                        logger.error(
                            u"Match type {0} not recognized for {1}, skipping".
                            format(match_type, filename))
                    if match:
                        break
                except Exception:
                    print(u"Error while attempting to match file '{0}'".format(
                        filename))
                    raise
        # Make sure we have the minimum information required
        if not match:
            not_found.append(filename)
            logger.error(
                u"Could not find an associated torrent for '{0}', cannot reseed"
                .format(filename))
            continue
        # CLI flag takes precedence over the config file for the target dir.
        if args.create_in_directory:
            create_in = args.create_in_directory
        elif ptpapi.config.config.has_option('Reseed', 'createInDirectory'):
            create_in = ptpapi.config.config.get('Reseed', 'createInDirectory')
        else:
            create_in = None
        create_matched_files(match,
                             directory=create_in,
                             action=args.action,
                             dry_run=args.dry_run)
        logger.info(u"Found match, now loading torrent {0} to path {1}".format(
            match.ID, match.path))
        if args.dry_run:
            would_load.append(filename)
            logger.debug("Dry-run: Stopping before actual load")
            continue
        if load_torrent(proxy, match.ID, match.path):
            loaded.append(filename)
        else:
            already_loaded.append(filename)
    if args.summary:
        print('==> Loaded:')
        print('\n'.join(loaded))
        print('==> Would have loaded:')
        print('\n'.join(would_load))
        print('==> Already loaded:')
        print('\n'.join(already_loaded))
        print('==> Not found:')
        print('\n'.join(not_found))
    # Exit codes: 1 = one not found, 2 = several not found,
    # 3 = everything found but some were already loaded.
    exit_code = 0
    if len(not_found) == 1:
        exit_code = 1
    elif len(not_found) > 1:
        exit_code = 2
    elif len(already_loaded) > 0:
        exit_code = 3
    logger.debug("Total session tokens consumed: %s",
                 ptpapi.session.session.consumed_tokens)
    logger.debug("Exiting...")
    sys.exit(exit_code)