def _search_title(self, title):
    ''' Search TMDB for title
    title (str): movie title

    Title can include year ie Move Title 2017

    Returns list of results
    '''
    title = Url.normalize(title)

    url = 'https://api.themoviedb.org/3/search/movie?page=1&include_adult=false&'
    if title[-4:].isdigit():
        # Trailing 4 digits are treated as a release year.
        query = 'query={}&year={}'.format(title[:-5], title[-4:])
    else:
        query = 'query={}'.format(title)

    url = url + query
    logging.info('Searching TMDB {}'.format(url))
    url = url + '&api_key={}'.format(_k(b'tmdb'))

    self.use_token()

    try:
        results = json.loads(Url.open(url).text)
        # TMDB encodes failure as the JSON boolean false, not the string
        # 'false' -- the original comparison could never match.
        if results.get('success') is False:
            return []
        else:
            # Cap at 6 results to keep downstream processing small.
            return results['results'][:6]
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception as e:
        logging.error('Error searching for title on TMDB.', exc_info=True)
        return []
def search(imdbid, term):
    ''' Backlog search on ThePirateBay
    imdbid (str): imdb id# to search for
    term (str): search term (unused by this indexer)

    Returns list of parsed results
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    logging.info(
        'Performing backlog search on ThePirateBay for {}.'.format(imdbid))

    url = 'https://www.thepiratebay.org/search/{}/0/99/200'.format(imdbid)
    headers = {'Cookie': 'lw=s'}

    try:
        bypass = use_proxy and core.proxy.whitelist(
            'https://www.thepiratebay.org') is True
        if bypass:
            response = Url.open(url, proxy_bypass=True, headers=headers).text
        else:
            response = Url.open(url, headers=headers).text

        return _parse(response, imdbid) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('ThePirateBay search failed.', exc_info=True)
        return []
def fuzzy_title(self, titles):
    ''' Score and remove results based on title match
    titles (list): titles to match against

    If titles is an empty list every result is treated as a perfect match

    Iterates through self.results and removes any entry that does not
        fuzzy match 'title' > 70.
    Adds fuzzy_score / 20 points to ['score']

    Does not return
    '''
    logging.info('Checking title match.')

    kept = []
    if titles == []:
        # Nothing to compare against -- every result is a perfect match.
        for result in self.results:
            result['score'] += 20
            kept.append(result)
    else:
        for result in self.results:
            # Imports are user-supplied, so skip matching entirely.
            if result['type'] == 'import' and result not in kept:
                result['score'] += 20
                kept.append(result)
                continue

            release = Url.normalize(result['title'])
            ratios = [fuzz.partial_ratio(Url.normalize(t), release) for t in titles]
            best = max(ratios)
            if best > 70:
                result['score'] += int(best / 5)
                kept.append(result)
            else:
                logging.debug('{} best title match was {}%, removing search result.'.format(release, best))

    self.results = kept
    logging.info('Keeping {} results.'.format(len(self.results)))
def _search_tmdbid(self, tmdbid):
    ''' Search TMDB for a specific tmdb id#
    tmdbid (str/int): tmdb id# to look up

    Returns list containing a single result dict, or [''] on failure
    '''
    url = u'https://api.themoviedb.org/3/movie/{}?language=en-US&append_to_response=alternative_titles,external_ids,release_dates'.format(tmdbid)
    logging.info('Searching TMDB {}'.format(url))
    url = url + '&api_key={}'.format(_k('tmdb'))
    # Build the request once -- the original constructed it twice.
    request = Url.request(url)

    # Rate limiting: wait until at least 3 tokens are available.
    while self.get_tokens() < 3:
        sleep(0.3)
    self.use_token()

    try:
        response = Url.open(request)
        results = json.loads(response)
        # TMDB only includes status_code in error payloads.
        if results.get('status_code'):
            logging.warning(results.get('status_code'))
            return ['']
        else:
            return [results]
    except (SystemExit, KeyboardInterrupt):
        raise
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:  # noqa
        logging.error(u'Error searching for TMDBID on TMDB.', exc_info=True)
        return ['']
def _search_imdbid(self, imdbid):
    ''' Search TMDB for a specific imdb id#
    imdbid (str): imdb id# to look up

    Returns list containing a single result dict, or [''] on failure
    '''
    url = u'https://api.themoviedb.org/3/find/{}?language=en-US&external_source=imdb_id'.format(imdbid)
    logging.info('Searching TMDB {}'.format(url))
    url = url + '&api_key={}'.format(_k('tmdb'))

    request = Url.request(url)

    # Rate limiting: wait until at least 3 tokens are available.
    while self.get_tokens() < 3:
        sleep(0.5)
    self.use_token()

    try:
        response = Url.open(request)
        results = json.loads(response)
        if results['movie_results'] == []:
            return ['']
        else:
            response = results['movie_results'][0]
            # Tag the result with the imdbid it was found with.
            response['imdbid'] = imdbid
            return [response]
    except (SystemExit, KeyboardInterrupt):
        raise
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:  # noqa
        logging.error(u'Error searching for IMDBID on TMDB.', exc_info=True)
        return ['']
def trailer(title_date):
    ''' Gets trailer embed ID from Youtube.
    title_date (str): movie title and date ('Movie Title 2016')

    Attempts to connect 3 times in case Youtube is down or not responding
    Can fail if no response is received.

    Returns str
    '''
    logging.info('Getting trailer url from YouTube for {}'.format(title_date))

    search_term = Url.normalize((title_date + '+trailer'))

    url = 'https://www.googleapis.com/youtube/v3/search?part=snippet&q={}&maxResults=1&key={}'.format(search_term, _k(b'youtube'))

    for attempt in range(3):
        try:
            data = json.loads(Url.open(url).text)
            return data['items'][0]['id']['videoId']
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception:
            # Only log once all retries are exhausted.
            if attempt == 2:
                logging.error('Unable to get trailer from Youtube.', exc_info=True)
    return ''
def search(imdbid, term):
    ''' Backlog search on YTS
    imdbid (str): imdb id# to search for
    term (str): search term

    Returns list of parsed results
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    logging.info('Performing backlog search on YTS for {}.'.format(imdbid))
    url = 'https://yts.ag/api/v2/list_movies.json?limit=1&query_term={}'.format(
        imdbid)

    try:
        if use_proxy and proxy.whitelist('https://www.yts.ag') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        if not response:
            return []

        payload = json.loads(response)
        if payload['data']['movie_count'] < 1:
            return []
        return YTS.parse(payload['data']['movies'][0], imdbid, term)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('YTS search failed.', exc_info=True)
        return []
def _search_db(title_year):
    ''' Helper for backlog_search
    title_year (str): movie title and year 'Black Swan 2010'

    Returns list of found predb entries
    '''
    title_year = Url.normalize(title_year, ascii_only=True)

    categories = 'movies'
    if core.CONFIG['Search'].get('predb_unknown'):
        # Optionally widen the search to the 'unknown' category.
        categories += ',unknown'

    url = 'http://predb.me/?cats={}&search={}&rss=1'.format(
        categories, title_year)

    try:
        response = Url.open(url).text
        # Replace bare ampersands before parsing -- presumably so the XML
        # parser does not reject the feed; confirm against _parse_predb_xml.
        sanitized = response.replace('&', '%26')
        return _parse_predb_xml(sanitized)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('Predb.me search failed.', exc_info=True)
        return []
def _fuzzy_title(a, b):
    ''' Determines how much of a is in b
    a (str): String to match against b
    b (str): String to match a against

    Order of a and b matters. A is broken down and words are compared
        against B's words. ie:
    _fuzzy_title('This is string a', 'This is string b and has extra words.')
    Returns 75 since 75% of a is in b.

    Returns int
    '''
    a = a.replace('&', 'and')
    b = b.replace('&', 'and')

    words_a = Url.normalize(a).split(' ')
    words_b = Url.normalize(b).split(' ')

    hits = 0
    total = len(words_a)
    for word in words_a:
        if word in words_b:
            # Consume the matched word so each duplicate in a needs its
            # own copy in b.
            words_b.remove(word)
            hits += 1

    return int((hits / total) * 100)
def search(imdbid, term, ignore_if_imdbid_cap=False):
    ''' Backlog search on YTS
    imdbid (str): imdb id# to search for
    term (str): search term
    ignore_if_imdbid_cap (bool): skip this indexer when imdbid-capable
        indexers should handle the search

    Returns list of parsed results
    '''
    if ignore_if_imdbid_cap:
        return []

    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    logging.info('Performing backlog search on YTS for {}.'.format(imdbid))

    host = base_url()
    url = '{}/api/v2/list_movies.json?limit=1&query_term={}'.format(
        host, imdbid)

    try:
        if use_proxy and core.proxy.whitelist(host) is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        if not response:
            return []

        payload = json.loads(response)
        if payload['data']['movie_count'] < 1:
            return []
        return _parse(payload['data']['movies'][0], imdbid, term)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('YTS search failed.', exc_info=True)
        return []
def search(imdbid, term, ignore_if_imdbid_cap=False, page=0):
    ''' Backlog search on TorrentGalaxy
    imdbid (str): imdb id# to search for
    term (str): search term
    ignore_if_imdbid_cap (bool): skip this indexer when imdbid-capable
        indexers should handle the search
    page (int): results page to fetch

    Returns list of parsed results
    '''
    if ignore_if_imdbid_cap:
        return []

    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    if page == 0:
        # Log only on the first page to avoid noise during pagination.
        logging.info('Searching TorrentGalaxy for {}.'.format(term))

    host = base_url()
    url = '{}/torrents.php?c42=1&c3=1&c4=1&search={}&lang=0&sort=id&order=desc&page={}'.format(
        host, imdbid, page)

    try:
        if use_proxy and core.proxy.whitelist(host) is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return _parse(response, imdbid, term, page) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('TorrentGalaxy search failed.', exc_info=True)
        return []
def search(imdbid, term):
    ''' Backlog search on TorrentDownloads
    imdbid (str): imdb id# of the movie
    term (str): search term

    Returns list of parsed results
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    logging.info(
        'Performing backlog search on TorrentDownloads for {}.'.format(imdbid))

    url = 'http://www.torrentdownloads.me/rss.xml?type=search&search={}'.format(
        term)

    try:
        if use_proxy and core.proxy.whitelist(
                'http://www.torrentdownloads.me') is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return _parse(response, imdbid) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('TorrentDownloads search failed.', exc_info=True)
        return []
def test_connection(indexer, apikey):
    ''' Tests connection to NewzNab API
    indexer (str): base url of indexer
    apikey (str): api key for indexer

    Returns dict on failure; None on success (see NOTE below)
    '''
    if not indexer:
        return {'response': False, 'error': 'Indexer field is blank.'}

    # Strip trailing slashes so the /api path is built correctly.
    while indexer[-1] == '/':
        indexer = indexer[:-1]

    response = {}

    logging.info(u'Testing connection to {}.'.format(indexer))

    # tt0063350 is a fixed probe id just to exercise the search endpoint.
    url = u'{}/api?apikey={}&t=search&id=tt0063350'.format(indexer, apikey)

    request = Url.request(url)
    try:
        response = Url.open(request)
    except (SystemExit, KeyboardInterrupt):
        raise
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:  # noqa
        logging.error(u'Newz/TorzNab connection check.', exc_info=True)
        return {
            'response': False,
            'message': 'No connection could be made because the target machine actively refused it.'
        }
    # NOTE(review): the success path falls through and returns None
    # implicitly -- presumably callers treat a non-dict as success;
    # confirm against callers before changing.
def cancel_download(downloadid):
    ''' Cancels download in client
    downloadid: int download id

    Returns bool
    '''
    logging.info('Cancelling download # {}'.format(downloadid))

    conf = core.CONFIG['Downloader']['Torrent']['QBittorrent']

    base_url = '{}:{}/'.format(conf['host'], conf['port'])

    # Log in first if no session cookie is cached yet.
    if QBittorrent.cookie is None:
        if QBittorrent._login(base_url, conf['user'], conf['pass']) is not True:
            return False

    post_data = {'hashes': downloadid.lower()}
    url = '{}command/deletePerm'.format(base_url)
    headers = {'cookie': QBittorrent.cookie}

    try:
        # QBit returns an empty string
        Url.open(url, post_data=post_data, headers=headers)
        return True
    except Exception:
        logging.error('Unable to cancel download.', exc_info=True)
        return False
def test_connection(data):
    ''' Tests connectivity to Sabnzbd
    :para data: dict of Sab server information

    Tests if we can get Sab's stats using server info in 'data'

    Return True on success or str error message on failure
    '''
    host = data['host']
    port = data['port']
    api = data['api']

    url = u'http://{}:{}/sabnzbd/api?apikey={}&mode=server_stats'.format(
        host, port, api)

    request = Url.request(url)

    try:
        response = Url.open(request)
        if 'error' in response:
            return response
        return True
    except (SystemExit, KeyboardInterrupt):
        raise
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:
        logging.error(u'Sabnzbd connection test failed.', exc_info=True)
        # Only urllib errors carry .reason; fall back to the exception itself
        # so a generic failure doesn't raise a masking AttributeError.
        return u'{}.'.format(getattr(e, 'reason', e))
def search(imdbid, term, ignore_if_imdbid_cap=False):
    ''' Backlog search on ThePirateBay
    imdbid (str): imdb id# to search for
    term (str): search term (unused by this indexer)
    ignore_if_imdbid_cap (bool): skip this indexer when imdbid-capable
        indexers should handle the search

    Returns list of parsed results
    '''
    if ignore_if_imdbid_cap:
        return []

    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    logging.info(
        'Performing backlog search on ThePirateBay for {}.'.format(imdbid))

    host = base_url()
    url = '{}/q.php?q={}'.format(host, imdbid)

    try:
        if use_proxy and core.proxy.whitelist(host) is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        return _parse(response, imdbid) if response else []
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('ThePirateBay search failed.', exc_info=True)
        return []
def search_newznab(self, url_base, apikey, **params):
    ''' Searches Newznab for imdbid
    url_base: str base url for all requests (https://indexer.com/)
    apikey: str api key for indexer
    params: parameters to url encode and append to url

    Creates url based off url_base. Appends url-encoded **params to url.

    Returns list of dicts of search results
    '''
    url = u'{}api?apikey={}&{}'.format(url_base, apikey,
                                       urllib.urlencode(params))
    # Log with the api key redacted.
    logging.info(u'SEARCHING: {}api?apikey=APIKEY&{}'.format(
        url_base, urllib.urlencode(params)))

    proxy_enabled = core.CONFIG['Server']['Proxy']['enabled']

    request = Url.request(url)
    try:
        if proxy_enabled and Proxy.whitelist(url) is True:
            response = Proxy.bypass(request)
        else:
            response = Url.open(request)

        return self.parse_newznab_xml(response)
    except (SystemExit, KeyboardInterrupt):
        raise
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:  # noqa
        logging.error(u'Newz/TorzNab backlog search.', exc_info=True)
        return []
def get_trailer(self, title_date):
    ''' Gets trailer embed url from Youtube.
    :param title_date: str movie title and date ("Movie Title 2016")

    Attempts to connect 3 times in case Youtube is down or not responding
    Can fail if no response is received.

    Returns str or None
    '''
    search_term = Url.encode((title_date + '+trailer'))

    search_string = u"https://www.googleapis.com/youtube/v3/search?part=snippet&q={}&maxResults=1&key={}".format(search_term, _k('youtube'))

    request = Url.request(search_string)

    tries = 0
    while tries < 3:
        try:
            response = Url.open(request)
            results = json.loads(response)
            return results['items'][0]['id']['videoId']
        except (SystemExit, KeyboardInterrupt):
            raise
        # Python-2-only 'except Exception, e' replaced with portable syntax.
        except Exception as e:  # noqa
            # Only log once all retries are exhausted.
            if tries == 2:
                logging.error(u'Unable to get trailer from Youtube.', exc_info=True)
            tries += 1
    # Implicitly returns None after three failed attempts.
def get_hash_and_magnet(url):
    ''' Get hash and magnet from URL link
    url (str): torrent info_link url

    Returns tuple with str of lower-case torrent hash, and magnet uri
    '''
    use_proxy = core.CONFIG['Server']['Proxy']['enabled']

    parts = urlparse(url)
    host = '{}://{}'.format(parts.scheme, parts.netloc)

    try:
        if use_proxy and core.proxy.whitelist(host) is True:
            response = Url.open(url, proxy_bypass=True).text
        else:
            response = Url.open(url).text

        if not response:
            return ()

        soup = BeautifulSoup(response, 'html.parser')
        magnet = soup.select_one('a[href^="magnet:?"]').attrs['href']
        # The info hash is the last segment of the magnet's first field.
        info_hash = magnet.split('&')[0].split(':')[-1].lower()
        return (info_hash, magnet)
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception:
        logging.error('1337x get magnet failed.', exc_info=True)
        return ()
def _set_label(torrent, label, url):
    ''' Sets label for download
    torrent: str hash of torrent to apply label
    label: str name of label to apply
    url: str url of deluge web interface

    Returns bool
    '''
    # command_id is a module-level JSON-RPC sequence counter shared by all
    # Deluge calls; every request below increments it.
    global command_id

    # Deluge labels disallow certain characters -- strip them and spaces.
    label = label_fix.sub('', label.lower()).replace(' ', '')

    logging.info('Applying label {} to torrent {} in Deluge Web UI.'.format(
        label, torrent))

    # Step 1: fetch the existing labels so we know whether to create ours.
    command = {'method': 'label.get_labels', 'params': [], 'id': command_id}
    command_id += 1

    try:
        response = Url.open(url, post_data=json.dumps(command),
                            headers=headers).text
        deluge_labels = json.loads(response).get('result') or []
    except Exception as e:
        logging.error('Unable to get labels from Deluge Web UI.',
                      exc_info=True)
        return False

    # Step 2: create the label if it doesn't exist yet.
    if label not in deluge_labels:
        logging.info('Adding label {} to Deluge.'.format(label))
        command = {'method': 'label.add', 'params': [label], 'id': command_id}
        command_id += 1
        try:
            sc = Url.open(url, post_data=json.dumps(command),
                          headers=headers).status_code
            if sc != 200:
                logging.error('Deluge Web UI response {}.'.format(sc))
                return False
        except Exception as e:
            logging.error('Delugeweb get_labels.', exc_info=True)
            return False

    # Step 3: attach the label to the torrent (hashes are lower-case in
    # Deluge).
    try:
        command = {
            'method': 'label.set_torrent',
            'params': [torrent.lower(), label],
            'id': command_id
        }
        command_id += 1
        sc = Url.open(url, post_data=json.dumps(command),
                      headers=headers).status_code
        if sc != 200:
            logging.error('Deluge Web UI response {}.'.format(sc))
            return False
    except Exception as e:
        logging.error('Delugeweb set_torrent.', exc_info=True)
        return False

    return True
def _download_magnet(data, path):
    ''' Resolves magnet link to torrent file
    data (dict): release information
    path (str): absolute path to FILE in which to save file

    Attempts to use magnet2torrent.com to resolve to torrent file. If that
    fails, iterates through bt_cache sites and attempts to get download.

    The downloaded content is ran through bencode (via core.helpers.Torrent)
    to make sure the hash from the torrent file (or whatever content was
    download) matches the hash submitted.

    Returns bool
    '''
    # Cache sites index hashes in upper case.
    magnet_hash = data['guid'].upper()

    # First attempt: magnet2torrent.com resolution service.
    try:
        logging.info(
            'Attempting to resolve torrent hash through magnet2torrent.com')
        dl_bytes = Url.open('http://magnet2torrent.com/upload/', post_data={
            'magnet': 'magnet:?xt=urn:btih:{}'.format(magnet_hash)
        }, stream=True).content

        # Verify the payload is a real torrent matching the submitted hash
        # before writing it to disk.
        if _verify_torrent(dl_bytes, magnet_hash):
            logging.info('Torrent found on magnet2torrent.com')
            with open(path, 'wb') as f:
                f.write(dl_bytes)
            # Release the payload promptly; torrents can be large.
            del dl_bytes
            return True
    except Exception as e:
        logging.warning('Unable to reach magnet2torrent.com', exc_info=True)

    # Fallback: iterate public torrent-cache url templates.
    for i in bt_cache:
        try:
            url = i.format(magnet_hash)
            logging.info(
                'Attempting to resolve torrent hash through {}'.format(url))
            dl_bytes = Url.open(url, stream=True).content
            if _verify_torrent(dl_bytes, magnet_hash):
                logging.info('Torrent found at {}'.format(url))
                with open(path, 'wb') as f:
                    f.write(dl_bytes)
                del dl_bytes
                return True
            else:
                # Content didn't match the hash; try the next cache.
                continue
        except Exception as e:
            logging.warning(
                'Unable to resolve magnet hash through {}.'.format(i),
                exc_info=True)
            continue

    logging.warning(
        'Torrent hash {} not found on any torrent cache.'.format(magnet_hash))
    return False
def get_imdbid(self, tmdbid=None, title=None, year=''):
    ''' Gets imdbid from tmdbid or title and year
    tmdbid: str TMDB movie id #
    title: str movie title
    year str year of movie release

    MUST supply either tmdbid or title. Year is optional with title, but
    results are more reliable with it.

    Returns str imdbid or None on failure
    '''
    if not tmdbid and not title:
        logging.warning(
            'Neither tmdbid or title supplied. Unable to find imdbid.')
        return None

    if not tmdbid:
        # Resolve the title/year to a tmdbid first.
        title = Url.normalize(title)
        year = Url.normalize(year)

        url = 'https://api.themoviedb.org/3/search/movie?api_key={}&language=en-US&query={}&year={}&page=1&include_adult=false'.format(
            _k(b'tmdb'), title, year)

        while self.get_tokens() < 3:
            sleep(0.3)
        self.use_token()

        try:
            found = json.loads(Url.open(url).text)['results']
            if not found:
                return None
            tmdbid = found[0]['id']
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception:  # noqa
            logging.error('Error attempting to get TMDBID from TMDB.',
                          exc_info=True)
            return None

    url = 'https://api.themoviedb.org/3/movie/{}?api_key={}'.format(
        tmdbid, _k(b'tmdb'))

    while self.get_tokens() < 3:
        sleep(0.3)
    self.use_token()

    try:
        return json.loads(Url.open(url).text).get('imdb_id')
    except Exception:  # noqa
        logging.error('Error attempting to get IMDBID from TMDB.',
                      exc_info=True)
        return None
def add_torrent(data):
    ''' Adds torrent or magnet to qbittorrent
    data: dict of torrrent/magnet information

    Adds torrents to default/path/<category>

    Returns dict {'response': True, 'download_id': 'id'}
                 {'response': False, 'error': 'exception'}
    '''
    conf = core.CONFIG['Downloader']['Torrent']['QBittorrent']

    host = conf['host']
    port = conf['port']
    base_url = u'{}:{}/'.format(host, port)

    user = conf['user']
    password = conf['pass']

    # check cookie validity while getting default download dir
    download_dir = QBittorrent._get_download_dir(base_url)
    if not download_dir:
        if QBittorrent._login(base_url, user, password) is not True:
            return {'response': False,
                    'error': 'Incorrect usename or password.'}
        download_dir = QBittorrent._get_download_dir(base_url)
        if not download_dir:
            return {'response': False,
                    'error': 'Unable to get path information.'}
    # if we got download_dir we can connect.

    post_data = {}
    post_data['urls'] = data['torrentfile']
    post_data['savepath'] = u'{}{}'.format(download_dir, conf['category'])
    post_data['category'] = conf['category']

    url = u'{}command/download'.format(base_url)
    post_data = urllib.urlencode(post_data)
    request = Url.request(url, post_data=post_data)
    request.add_header('cookie', QBittorrent.cookie)

    try:
        Url.open(request)  # QBit returns an empty string
        downloadid = Torrent.get_hash(data['torrentfile'])
        return {'response': True, 'downloadid': downloadid}
    except (SystemExit, KeyboardInterrupt):
        raise
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:
        logging.error(u'QBittorrent connection test failed.', exc_info=True)
        # Only urllib errors carry .reason; fall back to the exception itself
        # so a generic failure doesn't raise a masking AttributeError.
        return {'response': False, 'error': str(getattr(e, 'reason', e))}
def add_torrent(data):
    ''' Adds torrent or magnet to qbittorrent
    data: dict of torrrent/magnet information

    Adds torrents to default/path/<category>

    Returns dict {'response': True, 'download_id': 'id'}
                 {'response': False, 'error': 'exception'}
    '''
    logging.info('Sending torrent {} to QBittorrent.'.format(
        data['title']))

    conf = core.CONFIG['Downloader']['Torrent']['QBittorrent']

    base_url = '{}:{}/'.format(conf['host'], conf['port'])

    # Log in first if no session cookie is cached yet.
    if QBittorrent.cookie is None:
        QBittorrent._login(base_url, conf['user'], conf['pass'])

    download_dir = QBittorrent._get_download_dir(base_url)
    if not download_dir:
        return {
            'response': False,
            'error': 'Unable to get path information.'
        }
    # if we got download_dir we can connect.

    post_data = {
        'urls': data['torrentfile'],
        'savepath': '{}{}'.format(download_dir, conf['category']),
        'category': conf['category'],
    }

    url = '{}command/download'.format(base_url)
    headers = {'cookie': QBittorrent.cookie}

    try:
        # QBit returns an empty string
        Url.open(url, post_data=post_data, headers=headers)
        return {'response': True,
                'downloadid': Torrent.get_hash(data['torrentfile'])}
    except (SystemExit, KeyboardInterrupt):
        raise
    except Exception as e:
        logging.error('QBittorrent connection test failed.', exc_info=True)
        return {'response': False, 'error': str(e)}
def add_nzb(data):
    ''' Adds nzb file to sab to download
    :param data: dict of nzb information

    Returns dict {'response': True, 'downloadid': 'id'}
                 {'response': False, 'error': 'exception'}
    '''
    conf = core.CONFIG['Downloader']['Usenet']['Sabnzbd']

    host = conf['host']
    port = conf['port']
    api = conf['api']

    base_url = u'http://{}:{}/sabnzbd/api?apikey={}'.format(
        host, port, api)

    mode = u'addurl'
    name = urllib2.quote(data['guid'])
    nzbname = data['title']
    cat = conf['category']
    # Map config priority names to SAB's numeric priorities.
    priority_keys = {
        'Paused': '-2',
        'Low': '-1',
        'Normal': '0',
        'High': '1',
        'Forced': '2'
    }
    priority = priority_keys[conf['priority']]

    command_url = u'&mode={}&name={}&nzbname={}&cat={}&priority={}&output=json'.format(
        mode, name, nzbname, cat, priority)

    url = base_url + command_url

    request = Url.request(url)

    try:
        response = json.load(Url.open(request))

        if response['status'] is True and len(response['nzo_ids']) > 0:
            downloadid = response['nzo_ids'][0]
            logging.info(
                'NZB sent to SABNzbd - downloadid {}.'.format(downloadid))
            return {'response': True, 'downloadid': downloadid}
        else:
            logging.error(
                'Unable to send NZB to Sabnzbd. {}'.format(response))
            return {'response': False, 'error': 'Unable to add NZB.'}
    except (SystemExit, KeyboardInterrupt):
        # Never swallow shutdown signals (matches the rest of the module).
        raise
    except Exception as e:
        logging.error('Unable to send NZB to Sabnzbd.', exc_info=True)
        # Only urllib errors carry .reason; fall back to the exception itself
        # so a generic failure doesn't raise a masking AttributeError.
        return {'response': False, 'error': str(getattr(e, 'reason', e))}
def get_imdbid(tmdbid=None, title=None, year=''):
    ''' Gets imdbid from tmdbid or title and year
    tmdbid (str): themoviedatabase id #
    title (str): movie title
    year (str/int): year of movie release

    MUST supply either tmdbid or title. Year is optional with title, but
    results are more reliable with it.

    Returns str imdbid
    '''
    if not tmdbid and not title:
        logging.warning(
            'Neither tmdbid or title supplied. Unable to find imdbid.')
        return ''

    if not tmdbid:
        # Resolve the title/year to a tmdbid first.
        title = Url.normalize(title)
        year = Url.normalize(year)

        adult = 'true' if core.CONFIG['Search']['allowadult'] else 'false'
        url = 'https://api.themoviedb.org/3/search/movie?api_key={}&language=en-US&query={}&year={}&page=1&include_adult={}'.format(
            _k(b'tmdb'), title, year, adult)

        TheMovieDatabase._use_token()

        try:
            matches = json.loads(Url.open(url).text)['results']
            if not matches:
                return ''
            tmdbid = matches[0]['id']
        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception:
            logging.error('Error attempting to get TMDBID from TMDB.',
                          exc_info=True)
            return ''

    url = 'https://api.themoviedb.org/3/movie/{}?api_key={}'.format(
        tmdbid, _k(b'tmdb'))

    TheMovieDatabase._use_token()

    try:
        return json.loads(Url.open(url).text).get('imdb_id')
    except Exception:
        logging.error('Error attempting to get IMDBID from TMDB.',
                      exc_info=True)
        return ''
def _get_download_dir(base_url):
    ''' Gets default download directory from QBittorrent preferences
    base_url (str): base url of QBittorrent web api

    Returns str save path, or False on any failure
    '''
    try:
        url = u'{}query/preferences'.format(base_url)
        request = Url.request(url)
        request.add_header('cookie', QBittorrent.cookie)
        response = json.loads(Url.open(request))
        return response['save_path']
    except urllib2.HTTPError:
        return False
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:  # noqa
        logging.error(u'QBittorrent unable to get download dir.',
                      exc_info=True)
        # Callers treat any truthy return as a valid path, so failures must
        # return False. The original returned a truthy error dict here,
        # which callers would have used as a save path.
        return False
def get_newest_hash(self):
    ''' Gets newest commit hash from the configured git branch
    Returns str commit sha, or None on failure
    '''
    api_url = u'{}/commits/{}'.format(core.GIT_API,
                                      core.CONFIG['Server']['gitbranch'])

    request = Url.request(api_url)

    try:
        response = Url.open(request)
        result = json.loads(response)
        # The original assigned result['sha'] to a local but never returned
        # it, so callers always received None.
        return result['sha']
    except (SystemExit, KeyboardInterrupt):
        raise
    # Python-2-only 'except Exception, e' replaced with portable 'as' syntax.
    except Exception as e:  # noqa
        logging.error(u'Could not get newest hash from git.', exc_info=True)
        return None
def fuzzy_title(self, titles):
    ''' Score and remove results based on title match
    titles (list): titles to match against

    If titles is an empty list every result is treated as a perfect match

    Iterates through self.results and removes any entry that does not
        fuzzy match 'title' > 70.
    Adds fuzzy_score / 20 points to ['score']

    Does not return
    '''
    logging.info('Checking title match.')

    kept = []
    if titles == []:
        logging.debug(
            'No titles available to compare, scoring all as perfect match.'
        )
        for result in self.results:
            result['score'] += 20
            kept.append(result)
    else:
        for result in self.results:
            # Imports are user-supplied and bypass title matching.
            if result['type'] == 'import' and result not in kept:
                logging.debug(
                    '{} is an Import, soring as a perfect match.'.format(
                        result['title']))
                result['score'] += 20
                kept.append(result)
                continue

            release = Url.normalize(result['title'])
            logging.debug('Comparing release {} with titles {}.'.format(
                result['title'], titles))
            scores = [lm.score(release, Url.normalize(t)) * 100
                      for t in titles]
            best = max(scores)
            if best > 70:
                result['score'] += int(best / 5)
                kept.append(result)
            else:
                logging.debug(
                    '{} best title match was {}%, removing search result.'.
                    format(release, best))

    self.results = kept
    logging.info('Keeping {} results.'.format(len(self.results)))
def _send(method, post_data=None):
    ''' Sends API request to QBittorrent
    method (str): name of method to call. *must* include category (ie 'query/preferences')
    post_data (dict): post data to send with request <optional>

    Returns str text response from QBit
    '''
    conf = core.CONFIG['Downloader']['Torrent']['QBittorrent']

    # Establish a session first if no cookie is cached.
    if not QBittorrent.cookie:
        r = QBittorrent._login('{}:{}/'.format(conf['host'], conf['port']),
                               conf['user'], conf['pass'])
        if r is not True:
            logging.error('Unable to connect to QBittorrent: {}'.format(r))
            return False

    url = '{}:{}/{}'.format(conf['host'], conf['port'], method)

    try:
        response = Url.open(url, post_data=post_data,
                            headers={'cookie': QBittorrent.cookie})
    except Exception as e:
        logging.error('Unable to contact QBittorrent API.', exc_info=True)
        # 'response' is unbound when Url.open itself raises, so the original
        # raised a NameError here instead of the intended APIConnectionError.
        raise APIConnectionError(None, str(e))

    if response.status_code == 403:
        # Session expired -- re-authenticate and retry once.
        logging.info('QBittorrent request unauthorized.')
        QBittorrent.cookie = None
        u = '{}:{}/'.format(conf['host'], conf['port'])
        if QBittorrent._login(u, conf['user'], conf['pass']) is not True:
            raise APIConnectionError('403',
                                     'Unable to log in to QBittorrent.')
        else:
            try:
                response = Url.open(url, post_data=post_data,
                                    headers={'cookie': QBittorrent.cookie})
            except Exception as e:
                logging.error('Unable to contact QBittorrent API.',
                              exc_info=True)
                # Same unbound-name fix as above.
                raise APIConnectionError(None, str(e))
    elif response.status_code != 200:
        logging.error('QBittorrent API call failed: {}'.format(
            response.reason))
        raise APIConnectionError(response.status_code, response.reason)

    return response.text