def _notify(self, title, body, prowl_api=None, prowl_priority=None, **kwargs):
    """Send a notification through the Prowl public API.

    title: notification title
    body: notification message text
    prowl_api: Prowl API key, falls back to the configured key when None
    prowl_priority: Prowl priority, falls back to the configured value when None

    returns: True on success, otherwise a failure description (via _choose)
    """
    prowl_api = self._choose(prowl_api, sickbeard.PROWL_API)
    prowl_priority = self._choose(prowl_priority, sickbeard.PROWL_PRIORITY)

    self._log_debug('Sending notice with details: title="%s", message="%s", priority=%s, api=%s' % (
        title, body, prowl_priority, prowl_api))

    connection = moves.http_client.HTTPSConnection('api.prowlapp.com')
    payload = dict(apikey=prowl_api, application='SickGear', event=title,
                   description=body.encode('utf-8'), priority=prowl_priority)

    failure = None
    try:
        connection.request('POST', '/publicapi/add',
                           headers={'Content-type': 'application/x-www-form-urlencoded'},
                           body=urlencode(payload))
    except (SSLError, moves.http_client.HTTPException, socket.error):
        failure = 'Connection failed'
        self._log_error(failure)
    else:
        response = connection.getresponse()
        if 200 != response.status:
            # 401 indicates a rejected key, anything else is reported raw
            if 401 == response.status:
                failure = u'Authentication, %s (bad API key?)' % response.reason
            else:
                failure = 'Http response code "%s"' % response.status
            self._log_error(failure)

    return self._choose((True, 'Failed to send notification: %s' % failure)[bool(failure)],
                        not bool(failure))
def get_token(self, user, passw):
    """Sign in to plex.tv and return an authentication token.

    :param user: plex.tv username
    :param passw: plex.tv password
    :return: authentication token string, or '' when sign in failed
    """
    auth = ''
    try:
        auth = get_url(
            'https://plex.tv/users/sign_in.json',
            headers={
                'X-Plex-Device-Name': 'SickGear',
                'X-Plex-Platform': platform.system(),
                'X-Plex-Device': platform.system(),
                'X-Plex-Platform-Version': platform.release(),
                'X-Plex-Provides': 'Python',
                'X-Plex-Product': 'Python',
                'X-Plex-Client-Identifier': self.client_id,
                'X-Plex-Version': str(self.config_version),
                'X-Plex-Username': user
            },
            parse_json=True,
            post_data=urlencode({
                b'user[login]': user,
                b'user[password]': passw
            }).encode('utf-8'))['user']['authentication_token']
    except TypeError:
        # parse_json returned None (or a non-subscriptable body)
        self.log('Error in response from plex.tv auth server')
    except (KeyError, IndexError):
        # bugfix: a dict response missing the expected keys raises KeyError,
        # which the original `except IndexError` never caught
        self.log('Error getting Plex Token')
    return auth
def _cache_data(self, **kwargs):
    """Fetch the provider RSS feed and build the list of cached results.

    :return: list of (title, url, seeders, size) tuples
    """
    mode = 'Cache'
    search_url = '%srss.php?%s' % (self.provider.url, urlencode({'filter': '1'}))
    data = self.get_rss(search_url)

    results = []
    if data and 'entries' in data:
        regexes = {k: re.compile('(?i)' + v)
                   for k, v in iteritems({'size': r'size:\s*(\d+[.,]\d+\w+)'})}

        for cur_entry in data.get('entries', []):
            try:
                title, download_url = self._title_and_url(cur_entry)
                summary = cur_entry.get('summary_detail', {'value': ''}).get('value', '')
                found = regexes['size'].findall(summary)
                size = found and found[0] or -1
            except (AttributeError, TypeError, ValueError):
                continue

            if title and download_url:
                # feed does not carry seed, leech counts
                results.append((title, download_url, 0, self.provider._bytesizer(size)))

    self.provider._log_search(mode, len(results), search_url)
    return results
def _notify(self, title, body, access_token=None, sound=None, **kwargs):
    """
    Sends a boxcar2 notification to the address provided

    title: The title of the message
    body: The message to send
    access_token: To send to this device
    sound: Sound profile to use

    returns: True if the message succeeded, False otherwise
    """
    access_token = self._choose(access_token, sickbeard.BOXCAR2_ACCESSTOKEN)
    sound = self._choose(sound, sickbeard.BOXCAR2_SOUND)

    # build up the URL and parameters
    # more info goes here -
    # https://boxcar.uservoice.com/knowledgebase/articles/306788-how-to-send-your-boxcar-account-a-notification
    body = body.strip().encode('utf-8')

    payload = urlencode({
        'user_credentials': access_token,
        'notification[title]': '%s - %s' % (title, body),
        'notification[long_message]': body,
        'notification[sound]': sound,
        'notification[source_name]': 'SickGear',
        'notification[icon_url]': self._sg_logo_url})

    # send the request to boxcar2
    result = None
    try:
        request = urllib.request.Request('https://new.boxcar.io/api/notifications')
        response = urllib.request.urlopen(request, payload)  # PY2 response obj has no `with` context manager
        response.close()
    except urllib.error.HTTPError as e:
        if not hasattr(e, 'code'):
            self._log_error(u'Notification failed: %s' % ex(e))
        else:
            result = 'Notification failed. Error code: %s' % e.code
            self._log_error(result)

            if 503 == e.code:
                result = 'Server too busy to handle the request at this time'
                self._log_warning(result)
            else:
                # map the remaining known status codes to their cause
                cause = {404: 'Access token is wrong/not associated to a device',
                         401: 'Access token not recognized',
                         400: 'Wrong data sent to Boxcar'}.get(e.code)
                if cause:
                    result = cause
                    self._log_error(result)
    except urllib.error.URLError as e:
        self._log_error(u'Notification failed: %s' % ex(e))

    return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)],
                        not bool(result))
def notify_settings(self, host, db_loc, instance):
    """
    Retrieves the NMJv2 database location from Popcorn hour

    host: The hostname/IP of the Popcorn Hour server
    db_loc: 'local' for PCH internal harddrive. 'network' for PCH network shares
    instance: Allows for selection of different DB in case of multiple databases

    Returns: a JSON message string; on success it carries the database path found
    """
    result = False
    try:
        base_url = 'http://%s:8008/' % host
        req = urllib.request.Request('%s%s%s' % (base_url, 'file_operation?', urlencode(dict(
            arg0='list_user_storage_file', arg1='', arg2=instance, arg3=20, arg4='true',
            arg5='true', arg6='true', arg7='all', arg8='name_asc', arg9='false', arg10='false'))))
        http_response_obj = urllib.request.urlopen(req)  # PY2 http_response_obj has no `with` context manager
        response = http_response_obj.read()
        http_response_obj.close()
        xml_data = parseString(response)
        time.sleep(300.0 / 1000.0)
        for node in xml_data.getElementsByTagName('path'):
            xml_tag = node.toxml()
            # probe each storage path for a usable NMJ database
            reqdb = urllib.request.Request('%s%s%s' % (base_url, 'metadata_database?', urlencode(dict(
                arg0='check_database',
                arg1=xml_tag.replace('<path>', '').replace('</path>', '').replace('[=]', '')))))
            http_response_obj_db = urllib.request.urlopen(reqdb)  # PY2 obj has no `with` context manager
            responsedb = http_response_obj_db.read()
            # bugfix: close the database response handle; the original closed the
            # already-closed first handle and leaked this one on every iteration
            http_response_obj_db.close()
            xml_db = parseString(responsedb)
            if '0' == xml_db.getElementsByTagName('returnValue')[0].toxml().replace(
                    '<returnValue>', '').replace('</returnValue>', ''):
                db_path = xml_db.getElementsByTagName('database_path')[0].toxml().replace(
                    '<database_path>', '').replace('</database_path>', '').replace('[=]', '')
                if 'local' == db_loc and db_path.find('localhost') > -1:
                    sickbeard.NMJv2_HOST = host
                    sickbeard.NMJv2_DATABASE = db_path
                    result = True
                if 'network' == db_loc and db_path.find('://') > -1:
                    sickbeard.NMJv2_HOST = host
                    sickbeard.NMJv2_DATABASE = db_path
                    result = True
    except IOError as e:
        self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e)))

    if result:
        return '{"message": "Success, NMJ Database found at: %(host)s", "database": "%(database)s"}' % {
            "host": host, "database": sickbeard.NMJv2_DATABASE}
    return '{"message": "Failed to find NMJ Database at location: %(dbloc)s. ' \
           'Is the right location selected and PCH running? ", "database": ""}' % {"dbloc": db_loc}
def _request(self, method='get', params=None, files=None, **kwargs):
    """Delegate to the base client, folding params into a uTorrent token query string."""
    params = {} if None is params else params
    if any(params):
        # each key/value pair is individually url-encoded, then joined with the auth token
        encoded_pairs = '&'.join('%s' % urlencode({cur_key: str(cur_value)})
                                 for cur_key, cur_value in iteritems(params))
        params = 'token={0:s}&{1:s}'.format(self.auth, encoded_pairs)
    return super(uTorrentAPI, self)._request(method=method, params=params, files=files)
def _send_to_xbmc(self, command, host=None, username=None, password=None):
    """Handles communication to XBMC servers via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the XBMC API via HTTP
        host: XBMC webserver host:port
        username: XBMC webserver username
        password: XBMC webserver password

    Returns:
        Returns response.result for successful commands or False if there was an error
    """
    if not host:
        self._log_debug(u'No host passed, aborting update')
        return False

    username = self._choose(username, sickbeard.XBMC_USERNAME)
    password = self._choose(password, sickbeard.XBMC_PASSWORD)

    # PY2 text values are utf-8 encoded before urlencode; PY3 encodes everything
    for cur_key in command:
        if not PY2 or type(command[cur_key]) == text_type:
            command[cur_key] = command[cur_key].encode('utf-8')

    enc_command = urlencode(command)
    self._log_debug(u'Encoded API command: ' + enc_command)

    url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)
    try:
        request = urllib.request.Request(url)
        # if we have a password, use authentication
        if password:
            request.add_header('Authorization',
                               'Basic %s' % b64encodestring('%s:%s' % (username, password)))
            self._log_debug(u'Contacting (with auth header) via url: ' + fixStupidEncodings(url))
        else:
            self._log_debug(u'Contacting via url: ' + fixStupidEncodings(url))

        response = urllib.request.urlopen(request)  # PY2 response obj has no `with` context manager
        result = decode_str(response.read(), sickbeard.SYS_ENCODING)
        response.close()

        self._log_debug(u'HTTP response: ' + result.replace('\n', ''))
        return result
    except (urllib.error.URLError, IOError) as e:
        self._log_warning(u'Couldn\'t contact HTTP at %s %s' % (fixStupidEncodings(url), ex(e)))
        return False
def _search_provider(self, search, search_mode='eponly', epcount=0, retention=0,
                     needed=NeededQualities(need_all=True), **kwargs):
    """
    :param search: search term
    :type search: AnyStr
    :param search_mode: 'eponly' or 'sponly'
    :type search_mode: AnyStr
    :param epcount:
    :type epcount: int or long
    :param retention: usenet retention override in days
    :type retention: int
    :param needed:
    :type needed: NeededQualities
    :param kwargs:
    :return:
    :rtype: List
    """
    api_key = self._init_api()
    if False is api_key:
        # API unavailable; fall back to html scraping
        return self.search_html(search, search_mode, needed=needed, **kwargs)

    results = []
    cats = self._get_cats(needed=needed)
    if None is not api_key:
        params = {'user': self.username,
                  'api': api_key,
                  'eng': 1,
                  'nukes': 1,
                  'catid': ','.join(cats),  # SD,HD
                  'retention': retention or sickbeard.USENET_RETENTION or 0,
                  'search': search}
        search_url = self.urls['search'] % urlencode(params)

        data_json = self.get_url(search_url, parse_json=True)
        if self.should_skip():
            return results

        if data_json and self._check_auth_from_data(data_json, is_xml=False):
            # keep only complete, non-nuked releases
            results = [cur_item for cur_item in data_json
                       if 'release' in cur_item and 'getnzb' in cur_item
                       and not cur_item.get('nuked', '').startswith('1')]

        mode = {'eponly': 'Episode', 'sponly': 'Season'}.get(search_mode, search_mode)
        self._log_search(mode, len(results), search_url)
    return results
def _send_to_plex(self, command, host, username=None, password=None):
    """Handles communication to Plex hosts via HTTP API

    Args:
        command: Dictionary of field/data pairs, encoded via urllib and passed to the legacy xbmcCmds HTTP API
        host: Plex host:port
        username: Plex API username
        password: Plex API password

    Returns:
        Returns True for successful commands or False if there was an error
    """
    if not host:
        self._log_error(u'No host specified, check your settings')
        return False

    # PY2 text values are utf-8 encoded before urlencode; PY3 encodes everything
    for cur_key in command:
        if not PY2 or type(command[cur_key]) == text_type:
            command[cur_key] = command[cur_key].encode('utf-8')

    enc_command = urlencode(command)
    self._log_debug(u'Encoded API command: ' + enc_command)

    url = 'http://%s/xbmcCmds/xbmcHttp/?%s' % (host, enc_command)
    try:
        request = urllib.request.Request(url)
        if password:
            request.add_header('Authorization',
                               'Basic %s' % b64encodestring('%s:%s' % (username, password)))
            self._log_debug(u'Contacting (with auth header) via url: ' + url)
        else:
            self._log_debug(u'Contacting via url: ' + url)

        response = urllib.request.urlopen(request)  # PY2 response obj has no `with` context manager
        result = decode_str(response.read(), sickbeard.SYS_ENCODING)
        response.close()

        self._log_debug(u'HTTP response: ' + result.replace('\n', ''))
        return True
    except (urllib.error.URLError, IOError) as e:
        self._log_warning(u'Couldn\'t contact Plex at ' + fixStupidEncodings(url) + ' ' + ex(e))
        return False
def cache_data(self, needed=NeededQualities(need_all=True), **kwargs):
    """
    Fetch the provider's cache feed and return its entries.

    :param needed: needed class
    :type needed: NeededQualities
    :param kwargs:
    :return:
    :rtype: List
    """
    if self.should_skip():
        return []
    api_key = self._init_api()
    if False is api_key:
        # API explicitly unavailable; fall back to scraping the site html
        return self.search_html(needed=needed, **kwargs)
    results = []
    cats = self._get_cats(needed=needed)
    if None is not api_key:
        params = {'search': '',
                  'user': self.username,
                  'api': api_key,
                  'eng': 1,
                  'catid': ','.join(cats)}  # SD,HD
        url = self.urls['cache'] % urlencode(params)
        response = self.get_url(url)
        if self.should_skip():
            return results
        # the endpoint emits a malformed pseudo-xml document; the chained
        # replacements rewrite it into an RSS-like <feed>/<entry> structure
        # that feedparser can parse (order of replacements matters)
        data = feedparser.parse(
            response.replace('<xml', '<?xml').replace(
                '>\n<info>', '?>\n<feed>\n<info>').replace(
                '<search_req>\n', '').replace('</search_req>\n', '').replace(
                'post>\n', 'entry>\n').replace(
                '</xml>', '</feed>'))
        if data and 'entries' in data:
            results = data.entries
        self._log_search('Cache', len(results), url)
    return results
def get_devices(self, user_key=None, api_key=None):
    """Fetch the device list for a Pushover account.

    :param user_key: Pushover user key, falls back to the configured key when None
    :param api_key: Pushover API token, falls back to the configured token when None
    :return: raw JSON string returned by the service, or '{}' on failure
    """
    user_key = self._choose(user_key, sickbeard.PUSHOVER_USERKEY)
    api_key = self._choose(api_key, sickbeard.PUSHOVER_APIKEY)

    data = urlencode(dict(token=api_key, user=user_key))

    # get devices from pushover
    result = False
    try:
        req = urllib.request.Request(DEVICE_URL)
        # bugfix: the credentials in `data` were built but never sent;
        # POST them along with the request so the service can authenticate
        http_response_obj = urllib.request.urlopen(req, data)  # PY2 http_response_obj has no `with` context manager
        if http_response_obj:
            result = http_response_obj.read()
            http_response_obj.close()
    except (urllib.error.URLError, socket.timeout):
        pass

    return ('{}', result)[bool(result)]
def get_tvmaze_by_name(showname, premiere_date):
    """
    :param showname: show name
    :type showname: AnyStr
    :param premiere_date: premiere date
    :type premiere_date: datetime.date
    :return: mapping of TV info source id -> external show id
    :rtype: Dict
    """
    ids = {}
    try:
        url = '%ssearch/shows?%s' % (sickbeard.TVInfoAPI(TVINFO_TVMAZE).config['base_url'],
                                     urlencode({'q': clean_show_name(showname)}))
        res = get_tvmaze_data(url=url, parse_json=True, raise_status_code=True, timeout=120)
        for cur_result in res or []:
            show = cur_result.get('show') or {}
            if 'premiered' not in show or 'externals' not in show:
                continue
            premiered = parse(show['premiered'], fuzzy=True)
            # accept a match when the premiere dates are within two days of each other
            if abs(premiere_date - premiered.date()) < datetime.timedelta(days=2):
                externals = show['externals']
                ids[TVINFO_TVRAGE] = externals.get('tvrage', 0)
                ids[TVINFO_TVDB] = externals.get('thetvdb', 0)
                ids[TVINFO_IMDB] = try_int(str(externals.get('imdb')).replace('tt', ''))
                ids[TVINFO_TVMAZE] = show.get('id', 0)
                break
    except (BaseException, Exception):
        pass
    # drop unset/zero ids before returning
    return {k: v for k, v in iteritems(ids) if v not in (None, '', 0)}
def _notify(self, title, body, pushalot_auth_token=None, **kwargs):
    """Send a notification through the Pushalot service.

    title: notification title
    body: notification message text
    pushalot_auth_token: authorization token, falls back to the configured one when None

    returns: True on success, otherwise a failure description (via _choose)
    """
    pushalot_auth_token = self._choose(pushalot_auth_token, sickbeard.PUSHALOT_AUTHORIZATIONTOKEN)

    self._log_debug(u'Title: %s, Message: %s, API: %s' % (title, body, pushalot_auth_token))

    failure = None
    connection = moves.http_client.HTTPSConnection('pushalot.com')
    payload = urlencode(dict(Title=title.encode('utf-8'),
                             Body=body.encode('utf-8'),
                             AuthorizationToken=pushalot_auth_token))
    try:
        connection.request('POST', '/api/sendmessage', body=payload,
                           headers={'Content-type': 'application/x-www-form-urlencoded'})
    except (SSLError, moves.http_client.HTTPException, socket.error):
        failure = 'Connection failed'
        self._log_error(failure)
    else:
        response = connection.getresponse()
        if 200 != response.status:
            # 410 indicates a rejected token, anything else is reported raw
            failure = (u'Authentication, %s (bad API key?)' % response.reason
                       if 410 == response.status
                       else 'Http response code "%s"' % response.status)
            self._log_error(failure)

    return self._choose((True, 'Failed to send notification: %s' % failure)[bool(failure)],
                        not bool(failure))
def update_library(self, ep_obj=None, **kwargs):
    """Request a pyTivo 'Push' of the processed episode file to a TiVo.

    :param ep_obj: episode object whose file should be pushed
    :return: True if the transfer request was accepted, False otherwise
    """
    host = sickbeard.PYTIVO_HOST
    share_name = sickbeard.PYTIVO_SHARE_NAME
    tsn = sickbeard.PYTIVO_TIVO_NAME

    # There are two more values required, the container and file.
    # container: The share name, show name and season
    # file: The file name
    # Some slicing and dicing of variables is required to get at these values.
    # There might be better ways to arrive at the values, but this is the best
    # I have been able to come up with.

    # Calculated values
    show_path = ep_obj.show_obj.location
    show_name = ep_obj.show_obj.name
    root_show_and_season = ek.ek(os.path.dirname, ep_obj.location)
    abs_path = ep_obj.location

    # Some show names have colons in them which are illegal in a path location, so strip them out.
    # (Are there other characters?)
    show_name = show_name.replace(':', '')

    root = show_path.replace(show_name, '')
    show_and_season = root_show_and_season.replace(root, '')

    container = share_name + '/' + show_and_season
    file_path = '/' + abs_path.replace(root, '')

    # Finally create the url and make request
    request_url = 'http://%s/TiVoConnect?%s' % (host, urlencode(
        dict(Command='Push', Container=container, File=file_path, tsn=tsn)))

    self._log_debug(u'Requesting ' + request_url)

    request = urllib.request.Request(request_url)
    try:
        http_response_obj = urllib.request.urlopen(request)  # PY2 http_response_obj has no `with` context manager
        http_response_obj.close()
    except urllib.error.HTTPError as e:
        if hasattr(e, 'reason'):
            self._log_error(u'Error, failed to reach a server - ' + str(e.reason))
            return False
        elif hasattr(e, 'code'):
            # bugfix: e.code is an int; concatenating it to a unicode literal
            # raised TypeError inside this handler, masking the real error
            self._log_error(u'Error, the server couldn\'t fulfill the request - ' + str(e.code))
            return False
    except (BaseException, Exception) as e:
        self._log_error(u'Unknown exception: ' + ex(e))
        return False

    self._log(u'Successfully requested transfer of file')
    return True
def _send(self, host=None):
    """
    Sends a NMJ update command to the specified machine

    host: The hostname/IP to send the request to (no port)

    Returns: True if the request succeeded, False otherwise
    """
    host = self._choose(host, sickbeard.NMJv2_HOST)

    self._log_debug(u'Sending scan command for NMJ ')

    # if a host is provided then attempt to open a handle to that URL
    try:
        base_url = 'http://%s:8008/' % host
        url_scandir = '%s%s%s' % (base_url, 'metadata_database?', urlencode(
            dict(arg0='update_scandir', arg1=sickbeard.NMJv2_DATABASE, arg2='', arg3='update_all')))
        self._log_debug(u'Scan update command sent to host: %s' % host)

        url_updatedb = '%s%s%s' % (base_url, 'metadata_database?', urlencode(
            dict(arg0='scanner_start', arg1=sickbeard.NMJv2_DATABASE, arg2='background', arg3='')))
        self._log_debug(u'Try to mount network drive via url: %s' % host)

        prereq = urllib.request.Request(url_scandir)
        req = urllib.request.Request(url_updatedb)
        http_response_obj1 = urllib.request.urlopen(prereq)  # PY2 http_response_obj has no `with` context manager
        response1 = http_response_obj1.read()
        http_response_obj1.close()
        time.sleep(300.0 / 1000.0)
        http_response_obj2 = urllib.request.urlopen(req)  # PY2 http_response_obj has no `with` context manager
        response2 = http_response_obj2.read()
        http_response_obj2.close()
    except IOError as e:
        self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e)))
        return False

    try:
        et = XmlEtree.fromstring(response1)
        result1 = et.findtext('returnValue')
    except SyntaxError as e:
        self._log_error(u'Unable to parse XML returned from the Popcorn Hour: update_scandir, %s' % ex(e))
        return False

    try:
        et = XmlEtree.fromstring(response2)
        result2 = et.findtext('returnValue')
    except SyntaxError as e:
        self._log_error(u'Unable to parse XML returned from the Popcorn Hour: scanner_start, %s' % ex(e))
        return False

    # if the result was a number then consider that an error
    error_messages = {'8': 'Invalid parameter(s)/argument(s)',
                      '11': 'Invalid database path',
                      '22': 'Insufficient size',
                      '49': 'Database write error',
                      '50': 'Database read error',
                      '51': 'Open fifo pipe failed',
                      '60': 'Read only file system'}
    for result in (result1, result2):
        # bugfix: `int(result)` raised TypeError when <returnValue> was missing and
        # `error_codes.index(result)` raised ValueError for any unlisted code
        if result and result.strip().isdigit() and 0 < int(result):
            self._log_error(u'Popcorn Hour returned an error: %s' % (
                error_messages.get(result.strip(), 'unknown error code %s' % result)))
            return False

    self._log(u'NMJv2 started background scan')
    return True
def _search_provider(self, search_params, **kwargs):
    """Search the provider's JSON API for each mode/term in search_params.

    search_params maps a mode name ('Cache'/'Season'/'Episode'/'Propers') to a
    list of search terms; a dict term triggers an id-based search and returns
    early on the first hit.  Returns a seed-sorted list of result tuples.
    """
    self._check_auth()
    results = []
    # credentials and category filter sent with every request
    api_data = {'username': self.username, 'passkey': self.passkey, 'category': self.categories}
    items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
    for mode in search_params:
        for search_param in search_params[mode]:
            post_data = api_data.copy()
            if isinstance(search_param, dict):
                # dict params are id-based criteria merged straight into the payload
                post_data.update(search_param)
                id_search = True
            else:
                # free-text search; dots are treated as word separators
                post_data['search'] = search_param = search_param.replace('.', ' ')
                id_search = False
            post_data = json.dumps(post_data)
            search_url = self.urls['search']
            json_resp = self.get_url(search_url, post_data=post_data, parse_json=True)
            if self.should_skip():
                return results
            try:
                if not (json_resp and self._check_auth_from_data(json_resp) and 'data' in json_resp):
                    logger.log(u'Response from %s does not contain any json data, abort' % self.name,
                               logger.ERROR)
                    return results
            except AuthException as e:
                logger.log(u'Authentication error: %s' % (ex(e)), logger.ERROR)
                return results
            cnt = len(items[mode])
            for item in json_resp['data']:
                try:
                    # try_int(n, n) keeps the original value when it is not numeric
                    seeders, leechers, size = [try_int(n, n) for n in [
                        item.get(x) for x in ('seeders', 'leechers', 'size')]]
                    if self._reject_item(seeders, leechers, self.freeleech and (
                            re.search('(?i)no', item.get('freeleech', 'no')))):
                        continue
                    title = item['name']
                    download_url = self.urls['get'] % urlencode(
                        {'id': item['id'], 'passkey': self.passkey})
                except (AttributeError, TypeError, ValueError):
                    continue
                if title and download_url:
                    items[mode].append((title, download_url, item.get('seeders', 0),
                                        self._bytesizer(size)))
            self._log_search(mode, len(items[mode]) - cnt,
                             ('search_param: ' + str(search_param), self.name)['Cache' == mode])
            results = self._sort_seeding(mode, results + items[mode])
            if id_search and len(results):
                # an id search is exact; stop as soon as anything matched
                return results
    return results
def _search_provider(self,
                     search_params,  # type: Dict[AnyStr, List[Dict[AnyStr, List[AnyStr]]]]
                     needed=NeededQualities(need_all=True),  # type: NeededQualities
                     max_items=400,  # type: int
                     try_all_searches=False,  # type: bool
                     **kwargs
                     ):
    # type: (...) -> Tuple[List, Dict]
    """Query a newznab endpoint, paging through results batch by batch.

    :param search_params: mode name -> list of per-search request parameters
    :param needed: qualities needed, used to pick category ids
    :param max_items: stop paging after this many items per query
    :param try_all_searches: when False, stop after the first id-capable search hits
    :return: (list of RSS <item> elements, xml namespace dict)
    """
    results, n_spaces = [], {}
    if self.should_skip():
        return results, n_spaces

    api_key = self._check_auth()
    if isinstance(api_key, bool) and not api_key:
        return results, n_spaces

    base_params = {'t': 'tvsearch',
                   'maxage': sickbeard.USENET_RETENTION or 0,
                   'limit': self.limits,
                   'attrs': ','.join([k for k, v in iteritems(NewznabConstants.providerToIndexerMapping)
                                      if v in self.caps]),
                   'offset': 0}
    # usenet_crawler gets a dedicated rss request shape
    uc_only = all([re.search('(?i)usenet_crawler', self.get_id())])
    base_params_uc = {'num': self.limits, 'dl': '1', 'i': '64660'}

    if isinstance(api_key, string_types) and api_key not in ('0', ''):
        base_params['apikey'] = api_key
        base_params_uc['r'] = api_key

    results, n_spaces = [], {}
    total, cnt, search_url, exit_log = 0, len(results), '', True

    # category id lists, with fallbacks to the conventional newznab ids
    cat_sport = self.cats.get(NewznabConstants.CAT_SPORT, ['5060'])
    cat_anime = self.cats.get(NewznabConstants.CAT_ANIME, ['5070'])
    cat_hd = self.cats.get(NewznabConstants.CAT_HD, ['5040'])
    cat_sd = self.cats.get(NewznabConstants.CAT_SD, ['5030'])
    cat_uhd = self.cats.get(NewznabConstants.CAT_UHD)
    cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL)

    for mode in search_params:
        if self.should_skip(log_warning=False):
            break
        for i, params in enumerate(search_params[mode]):  # type: int, List[Dict[AnyStr, List[AnyStr]]]
            if self.should_skip(log_warning=False):
                break

            # category ids
            cat = []
            if 'Episode' == mode or 'Season' == mode:
                # require either an id param or a text query when tvdbid is unsupported
                if not (any([x in params for x in
                             [v for c, v in iteritems(self.caps)
                              if c not in [NewznabConstants.SEARCH_EPISODE,
                                           NewznabConstants.SEARCH_SEASON]]])
                        or not self.supports_tvdbid()):
                    logger.log('Show is missing either an id or search term for search')
                    continue

            if needed.need_anime:
                cat.extend(cat_anime)
            if needed.need_sports:
                cat.extend(cat_sport)
            if needed.need_hd:
                cat.extend(cat_hd)
            if needed.need_sd:
                cat.extend(cat_sd)
            if needed.need_uhd and None is not cat_uhd:
                cat.extend(cat_uhd)
            if needed.need_webdl and None is not cat_webdl:
                cat.extend(cat_webdl)

            if self.cat_ids or len(cat):
                base_params['cat'] = ','.join(sorted(set((self.cat_ids.split(',')
                                                          if self.cat_ids else []) + cat)))
                base_params_uc['t'] = base_params['cat']

            request_params = base_params.copy()
            # if ('Propers' == mode or 'nzbs_org' == self.get_id()) \
            if 'Propers' == mode \
                    and 'q' in params and not (any([x in params for x in ['season', 'ep']])):
                # plain-text proper searches use the generic search endpoint
                request_params['t'] = 'search'
            request_params.update(params)

            # deprecated; kept here as bookmark for new haspretime:0|1 + nuked:0|1 can be used here instead
            # if hasattr(self, 'filter'):
            #     if 'nzbs_org' == self.get_id():
            #         request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]

            # workaround a strange glitch
            if sum([ord(i) for i in self.get_id()]) in [383] and 5 == 14 - request_params['maxage']:
                request_params['maxage'] += 1

            offset = 0
            batch_count = not 0
            first_date = last_date = None

            # hardcoded to stop after a max of 4 hits (400 items) per query
            while (offset <= total) and (offset < max_items) and batch_count:
                cnt = len(results)

                if 'Cache' == mode and uc_only:
                    search_url = '%srss?%s' % (self.url, urlencode(base_params_uc))
                else:
                    search_url = '%sapi?%s' % (self.url, urlencode(request_params))
                # rate-limit every request after the first search of a mode
                i and time.sleep(2.1)

                data = self.get_url(search_url)

                if self.should_skip() or not data:
                    break

                # hack this in until it's fixed server side
                if not data.startswith('<?xml'):
                    data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data

                try:
                    parsed_xml, n_spaces = self.cache.parse_and_get_ns(data)
                    items = parsed_xml.findall('channel/item')
                except (BaseException, Exception):
                    logger.log('Error trying to load %s RSS feed' % self.name, logger.WARNING)
                    break

                if not self._check_auth_from_data(parsed_xml, search_url):
                    break

                if 'rss' != parsed_xml.tag:
                    logger.log('Resulting XML from %s isn\'t RSS, not parsing it' % self.name,
                               logger.WARNING)
                    break

                i and time.sleep(2.1)

                for item in items:
                    title, url = self._title_and_url(item)
                    if title and url:
                        results.append(item)
                    else:
                        logger.log('The data returned from %s is incomplete, this result is unusable'
                                   % self.name, logger.DEBUG)

                # get total and offset attributes
                try:
                    if 0 == total:
                        # Cache mode caps the reported total at 1000
                        total = (helpers.try_int(parsed_xml.find(
                            './/%sresponse' % n_spaces['newznab']).get('total', 0)),
                            1000)['Cache' == mode]
                        hits = (total // self.limits + int(0 < (total % self.limits)))
                        hits += int(0 == hits)
                    offset = helpers.try_int(parsed_xml.find(
                        './/%sresponse' % n_spaces['newznab']).get('offset', 0))
                except (AttributeError, KeyError):
                    if not uc_only:
                        break
                    total = len(items)

                # No items found, prevent from doing another search
                if 0 == total:
                    break

                # Cache mode, prevent from doing another search
                if 'Cache' == mode:
                    if items and len(items):
                        if not first_date:
                            first_date = self._parse_pub_date(items[0])
                        last_date = self._parse_pub_date(items[-1])
                    # NOTE(review): mixes `self._last_recent_search` and
                    # `self.last_recent_search` — looks inconsistent; confirm
                    # whether both names exist or one is a typo
                    if not first_date or not last_date or not self._last_recent_search or \
                            last_date <= self.last_recent_search or uc_only:
                        break

                if offset != request_params['offset']:
                    logger.log('Ask your newznab provider to fix their newznab responses')
                    break

                request_params['offset'] += request_params['limit']
                if total <= request_params['offset']:
                    break

                # there are more items available than the amount given in one call, grab some more
                items = total - request_params['offset']
                logger.log('%s more item%s to fetch from a batch of up to %s items.'
                           % (items, helpers.maybe_plural(items), request_params['limit']),
                           logger.DEBUG)

                batch_count = self._log_result(results, mode, cnt, search_url)
                exit_log = False

            if 'Cache' == mode and first_date:
                self.last_recent_search = first_date

            if exit_log:
                self._log_search(mode, total, search_url)

            # an id-capable search that produced results is authoritative; stop here
            if not try_all_searches and any([x in request_params for x in [
                    v for c, v in iteritems(self.caps)
                    if c not in [NewznabConstants.SEARCH_EPISODE, NewznabConstants.SEARCH_SEASON,
                                 NewznabConstants.SEARCH_TEXT]]]) and len(results):
                break

    return results, n_spaces
def _search_provider(self, search_params, **kwargs):
    """Scrape the site's html search results for anime torrents.

    search_params maps a mode name to a list of search strings.  Result rows
    come in pairs: a top row with title/links and a bottom row with the
    seed/leech/size stats.  Returns a seed-sorted list of result tuples.
    """
    results = []
    # provider only carries anime content
    if self.show_obj and not self.show_obj.is_anime:
        return results

    items = {'Season': [], 'Episode': [], 'Propers': []}

    # pre-compiled, case-insensitive patterns used while parsing rows
    rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({
        'nodots': r'[\.\s]+',
        'stats': r'S:\s*?(\d)+\s*L:\s*(\d+)',
        'size': r'size:\s*(\d+[.,]\d+\w+)'})])
    for mode in search_params:
        for search_string in search_params[mode]:
            params = urlencode({
                'terms': rc['nodots'].sub(' ', search_string).encode('utf-8'),
                'type': 1})

            search_url = '%ssearch.php?%s' % (self.url, params)

            html = self.get_url(search_url)
            if self.should_skip():
                return self._sort_seeding(mode, results)

            cnt = len(items[mode])
            try:
                if not html or self._has_no_results(html):
                    raise generic.HaltParseException

                with BS4Parser(html, parse_only=dict(table={
                        'class': (lambda at: at and 'listing' in at)})) as tbl:
                    tbl_rows = [] if not tbl else tbl.find_all('tr')

                    if tbl_rows:
                        # skip a leading header row when present
                        a = (0, 1)[None is not tbl_rows[0].find('td', class_='centertext')]

                        # rows are paired: info row on top, stats row below
                        for top, bottom in zip(tbl_rows[a::2], tbl_rows[a + 1::2]):
                            try:
                                bottom_text = bottom.get_text() or ''
                                stats = rc['stats'].findall(bottom_text)
                                seeders, leechers = (0, 0) if not stats else [
                                    try_int(n) for n in stats[0]]

                                size = rc['size'].findall(bottom_text)
                                size = size and size[0] or -1

                                info = top.find('td', class_='desc-top')
                                title = info and re.sub(r'[ .]{2,}', '.', info.get_text().strip())
                                links = info and map_list(lambda l: l.get('href', ''),
                                                          info.find_all('a')) or None
                                # prefer a magnet link; otherwise the first non-magnet link
                                download_url = self._link(
                                    (filter_list(lambda l: 'magnet:' in l, links)
                                     or filter_list(lambda l: not re.search(
                                        r'(magnet:|\.se).+', l), links))[0])
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders,
                                                    self._bytesizer(size)))
            except (BaseException, Exception):
                time.sleep(1.1)

            self._log_search(mode, len(items[mode]) - cnt, search_url)

        results = self._sort_seeding(mode, results + items[mode])

    return results
def _send(self, host=None, database=None, mount=None):
    """
    Sends a NMJ update command to the specified machine

    host: The hostname/IP to send the request to (no port)
    database: The database to send the request to
    mount: The mount URL to use (optional)

    Returns: True if the request succeeded, False otherwise
    """
    host = self._choose(host, sickbeard.NMJ_HOST)
    database = self._choose(database, sickbeard.NMJ_DATABASE)
    mount = self._choose(mount, sickbeard.NMJ_MOUNT)

    self._log_debug(u'Sending scan command for NMJ ')

    def _log_io_error(e):
        # IOError/URLError subclasses carry either a `reason` or an HTTP `code`
        if hasattr(e, 'reason'):
            self._log_warning(u'Could not contact Popcorn Hour on host %s: %s' % (host, e.reason))
        elif hasattr(e, 'code'):
            self._log_warning(u'Problem with Popcorn Hour on host %s: %s' % (host, e.code))

    # if a mount URL is provided then attempt to open a handle to that URL
    if mount:
        try:
            req = urllib.request.Request(mount)
            self._log_debug(u'Try to mount network drive via url: %s' % mount)
            http_response_obj = urllib.request.urlopen(req)  # PY2 http_response_obj has no `with` context manager
            http_response_obj.close()
        except IOError as e:
            _log_io_error(e)
            return False
        except (BaseException, Exception) as e:
            self._log_error(u'Unknown exception: ' + ex(e))
            return False

    # build up the request URL and parameters
    params = urlencode(dict(arg0='scanner_start', arg1=database, arg2='background', arg3=''))
    update_url = 'http://%(host)s:8008/metadata_database?%(params)s' % {
        'host': host, 'params': params}

    # send the request to the server
    try:
        req = urllib.request.Request(update_url)
        self._log_debug(u'Sending scan update command via url: %s' % update_url)
        http_response_obj = urllib.request.urlopen(req)
        response = http_response_obj.read()
        http_response_obj.close()
    except IOError as e:
        _log_io_error(e)
        return False
    except (BaseException, Exception) as e:
        self._log_error(u'Unknown exception: ' + ex(e))
        return False

    # try to parse the resulting XML
    try:
        et = etree.fromstring(response)
        result = et.findtext('returnValue')
    except SyntaxError as e:
        self._log_error(u'Unable to parse XML returned from the Popcorn Hour: %s' % ex(e))
        return False

    # if the result was a number then consider that an error
    if 0 < int(result):
        self._log_error(u'Popcorn Hour returned an errorcode: %s' % result)
        return False

    self._log(u'NMJ started background scan')
    return True
def _notify(self, title, body, user_key=None, api_key=None, priority=None, device=None, sound=None, **kwargs):
    """
    Sends a pushover notification to the address provided

    title: The title of the message
    body: The message to send (unicode)
    user_key: The pushover user id to send the message to (or to subscribe with)

    returns: True if the message succeeded, False otherwise
    """
    user_key = self._choose(user_key, sickbeard.PUSHOVER_USERKEY)
    api_key = self._choose(api_key, sickbeard.PUSHOVER_APIKEY)
    priority = self._choose(priority, sickbeard.PUSHOVER_PRIORITY)
    device = self._choose(device, sickbeard.PUSHOVER_DEVICE)
    sound = self._choose(sound, sickbeard.PUSHOVER_SOUND)

    # build up the URL and parameters
    params = dict(title=title, message=decode_str(body.strip()), user=user_key,
                  timestamp=int(time.time()))
    if api_key:
        params.update(token=api_key)
    if priority:
        params.update(priority=priority)
    # bugfix: conditions were inverted (`if not device:` / `if not sound:`),
    # which posted device=None/sound=None and never sent a configured value
    if device:
        params.update(device=device)
    if sound:
        params.update(sound=sound)

    # send the request to pushover
    result = None
    try:
        req = urllib.request.Request(API_URL)
        # PY2 http_response_obj has no `with` context manager
        http_response_obj = urllib.request.urlopen(req, decode_bytes(urlencode(params)))
        http_response_obj.close()
    except urllib.error.HTTPError as e:
        # HTTP status 404 if the provided email address isn't a Pushover user.
        if 404 == e.code:
            result = 'Username is wrong/not a Pushover email. Pushover will send an email to it'
            self._log_warning(result)
        # For HTTP status code 401's, it is because you are passing in either an invalid token,
        # or the user has not added your service.
        elif 401 == e.code:
            # HTTP status 401 if the user doesn't have the service added
            subscribe_note = self._notify(title, body, user_key)
            if subscribe_note:
                self._log_debug('Subscription sent')
                # return True
            else:
                result = 'Subscription could not be sent'
                self._log_error(result)
        else:
            # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
            if 400 == e.code:
                result = 'Wrong data sent to Pushover'
            # If you receive a HTTP status code of 429,
            # it is because the message limit has been reached (free limit is 7,500)
            elif 429 == e.code:
                result = 'API message limit reached - try a different API key'
            # If you receive a HTTP status code of 500, service is unavailable
            elif 500 == e.code:
                result = 'Unable to connect to API, service unavailable'
            self._log_error(result)

    return self._choose((True, 'Failed to send notification: %s' % result)[bool(result)],
                        not bool(result))