def on_task_input(self, task, config):
    """
    Log in to pogdesign and produce an Entry for every show the user has checked.

    :param task: FlexGet task (unused here)
    :param dict config: must contain 'username' and 'password'
    :returns: list of Entry objects (title + show summary URL)
    :raises plugin.PluginError: on bad credentials or network failure
    """
    try:
        # 'sub_login' is the name of the site's submit button; its value is irrelevant.
        r = requests.post('http://www.pogdesign.co.uk/cat/', data={'username': config['username'], 'password': config['password'], 'sub_login': '******'}, allow_redirects=True)
        # The site answers 200 even for bad logins; detect failure from page text.
        if 'U / P Invalid' in r.text:
            raise plugin.PluginError('Invalid username/password for pogdesign.')
        # Re-use the session cookies from the login response for the show list.
        page = requests.get('http://www.pogdesign.co.uk/cat/showselect.php', cookies=r.cookies)
    except requests.RequestException as e:
        raise plugin.PluginError('Error retrieving source: %s' % e)
    # NOTE(review): no explicit parser passed to BeautifulSoup — result may vary
    # with the installed parser; confirm intended behavior.
    soup = BeautifulSoup(page.text)
    entries = []
    # Only checked checkboxes represent shows the user follows.
    for row in soup.find_all('label', {'class': 'label_check'}):
        if row.find(attrs={'checked': 'checked'}):
            t = row.text
            # Site lists e.g. "Wire [The]"; rewrite to "The Wire" (strips ' [The]', 6 chars).
            if t.endswith('[The]'):
                t = 'The ' + t[:-6]
            # Make certain names friendlier
            if t in self.name_map:
                t = self.name_map[t]
            e = Entry()
            e['title'] = t
            # The summary link following the label gives the show's detail page.
            url = row.find_next('a', {'class': 'selectsummary'})
            e['url'] = 'http://www.pogdesign.co.uk' + url['href']
            entries.append(e)
    return entries
def get_auth_token(self, refresh=False):
    """
    Return a TheTVDB API token, fetching/refreshing and persisting it as needed.

    :param bool refresh: force a new token even if the cached one has not expired
    :returns: the JWT token string
    """
    with Session() as session:
        # Look up a previously stored token for this auth key.
        auth_token = session.query(TVDBTokens).filter(TVDBTokens.name == self.auth_key).first()
        if not auth_token:
            auth_token = TVDBTokens()
            auth_token.name = self.auth_key
        if refresh or auth_token.has_expired():
            data = {'apikey': TVDBRequest.API_KEY}
            # Optional user-level credentials override the default API key auth.
            if self.username:
                data['username'] = self.username
            if self.account_id:
                data['userkey'] = self.account_id
            if self.api_key:
                data['apikey'] = self.api_key
            log.debug(
                'Authenticating to TheTVDB with %s',
                self.username if self.username else 'api_key',
            )
            auth_token.token = (
                requests.post(TVDBRequest.BASE_URL + 'login', json=data).json().get('token')
            )
            auth_token.refreshed = datetime.now()
            # merge() attaches the (possibly new) token row to this session.
            auth_token = session.merge(auth_token)
        return auth_token.token
def create_token(api_key, login, password):
    """
    login in and request an new API token.
    http://www.betaseries.com/wiki/Documentation#cat-members

    :param string api_key: Api key requested at http://www.betaseries.com/api
    :param string login: Login name
    :param string password: Password
    :return: User token, or None if the API reported errors
    """
    r = requests.post(API_URL_PREFIX + 'members/auth', params={
        'login': login,
        # hashlib.md5 requires bytes on Python 3; the API expects the MD5 hex
        # digest of the password (fix: encode before hashing).
        'password': md5(password.encode('utf-8')).hexdigest()
    }, headers={
        'Accept': 'application/json',
        'X-BetaSeries-Version': '2.1',
        'X-BetaSeries-Key': api_key,
    })
    assert r.status_code == 200, "Bad HTTP status code: %s" % r.status_code
    j = r.json()
    # The API returns errors in-band; log each and only return a token when clean.
    error_list = j['errors']
    for err in error_list:
        log.error(str(err))
    if not error_list:
        return j['token']
def on_task_input(self, task, config):
    """
    Log in to pogdesign and fetch the show-selection page.

    :param task: FlexGet task (unused here)
    :param dict config: must contain 'username' and 'password'
    :raises plugin.PluginError: on bad credentials or network failure
    """
    try:
        # 'sub_login' is the site's submit button name; its value is irrelevant.
        r = requests.post('http://www.pogdesign.co.uk/cat/', data={'username': config['username'], 'password': config['password'], 'sub_login': '******'}, allow_redirects=True)
        # Site answers 200 even on bad logins; detect failure from page text.
        if 'U / P Invalid' in r.text:
            raise plugin.PluginError('Invalid username/password for pogdesign.')
        page = requests.get('http://www.pogdesign.co.uk/cat/showselect.php', cookies=r.cookies)
    # Fix: `except X, e` is Python-2-only syntax (SyntaxError on Python 3);
    # `except X as e` works on both.
    except requests.RequestException as e:
        raise plugin.PluginError('Error retrieving source: %s' % e)
def device_auth():
    """
    Perform Trakt.tv OAuth device authorization.

    Requests a device code, shows the user a verification URL/code, then polls
    until the user approves, the code expires, or Trakt reports an error.

    :returns: dict of token data from Trakt on success
    :raises plugin.PluginError: on invalid/denied/expired codes or network failure
    """
    data = {'client_id': CLIENT_ID}
    try:
        r = requests.post(get_api_url('oauth/device/code'), data=data).json()
        device_code = r['device_code']
        user_code = r['user_code']
        expires_in = r['expires_in']
        interval = r['interval']
        console(
            'Please visit {0} and authorize Flexget. Your user code is {1}. Your code expires in '
            '{2} minutes.'.format(r['verification_url'], user_code, expires_in / 60.0)
        )
        log.debug('Polling for user authorization.')
        data['code'] = device_code
        data['client_secret'] = CLIENT_SECRET
        end_time = time.time() + expires_in
        console('Waiting...', end='')
        # stop polling after expires_in seconds
        while time.time() < end_time:
            # Trakt dictates the polling interval; sleep between attempts.
            time.sleep(interval)
            polling_request = requests.post(
                get_api_url('oauth/device/token'), data=data, raise_status=False
            )
            if polling_request.status_code == 200:  # success
                return polling_request.json()
            elif polling_request.status_code == 400:  # pending -- waiting for user
                console('...', end='')
            elif polling_request.status_code == 404:  # not found -- invalid device_code
                raise plugin.PluginError('Invalid device code. Open an issue on Github.')
            elif polling_request.status_code == 409:  # already used -- user already approved
                raise plugin.PluginError('User code has already been approved.')
            elif polling_request.status_code == 410:  # expired -- restart process
                break
            elif polling_request.status_code == 418:  # denied -- user denied code
                raise plugin.PluginError('User code has been denied.')
            elif polling_request.status_code == 429:  # polling too fast
                log.warning('Polling too quickly. Upping the interval. No action required.')
                interval += 1
        # Reached on timeout or on the 410 (expired) break above.
        raise plugin.PluginError('User code has expired. Please try again.')
    except requests.RequestException as e:
        raise plugin.PluginError('Device authorization with Trakt.tv failed: {0}'.format(e))
def notify(self, title, message, config):
    """
    Send a Discord notification via the configured webhook.

    :param str title: unused by the Discord webhook API
    :param str message: message body
    :param dict config: discord plugin config
    :raises PluginWarning: when the webhook request fails
    """
    # Build the webhook payload; optional fields are simply None when unset.
    payload = {
        'content': message,
        'username': config.get('username'),
        'avatar_url': config.get('avatar_url'),
        'embeds': config.get('embeds'),
    }
    try:
        requests.post(config['web_hook_url'], json=payload)
    except RequestException as e:
        raise PluginWarning(e.args[0])
def get_url_content(self, url, params=None, method="GET", json=False):
    """
    Fetch a URL and return its parsed content.

    :param str url: target URL
    :param dict params: query parameters (GET) or form data (POST); optional
    :param str method: 'GET' or 'POST'
    :param bool json: if True, return decoded JSON (or {} for empty bodies);
                      otherwise return a BeautifulSoup document
    :raises ValueError: for an unsupported HTTP method
    """
    # Fix: default was a shared mutable dict; use None sentinel instead.
    if params is None:
        params = {}
    # Fix: the original used `method is "GET"` — identity comparison against a
    # string literal is implementation-dependent and can silently fail.
    if method == "GET":
        result = requests.get(url, params=params)
    elif method == "POST":
        result = requests.post(url, data=params)
    else:
        # Previously fell through and crashed with UnboundLocalError.
        raise ValueError('Unsupported HTTP method: %s' % method)
    if json:
        if result.content:
            return result.json()
        else:
            return {}
    else:
        return BeautifulSoup(result.content, "html5lib")
def get_access_token(account, token=None, refresh=False, re_auth=False):
    """
    Gets authorization info from a pin or refresh token.

    :param account: Arbitrary account name to attach authorization to.
    :param unicode token: The pin or refresh token, as supplied by the trakt website.
    :param bool refresh: If True, refresh the access token using refresh_token from db.
    :param bool re_auth: If True, ignore any stored token and re-authorize with the pin.
    :returns: the access token string
    :raises RequestException: If there is a network error while authorizing.
    """
    data = {
        'client_id': CLIENT_ID,
        'client_secret': CLIENT_SECRET,
        'redirect_uri': 'urn:ietf:wg:oauth:2.0:oob'
    }
    with Session() as session:
        acc = session.query(TraktUserAuth).filter(
            TraktUserAuth.account == account).first()
        # Stored token still valid and no refresh/re-auth requested: use it as-is.
        if acc and datetime.now(
        ) < acc.expires and not refresh and not re_auth:
            return acc.access_token
        else:
            # Prefer refreshing an existing (possibly expired) token over the pin flow.
            if acc and (refresh or datetime.now() >= acc.expires) and not re_auth:
                data['refresh_token'] = acc.refresh_token
                data['grant_type'] = 'refresh_token'
            elif token:
                data['code'] = token
                data['grant_type'] = 'authorization_code'
            else:
                raise plugin.PluginError(
                    'Account %s not found in db and no pin specified.' % account)
            try:
                r = requests.post(get_api_url('oauth/token'), data=data).json()
                # Update the existing row or create a fresh one.
                if acc:
                    acc.access_token = r.get('access_token')
                    acc.refresh_token = r.get('refresh_token')
                    acc.expires = token_expire_date(r.get('expires_in'))
                    acc.created = token_created_date(r.get('created_at'))
                else:
                    acc = TraktUserAuth(account, r.get('access_token'),
                                        r.get('refresh_token'), r.get('created_at'),
                                        r.get('expires_in'))
                    session.add(acc)
                return r.get('access_token')
            except requests.RequestException as e:
                raise plugin.PluginError(
                    'Token exchange with trakt failed: %s' % e.args[0])
def plex_get_globalaccesstoken(self, config):
    """
    Sign in to my.plexapp.com and return the account's global access token.

    :param dict config: must contain 'username' and 'password'
    :raises plugin.PluginError: on login/network failure or missing token
    """
    headers = {'X-Plex-Client-Identifier': 'flexget'}
    try:
        response = requests.post(
            'https://my.plexapp.com/users/sign_in.xml',
            auth=(config['username'], config['password']),
            headers=headers,
        )
    except requests.RequestException as error:
        raise plugin.PluginError('Could not log in to myplex! Error: %s' % error)
    # myplex returns 200 even for bad credentials; detect failure from the body.
    if 'Invalid email' in response.text:
        raise plugin.PluginError('Myplex: invalid username and/or password!')
    dom = parseString(response.text)
    globalaccesstoken = dom.getElementsByTagName('authentication-token')[0].firstChild.nodeValue
    if not globalaccesstoken:
        raise plugin.PluginError('Myplex: could not find a server!')
    log.debug('Myplex: Got global accesstoken: %s' % globalaccesstoken)
    return globalaccesstoken
def on_task_input(self, task, config):
    """
    Log in to pogdesign and fetch the show-selection page.

    :param task: FlexGet task (unused here)
    :param dict config: must contain 'username' and 'password'
    :raises plugin.PluginError: on bad credentials or network failure
    """
    try:
        r = requests.post('http://www.pogdesign.co.uk/cat/', data={
            'username': config['username'],
            'password': config['password'],
            # Name of the site's submit button; its value is irrelevant.
            'sub_login': '******'
        }, allow_redirects=True)
        # Site answers 200 even on bad logins; detect failure from page text.
        if 'U / P Invalid' in r.text:
            raise plugin.PluginError(
                'Invalid username/password for pogdesign.')
        page = requests.get(
            'http://www.pogdesign.co.uk/cat/showselect.php',
            cookies=r.cookies)
    # Fix: `except X, e` is Python-2-only syntax (SyntaxError on Python 3).
    except requests.RequestException as e:
        raise plugin.PluginError('Error retrieving source: %s' % e)
def get_auth_token(self, refresh=False):
    """
    Return a TheTVDB auth token from the persisted cache, requesting a new
    one from the API when missing or when a refresh is forced.

    :param bool refresh: force a new token even if one is cached
    :returns: the token string (or None if the API returned none)
    """
    tokens = persist.get('auth_tokens') or {'default': None}
    auth_token = tokens.get(self.auth_key)
    if refresh or not auth_token:
        # Build the login payload; user credentials are optional extras.
        data = {'apikey': TVDBRequest.API_KEY}
        if self.username:
            data['username'] = self.username
        if self.account_id:
            data['userkey'] = self.account_id
        log.debug('Authenticating to TheTVDB with %s' % (self.username if self.username else 'api_key'))
        tokens[self.auth_key] = requests.post(TVDBRequest.BASE_URL + 'login', json=data).json().get('token')
    # Write the (possibly updated) token map back to the persistence store.
    persist['auth_tokens'] = tokens
    return tokens[self.auth_key]
def get_access_token(account, token=None, refresh=False, re_auth=False):
    """
    Gets authorization info from a pin or refresh token.

    :param account: Arbitrary account name to attach authorization to.
    :param unicode token: The pin or refresh token, as supplied by the trakt website.
    :param bool refresh: If True, refresh the access token using refresh_token from db.
    :param bool re_auth: If True, ignore any stored token and re-authorize with the pin.
    :returns: the access token string
    :raises RequestException: If there is a network error while authorizing.
    """
    data = {
        'client_id': CLIENT_ID,
        'client_secret': CLIENT_SECRET,
        'redirect_uri': 'urn:ietf:wg:oauth:2.0:oob'
    }
    with Session() as session:
        acc = session.query(TraktUserAuth).filter(TraktUserAuth.account == account).first()
        # Stored token still valid and no refresh/re-auth requested: use it as-is.
        if acc and datetime.now() < acc.expires and not refresh and not re_auth:
            return acc.access_token
        else:
            # Prefer refreshing an existing (possibly expired) token over the pin flow.
            if acc and (refresh or datetime.now() >= acc.expires) and not re_auth:
                data['refresh_token'] = acc.refresh_token
                data['grant_type'] = 'refresh_token'
            elif token:
                data['code'] = token
                data['grant_type'] = 'authorization_code'
            else:
                raise plugin.PluginError('Account %s not found in db and no pin specified.' % account)
            try:
                r = requests.post(get_api_url('oauth/token'), data=data).json()
                # Update the existing row or create a fresh one.
                if acc:
                    acc.access_token = r.get('access_token')
                    acc.refresh_token = r.get('refresh_token')
                    acc.expires = token_expire_date(r.get('expires_in'))
                    acc.created = token_created_date(r.get('created_at'))
                else:
                    acc = TraktUserAuth(account, r.get('access_token'), r.get('refresh_token'),
                                        r.get('created_at'), r.get('expires_in'))
                    session.add(acc)
                return r.get('access_token')
            except requests.RequestException as e:
                raise plugin.PluginError('Token exchange with trakt failed: %s' % e.args[0])
def query_api(url, method, post=None):
    """
    Call ``<url>/<method>/`` and return the response.

    :param str url: base API URL
    :param str method: method path appended to the URL
    :param dict post: if truthy, sent as form data via POST; otherwise a GET is made
    :returns: the requests response object
    :raises plugin.PluginError: on timeout or an HTTP 500 from the API
    """
    try:
        url = url.rstrip("/") + "/" + method.strip("/") + "/"
        if post:
            response = requests.post(url, data=post, timeout=160.0)
        else:
            response = requests.get(url, timeout=160.0)
        response.raise_for_status()
        return response
    except Timeout as e:
        log.exception(e)
        msg = 'Timeout error connecting!: %s %s %s %s' % (method, url, post, e)
        raise plugin.PluginError(msg, log)
    except RequestException as e:
        # Fix: e.response is None for connection-level errors, so both logging it
        # and dereferencing .status_code crashed with AttributeError.
        log.exception(e)
        if e.response is not None and e.response.status_code == 500:
            msg = 'Internal API Error: %s %s %s %s' % (method, url, post, e)
            raise plugin.PluginError(msg, log)
        raise
def get_auth_token(self, refresh=False):
    """
    Return a TheTVDB token from the database, requesting a new one from the
    API when missing, expired, or when a refresh is forced.

    :param bool refresh: force a new token even if the cached one is valid
    :returns: the token string
    """
    with Session() as session:
        auth_token = session.query(TVDBTokens).filter(TVDBTokens.name == self.auth_key).first()
        if not auth_token:
            # No stored token for this auth key yet; start a fresh row.
            auth_token = TVDBTokens()
            auth_token.name = self.auth_key
        if refresh or auth_token.has_expired():
            # Optional user credentials ride along with the API key.
            data = {'apikey': TVDBRequest.API_KEY}
            if self.username:
                data['username'] = self.username
            if self.account_id:
                data['userkey'] = self.account_id
            log.debug('Authenticating to TheTVDB with %s',
                      self.username if self.username else 'api_key')
            response = requests.post(TVDBRequest.BASE_URL + 'login', json=data)
            auth_token.token = response.json().get('token')
            auth_token.refreshed = datetime.now()
            # Attach the (possibly new) row to this session.
            auth_token = session.merge(auth_token)
        return auth_token.token
def create_token(api_key, login, password):
    """
    login in and request an new API token.
    http://www.betaseries.com/wiki/Documentation#cat-members

    :param string api_key: Api key requested at http://www.betaseries.com/api
    :param string login: Login name
    :param string password: Password
    :return: User token, or None if the API reported errors
    """
    r = requests.post(
        API_URL_PREFIX + "members/auth",
        # hashlib.md5 requires bytes on Python 3; the API expects the MD5 hex
        # digest of the password (fix: encode before hashing).
        params={"login": login, "password": md5(password.encode("utf-8")).hexdigest()},
        headers={"Accept": "application/json", "X-BetaSeries-Version": "2.1", "X-BetaSeries-Key": api_key},
    )
    assert r.status_code == 200, "Bad HTTP status code: %s" % r.status_code
    j = r.json()
    # The API returns errors in-band; log each and only return a token when clean.
    error_list = j["errors"]
    for err in error_list:
        log.error(str(err))
    if not error_list:
        return j["token"]
def on_task_input(self, task, config):
    """
    Produce Entries from a Plex Media Server library section.

    Handles three section types: 'show' (series titles), 'episode' (per-file
    episode entries) and 'movie' (per-file movie entries).

    :param task: FlexGet task (unused directly)
    :param dict config: plex plugin config (server, port, section, fetch, ...)
    :returns: list of Entry objects
    :raises plugin.PluginError: on login, lookup, or section errors
    """
    config = self.prepare_config(config)
    accesstoken = ""
    urlconfig = {}
    urlappend = "?"
    entries = []
    # 'unwatched' filter is not applicable to the aggregate selections.
    if config['unwatched_only'] and config['section'] != 'recentlyViewedShows' and config['section'] != 'all':
        urlconfig['unwatched'] = '1'
    plexserver = config['server']
    # Resolve a hostname to an IP so it can be matched against server addresses.
    if gethostbyname(config['server']) != config['server']:
        config['server'] = gethostbyname(config['server'])
    # Remote servers require a myplex access token; localhost does not.
    if config['username'] and config['password'] and config['server'] != '127.0.0.1':
        header = {'X-Plex-Client-Identifier': 'flexget'}
        log.debug("Trying to to connect to myplex.")
        try:
            r = requests.post('https://my.plexapp.com/users/sign_in.xml',
                              auth=(config['username'], config['password']), headers=header)
        except requests.RequestException as e:
            raise plugin.PluginError('Could not login to my.plexapp.com: %s. Username: %s Password: %s'
                                     % (e, config['username'], config['password']))
        log.debug("Connected to myplex.")
        # myplex returns 200 even for bad credentials; detect failure from the body.
        if 'Invalid email' in r.text:
            raise plugin.PluginError("Could not login to my.plexapp.com: invalid username and/or password!")
        log.debug("Managed to login to myplex.")
        dom = parseString(r.text)
        plextoken = dom.getElementsByTagName('authentication-token')[0].firstChild.nodeValue
        log.debug("Got plextoken: %s" % plextoken)
        try:
            r = requests.get("https://my.plexapp.com/pms/servers?X-Plex-Token=%s" % plextoken)
        except requests.RequestException as e:
            raise plugin.PluginError("Could not get servers from my.plexapp.com using "
                                     "authentication-token: %s. (%s)" % (plextoken, e))
        dom = parseString(r.text)
        # Find the access token for the configured server address.
        for node in dom.getElementsByTagName('Server'):
            if node.getAttribute('address') == config['server']:
                accesstoken = node.getAttribute('accessToken')
                log.debug("Got accesstoken: %s" % accesstoken)
                urlconfig['X-Plex-Token'] = accesstoken
        if accesstoken == "":
            raise plugin.PluginError('Could not retrieve accesstoken for %s.' % config['server'])
    # Serialize accumulated query parameters into the URL suffix.
    for key in urlconfig:
        urlappend += '%s=%s&' % (key, urlconfig[key])
    # A non-int section is a name: resolve it to its numeric key.
    if not isinstance(config['section'], int):
        try:
            r = requests.get("http://%s:%d/library/sections/%s" % (config['server'], config['port'], urlappend))
        except requests.RequestException as e:
            raise plugin.PluginError('Error retrieving source: %s' % e)
        dom = parseString(r.text.encode("utf-8"))
        for node in dom.getElementsByTagName('Directory'):
            if node.getAttribute('title') == config['section']:
                config['section'] = int(node.getAttribute('key'))
    if not isinstance(config['section'], int):
        raise plugin.PluginError('Could not find section \'%s\'' % config['section'])
    log.debug("Fetching http://%s:%d/library/sections/%s/%s%s" %
              (config['server'], config['port'], config['section'], config['selection'], urlappend))
    try:
        r = requests.get("http://%s:%d/library/sections/%s/%s%s" %
                         (config['server'], config['port'], config['section'], config['selection'], urlappend))
    except requests.RequestException as e:
        raise plugin.PluginError('Error retrieving source: %s' % e)
    dom = parseString(r.text.encode("utf-8"))
    plexsectionname = dom.getElementsByTagName('MediaContainer')[0].getAttribute('title1')
    log.debug("Plex section name %s" % plexsectionname)
    if dom.getElementsByTagName('MediaContainer')[0].getAttribute('viewGroup') == "show":
        # One entry per series; titles optionally normalized.
        for node in dom.getElementsByTagName('Directory'):
            e = Entry()
            title = node.getAttribute('title')
            if config['strip_year']:
                title = re.sub(r'^(.*)\(\d+\)$', r'\1', title)
            title = re.sub(r'[\(\)]', r'', title)
            title = re.sub(r'&', r'And', title)
            title = re.sub(r'[^A-Za-z0-9- ]', r'', title)
            if config['lowercase_title']:
                title = title.lower()
            e['title'] = title
            e['url'] = "NULL"
            e['plex_server'] = plexserver
            e['plex_port'] = config['port']
            e['plex_section'] = config['section']
            e['plex_section_name'] = plexsectionname
            entries.append(e)
    elif dom.getElementsByTagName('MediaContainer')[0].getAttribute('viewGroup') == "episode":
        for node in dom.getElementsByTagName('Video'):
            e = Entry()
            title = node.getAttribute('grandparentTitle')
            season = int(node.getAttribute('parentIndex'))
            episodethumb = "http://%s:%d%s%s" % (config['server'], config['port'], node.getAttribute('thumb'), urlappend)
            seriesart = "http://%s:%d%s%s" % (config['server'], config['port'], node.getAttribute('art'), urlappend)
            seasoncover = "http://%s:%d%s%s" % (config['server'], config['port'], node.getAttribute('parentThumb'), urlappend)
            seriescover = "http://%s:%d%s%s" % (config['server'], config['port'], node.getAttribute('grandparentThumb'), urlappend)
            episodetitle = node.getAttribute('title')
            episodesummary = node.getAttribute('summary')
            count = node.getAttribute('viewCount')
            offset = node.getAttribute('viewOffset')
            if count:
                status = 'seen'
            elif offset:
                status = 'inprogress'
            else:
                status = 'unwatched'
            # Date-based shows report the year as parentIndex; use the air date.
            if node.getAttribute('parentIndex') == node.getAttribute('year'):
                season = node.getAttribute('originallyAvailableAt')
                filenamemap = "%s_%s%s_%s_%s_%s.%s"
                episode = ""
            elif node.getAttribute('index'):
                episode = int(node.getAttribute('index'))
                filenamemap = "%s_%02dx%02d_%s_%s_%s.%s"
            else:
                log.debug("Could not get episode number for '%s' (Hint, ratingKey: %s)" %
                          (title, node.getAttribute('ratingKey')))
                break
            for media in node.getElementsByTagName('Media'):
                vcodec = media.getAttribute('videoCodec')
                acodec = media.getAttribute('audioCodec')
                # Non-file fetches download images, hence jpg container.
                if config['fetch'] == "file" or not config['fetch']:
                    container = media.getAttribute('container')
                else:
                    container = "jpg"
                resolution = media.getAttribute('videoResolution') + "p"
                for part in media.getElementsByTagName('Part'):
                    key = part.getAttribute('key')
                    duration = part.getAttribute('duration')
                    if config['original_filename']:
                        filename, fileext = os.path.splitext(basename(part.getAttribute('file')))
                        if config['fetch'] != 'file':
                            filename += ".jpg"
                        else:
                            filename = "%s.%s" % (filename, fileext)
                    else:
                        title = re.sub(r'[\(\)]', r'', title)
                        title = re.sub(r'&', r'And', title).strip()
                        title = re.sub(r'[^A-Za-z0-9- _]', r'', title)
                        if config['strip_year']:
                            title = re.sub(r'^(.*)\(\d+\)$', r'\1', title)
                        if config['lowercase_title']:
                            title = title.lower()
                        filename = filenamemap % (title.replace(" ", "."), season, episode, resolution, vcodec, acodec, container)
                    e['title'] = filename
                    e['filename'] = filename
                    e['plex_url'] = "http://%s:%d%s%s" % (config['server'], config['port'], key, urlappend)
                    e['url'] = "http://%s:%d%s%s" % (config['server'], config['port'], key, urlappend)
                    e['plex_server'] = plexserver
                    e['plex_server_ip'] = config['server']
                    e['plex_port'] = config['port']
                    e['plex_section'] = config['section']
                    e['plex_section_name'] = plexsectionname
                    e['plex_path'] = key
                    e['plex_duration'] = duration
                    e['plex_thumb'] = episodethumb
                    e['plex_art'] = seriesart
                    e['plex_cover'] = seriescover
                    e['plex_season_cover'] = seasoncover
                    e['plex_title'] = episodetitle
                    e['plex_summary'] = episodesummary
                    e['plex_status'] = status
                    if config['fetch'] == "file" or not config['fetch']:
                        e['url'] = e['plex_url']
                    elif config['fetch'] == "thumb":
                        e['url'] = e['plex_thumb']
                    elif config['fetch'] == "art":
                        e['url'] = e['plex_art']
                    elif config['fetch'] == "cover":
                        e['url'] = e['plex_cover']
                    elif config['fetch'] == "season_cover":
                        e['url'] = e['plex_season_cover']
                    log.debug("Setting url to %s since %s was selected." % (e['url'], config['fetch']))
                    # Fix: replaced Python-2 string.find() with a membership test.
                    if '/library/' not in e['url']:
                        log.debug('Seems like the chosen item could not be found in the PMS.')
                        break
                    entries.append(e)
    elif dom.getElementsByTagName('MediaContainer')[0].getAttribute('viewGroup') == "movie":
        if config['fetch'] == "thumb":
            # Fix: was plugin.pluginError (lowercase), which raised AttributeError.
            raise plugin.PluginError('There are no thumbnails for movies.')
        for node in dom.getElementsByTagName('Video'):
            e = Entry()
            title = node.getAttribute('title')
            log.debug("found %s" % title)
            art = node.getAttribute('art')
            thumb = node.getAttribute('thumb')
            duration = node.getAttribute('duration')
            year = node.getAttribute('year')
            summary = node.getAttribute('summary')
            count = node.getAttribute('viewCount')
            offset = node.getAttribute('viewOffset')
            if count:
                status = 'seen'
            elif offset:
                status = 'inprogress'
            else:
                status = 'unwatched'
            for media in node.getElementsByTagName('Media'):
                vcodec = media.getAttribute('videoCodec')
                acodec = media.getAttribute('audioCodec')
                resolution = media.getAttribute('videoResolution') + 'p'
                for part in media.getElementsByTagName('Part'):
                    key = part.getAttribute('key')
                    if config['fetch'] == "file" or not config['fetch']:
                        container = media.getAttribute('container')
                    else:
                        container = "jpg"
                    if config['original_filename']:
                        filename, fileext = os.path.splitext(basename(part.getAttribute('file')))
                        if config['fetch'] != 'file':
                            e['title'] = "%s.jpg" % filename
                        else:
                            e['title'] = "%s.%s" % (filename, fileext)
                    else:
                        title = re.sub(r'&', r'And', title).strip()
                        title = re.sub(r'[^A-Za-z0-9- _]', r'', title).replace(" ", ".")
                        if config['strip_year']:
                            filenamemap = "%s_%s_%s_%s.%s"
                            e['title'] = filenamemap % (title, resolution, vcodec, acodec, container)
                        else:
                            filenamemap = "%s_%d_%s_%s_%s.%s"
                            e['title'] = filenamemap % (title, year, resolution, vcodec, acodec, container)
                        if config['lowercase_title']:
                            title = title.lower()
                    e['filename'] = e['title']
                    e['plex_url'] = "http://%s:%d%s%s" % (config['server'], config['port'], key, urlappend)
                    e['url'] = "http://%s:%d%s%s" % (config['server'], config['port'], key, urlappend)
                    e['plex_server'] = plexserver
                    e['plex_server_ip'] = config['server']
                    e['plex_port'] = config['port']
                    e['plex_section'] = config['section']
                    e['plex_section_name'] = plexsectionname
                    e['plex_path'] = key
                    e['plex_duration'] = duration
                    e['plex_episode_thumb'] = ''
                    e['plex_art'] = art
                    e['plex_cover'] = thumb
                    e['plex_summary'] = summary
                    e['plex_title'] = title
                    e['plex_status'] = status
                    if config['fetch'] == "file" or not config['fetch']:
                        e['url'] = e['plex_url']
                    elif config['fetch'] == "cover" or config['fetch'] == "season_cover":
                        e['url'] = e['plex_cover']
                    elif config['fetch'] == "art":
                        e['url'] = e['plex_art']
                    # Fix: replaced Python-2 string.find() with a membership test.
                    if '/library/' not in e['url']:
                        log.debug('Seems like the chosen item could not be found in PMS, missing art?')
                        break
                    entries.append(e)
    else:
        raise plugin.PluginError('Selected section is neither TV nor movie section.')
    return entries
def search(self, task, entry, config=None):
    """
    Search for name from torrentleech.

    :param task: FlexGet task (unused directly)
    :param entry: entry whose 'search_strings'/'title' drive the queries
    :param dict config: rss_key, username, password, optional category
    :returns: entries sorted by availability (seeds/leeches), best first
    """
    rss_key = config['rss_key']
    # build the form request:
    data = {
        'username': config['username'],
        'password': config['password'],
        'remember_me': 'on',
        'submit': 'submit'
    }
    # POST the login form:
    login = requests.post('https://torrentleech.org/', data=data)
    # NOTE(review): config is dereferenced above before this guard runs —
    # confirm a non-dict config can actually reach this point.
    if not isinstance(config, dict):
        config = {}
    # sort = SORT.get(config.get('sort_by', 'seeds'))
    # if config.get('sort_reverse'):
    #     sort += 1
    categories = config.get('category', 'all')
    # Make sure categories is a list
    if not isinstance(categories, list):
        categories = [categories]
    # If there are any text categories, turn them into their id number
    categories = [
        c if isinstance(c, int) else CATEGORIES[c] for c in categories
    ]
    filter_url = '/categories/%s' % ','.join(str(c) for c in categories)
    entries = set()
    for search_string in entry.get('search_strings', [entry['title']]):
        query = normalize_unicode(search_string).replace(":", "")
        # urllib.quote will crash if the unicode string has non ascii characters, so encode in utf-8 beforehand
        url = ('http://torrentleech.org/torrents/browse/index/query/' +
               quote(query.encode('utf-8')) + filter_url)
        log.debug('Using %s as torrentleech search url' % url)
        # Re-use the session cookies from the login POST.
        page = requests.get(url, cookies=login.cookies).content
        soup = get_soup(page)
        for tr in soup.find_all("tr", ["even", "odd"]):
            # within each even or odd row, find the torrent names
            link = tr.find("a", attrs={'href': re.compile('/torrent/\d+')})
            log.debug('link phase: %s' % link.contents[0])
            entry = Entry()
            # extracts the contents of the <a>titlename/<a> tag
            entry['title'] = link.contents[0]
            # find download link
            torrent_url = tr.find("a", attrs={
                'href': re.compile('/download/\d+/.*')
            }).get('href')
            # parse link and split along /download/12345 and /name.torrent
            download_url = re.search('(/download/\d+)/(.+\.torrent)', torrent_url)
            # change link to rss and splice in rss_key
            torrent_url = 'http://torrentleech.org/rss' + download_url.group(1) + '/' \
                + rss_key + '/' + download_url.group(2)
            log.debug('RSS-ified download link: %s' % torrent_url)
            entry['url'] = torrent_url
            # us tr object for seeders/leechers
            seeders, leechers = tr.find_all('td', ["seeders", "leechers"])
            entry['torrent_seeds'] = int(seeders.contents[0])
            entry['torrent_leeches'] = int(leechers.contents[0])
            entry['search_sort'] = torrent_availability(
                entry['torrent_seeds'], entry['torrent_leeches'])
            # use tr object for size
            size = tr.find(
                "td", text=re.compile('([\.\d]+) ([TGMK]?)B')).contents[0]
            size = re.search('([\.\d]+) ([TGMK]?)B', size)
            # Convert the reported decimal size (T/G/M/K) to MiB.
            if size:
                if size.group(2) == 'T':
                    entry['content_size'] = int(
                        float(size.group(1)) * 1000**4 / 1024**2)
                elif size.group(2) == 'G':
                    entry['content_size'] = int(
                        float(size.group(1)) * 1000**3 / 1024**2)
                elif size.group(2) == 'M':
                    entry['content_size'] = int(
                        float(size.group(1)) * 1000**2 / 1024**2)
                elif size.group(2) == 'K':
                    entry['content_size'] = int(
                        float(size.group(1)) * 1000 / 1024**2)
                else:
                    entry['content_size'] = int(
                        float(size.group(1)) / 1024**2)
            entries.add(entry)
    return sorted(entries, reverse=True, key=lambda x: x.get('search_sort'))
def on_task_input(self, task, config):
    """
    Produce Entries from a Plex Media Server TV library section
    (older variant: handles 'show' and 'episode' view groups only).

    :param task: FlexGet task (unused directly)
    :param dict config: plex plugin config (server, port, section, ...)
    :returns: list of Entry objects
    :raises PluginError: on login, lookup, or section errors
    """
    config = self.prepare_config(config)
    accesstoken = ""
    plexserver = config['server']
    # Resolve a hostname to an IP so it can be matched against server addresses.
    if gethostbyname(config['server']) != config['server']:
        config['server'] = gethostbyname(config['server'])
    # Remote servers require a myplex access token; localhost does not.
    if config['username'] and config['password'] and config['server'] != '127.0.0.1':
        header = {'X-Plex-Client-Identifier': 'flexget'}
        log.debug("Trying to to connect to myplex.")
        try:
            r = requests.post('https://my.plexapp.com/users/sign_in.xml',
                              auth=(config['username'], config['password']), headers=header)
        except requests.RequestException as e:
            raise PluginError('Could not login to my.plexapp.com: %s. Username: %s Password: %s'
                              % (e, config['username'], config['password']))
        log.debug("Managed to connect to myplex.")
        # myplex returns 200 even for bad credentials; detect failure from the body.
        if 'Invalid email' in r.text:
            raise PluginError('Could not login to my.plexapp.com: invalid username and/or password!')
        log.debug("Managed to login to myplex.")
        dom = parseString(r.text)
        plextoken = dom.getElementsByTagName('authentication-token')[0].firstChild.nodeValue
        log.debug("Got plextoken: %s" % plextoken)
        try:
            r = requests.get("https://my.plexapp.com/pms/servers?X-Plex-Token=%s" % plextoken)
        except requests.RequestException as e:
            raise PluginError('Could not get servers from my.plexapp.com using authentication-token: %s.' % plextoken)
        dom = parseString(r.text)
        # Find the access token for the configured server address.
        for node in dom.getElementsByTagName('Server'):
            if node.getAttribute('address') == config['server']:
                accesstoken = node.getAttribute('accessToken')
                log.debug("Got accesstoken: %s" % plextoken)
                # Pre-format as a query-string suffix for later URLs.
                accesstoken = "?X-Plex-Token=%s" % accesstoken
        if accesstoken == "":
            raise PluginError('Could not retrieve accesstoken for %s.' % config['server'])
    # A non-int section is a name: resolve it to its numeric key.
    if not isinstance(config['section'], int):
        try:
            r = requests.get("http://%s:%d/library/sections/%s" % (config['server'], config['port'], accesstoken))
        except requests.RequestException as e:
            raise PluginError('Error retrieving source: %s' % e)
        dom = parseString(r.text.encode("utf-8"))
        for node in dom.getElementsByTagName('Directory'):
            if node.getAttribute('title') == config['section']:
                config['section'] = int(node.getAttribute('key'))
    if not isinstance(config['section'], int):
        raise PluginError('Could not find section \'%s\'' % config['section'])
    try:
        r = requests.get("http://%s:%d/library/sections/%s/%s%s" %
                         (config['server'], config['port'], config['section'], config['selection'], accesstoken))
    except requests.RequestException as e:
        raise PluginError('Error retrieving source: %s' % e)
    dom = parseString(r.text.encode("utf-8"))
    entries = []
    plexsectionname = dom.getElementsByTagName('MediaContainer')[0].getAttribute('title1')
    if dom.getElementsByTagName('MediaContainer')[0].getAttribute('viewGroup') == "show":
        # One entry per series; titles optionally normalized.
        for node in dom.getElementsByTagName('Directory'):
            title = node.getAttribute('title')
            if config['strip_year']:
                title = re.sub(r'^(.*)\(\d+\)$', r'\1', title)
            title = re.sub(r'[\(\)]', r'', title)
            title = re.sub(r'\&', r'And', title)
            title = re.sub(r'[^A-Za-z0-9- ]', r'', title)
            if config['lowercase_title']:
                title = title.lower()
            e = Entry()
            e['title'] = title
            e['url'] = "NULL"
            e['plexserver'] = plexserver
            e['plexport'] = config['port']
            e['plexsection'] = config['section']
            e['plexsectionname'] = plexsectionname
            entries.append(e)
    elif dom.getElementsByTagName('MediaContainer')[0].getAttribute('viewGroup') == "episode":
        for node in dom.getElementsByTagName('Video'):
            title = node.getAttribute('grandparentTitle')
            season = int(node.getAttribute('parentIndex'))
            # Date-based shows report the year as parentIndex; use the air date.
            if node.getAttribute('parentIndex') == node.getAttribute('year'):
                season = node.getAttribute('originallyAvailableAt')
                filenamemap = "%s_%s%s_%s_%s_%s.%s"
                episode = ""
            elif node.getAttribute('index'):
                episode = int(node.getAttribute('index'))
                filenamemap = "%s_%02dx%02d_%s_%s_%s.%s"
            else:
                log.debug("Could not get episode number for '%s' (Hint, ratingKey: %s)" %
                          (title, node.getAttribute('ratingKey')))
                break
            for media in node.getElementsByTagName('Media'):
                vcodec = media.getAttribute('videoCodec')
                acodec = media.getAttribute('audioCodec')
                container = media.getAttribute('container')
                resolution = media.getAttribute('videoResolution') + "p"
                for part in media.getElementsByTagName('Part'):
                    key = part.getAttribute('key')
                    e = Entry()
                    if config['original_filename']:
                        e['title'] = basename(part.getAttribute('file'))
                    else:
                        if config['strip_year']:
                            title = re.sub(r'^(.*)\(\d+\)$', r'\1', title)
                        title = re.sub(r'[\(\)]', r'', title)
                        title = re.sub(r'\&', r'And', title).strip()
                        title = re.sub(r'[^A-Za-z0-9- ]', r'', title).replace(" ", ".")
                        if config['lowercase_title']:
                            title = title.lower()
                        e['title'] = filenamemap % (title, season, episode, resolution, vcodec, acodec, container)
                    e['url'] = "http://%s:%d%s%s" % (config['server'], config['port'], key, accesstoken)
                    e['plex_server'] = plexserver
                    e['plex_server_ip'] = config['server']
                    e['plex_port'] = config['port']
                    e['plex_section'] = config['section']
                    e['plex_section_name'] = plexsectionname
                    e['plex_path'] = key
                    entries.append(e)
    else:
        raise PluginError('Selected section is not a TV section.')
    return entries
def search(self, entry, config=None):
    """
    Search torrentleech.org for each of the entry's search strings.

    Logs in with the configured username/password, runs one browse query per
    search string, and returns results as Entry objects sorted best-first by
    torrent availability.

    :param entry: entry to find results for (uses ``search_strings`` or ``title``)
    :param dict config: plugin config with ``rss_key``, ``username``,
        ``password`` and optional ``category`` (name or numeric id)
    :return: list of result entries, sorted by ``search_sort`` descending
    """
    # Normalize config *before* reading required keys. The original read
    # config['rss_key'] first, so the dict-guard below it was dead code and a
    # non-dict config crashed with a TypeError instead of a clean KeyError.
    if not isinstance(config, dict):
        config = {}
    rss_key = config['rss_key']

    # build the login form request:
    data = {'username': config['username'], 'password': config['password'],
            'remember_me': 'on', 'submit': 'submit'}
    # POST the login form; the returned cookies authenticate the searches below.
    login = requests.post('http://torrentleech.org/', data=data)

    # sort = SORT.get(config.get('sort_by', 'seeds'))
    # if config.get('sort_reverse'):
    #     sort += 1
    if isinstance(config.get('category'), int):
        category = config['category']
    else:
        category = CATEGORIES.get(config.get('category', 'all'))
    filter_url = '/categories/%d' % category

    entries = set()
    for search_string in entry.get('search_strings', [entry['title']]):
        query = normalize_unicode(search_string)
        # urllib.quote will crash if the unicode string has non ascii characters,
        # so encode in utf-8 beforehand
        url = ('http://torrentleech.org/torrents/browse/index/query/'
               + urllib.quote(query.encode('utf-8')) + filter_url)
        log.debug('Using %s as torrentleech search url' % url)
        page = requests.get(url, cookies=login.cookies).content
        soup = get_soup(page)

        for tr in soup.find_all('tr', ['even', 'odd']):
            # within each even or odd row, find the torrent names
            link = tr.find('a', attrs={'href': re.compile(r'/torrent/\d+')})
            log.debug('link phase: %s' % link.contents[0])
            entry = Entry()
            # extracts the contents of the <a>titlename</a> tag
            entry['title'] = link.contents[0]
            # find download link
            torrent_url = tr.find('a', attrs={'href': re.compile(r'/download/\d+/.*')}).get('href')
            # parse link and split along /download/12345 and /name.torrent
            download_url = re.search(r'(/download/\d+)/(.+\.torrent)', torrent_url)
            # change link to rss and splice in rss_key
            torrent_url = ('http://torrentleech.org/rss' + download_url.group(1)
                           + '/' + rss_key + '/' + download_url.group(2))
            log.debug('RSS-ified download link: %s' % torrent_url)
            entry['url'] = torrent_url
            # use tr object for seeders/leechers
            seeders, leechers = tr.find_all('td', ['seeders', 'leechers'])
            entry['torrent_seeds'] = int(seeders.contents[0])
            entry['torrent_leeches'] = int(leechers.contents[0])
            entry['search_sort'] = torrent_availability(entry['torrent_seeds'],
                                                        entry['torrent_leeches'])
            # use tr object for size; site reports decimal units, flexget wants MiB
            size = tr.find('td', text=re.compile(r'([\.\d]+) ([GMK])B')).contents[0]
            size = re.search(r'([\.\d]+) ([GMK])B', size)
            if size:
                if size.group(2) == 'G':
                    entry['content_size'] = int(float(size.group(1)) * 1000 ** 3 / 1024 ** 2)
                elif size.group(2) == 'M':
                    entry['content_size'] = int(float(size.group(1)) * 1000 ** 2 / 1024 ** 2)
                else:
                    entry['content_size'] = int(float(size.group(1)) * 1000 / 1024 ** 2)
            entries.add(entry)

    return sorted(entries, reverse=True, key=lambda x: x.get('search_sort'))
def token_oauth(data):
    """Exchange *data* for an OAuth token at trakt's ``oauth/token`` endpoint.

    Any network-level failure is wrapped in a ``plugin.PluginError``.
    """
    try:
        response = requests.post(get_api_url('oauth/token'), data=data)
        return response.json()
    except requests.RequestException as e:
        raise plugin.PluginError('Token exchange with trakt failed: {0}'.format(e))
def search(self, task, entry, config=None):
    """
    Search torrentleech.org for each of the entry's search strings.

    Supports one or more categories (names or numeric ids), logs in with the
    configured credentials, and returns results as Entry objects sorted
    best-first by torrent availability.

    :param task: the running task (unused, required by the search interface)
    :param entry: entry to find results for (uses ``search_strings`` or ``title``)
    :param dict config: plugin config with ``rss_key``, ``username``,
        ``password`` and optional ``category`` (single value or list)
    :return: list of result entries, sorted by ``search_sort`` descending
    """
    # Normalize config *before* reading required keys. The original read
    # config['rss_key'] first, so the dict-guard below it was dead code and a
    # non-dict config crashed with a TypeError instead of a clean KeyError.
    if not isinstance(config, dict):
        config = {}
    rss_key = config['rss_key']

    # build the login form request:
    data = {'username': config['username'], 'password': config['password'],
            'remember_me': 'on', 'submit': 'submit'}
    # POST the login form; the returned cookies authenticate the searches below.
    login = requests.post('http://torrentleech.org/', data=data)

    # sort = SORT.get(config.get('sort_by', 'seeds'))
    # if config.get('sort_reverse'):
    #     sort += 1
    categories = config.get('category', 'all')
    # Make sure categories is a list
    if not isinstance(categories, list):
        categories = [categories]
    # If there are any text categories, turn them into their id number
    categories = [c if isinstance(c, int) else CATEGORIES[c] for c in categories]
    filter_url = '/categories/%s' % ','.join(str(c) for c in categories)

    entries = set()
    for search_string in entry.get('search_strings', [entry['title']]):
        query = normalize_unicode(search_string).replace(":", "")
        # urllib.quote will crash if the unicode string has non ascii characters,
        # so encode in utf-8 beforehand
        url = ('http://torrentleech.org/torrents/browse/index/query/'
               + urllib.quote(query.encode('utf-8')) + filter_url)
        log.debug('Using %s as torrentleech search url' % url)
        page = requests.get(url, cookies=login.cookies).content
        soup = get_soup(page)

        for tr in soup.find_all('tr', ['even', 'odd']):
            # within each even or odd row, find the torrent names
            link = tr.find('a', attrs={'href': re.compile(r'/torrent/\d+')})
            log.debug('link phase: %s' % link.contents[0])
            entry = Entry()
            # extracts the contents of the <a>titlename</a> tag
            entry['title'] = link.contents[0]
            # find download link
            torrent_url = tr.find('a', attrs={'href': re.compile(r'/download/\d+/.*')}).get('href')
            # parse link and split along /download/12345 and /name.torrent
            download_url = re.search(r'(/download/\d+)/(.+\.torrent)', torrent_url)
            # change link to rss and splice in rss_key
            torrent_url = ('http://torrentleech.org/rss' + download_url.group(1)
                           + '/' + rss_key + '/' + download_url.group(2))
            log.debug('RSS-ified download link: %s' % torrent_url)
            entry['url'] = torrent_url
            # use tr object for seeders/leechers
            seeders, leechers = tr.find_all('td', ['seeders', 'leechers'])
            entry['torrent_seeds'] = int(seeders.contents[0])
            entry['torrent_leeches'] = int(leechers.contents[0])
            entry['search_sort'] = torrent_availability(entry['torrent_seeds'],
                                                        entry['torrent_leeches'])
            # use tr object for size; site reports decimal units, flexget wants MiB
            size = tr.find('td', text=re.compile(r'([\.\d]+) ([TGMK]?)B')).contents[0]
            size = re.search(r'([\.\d]+) ([TGMK]?)B', size)
            if size:
                if size.group(2) == 'T':
                    entry['content_size'] = int(float(size.group(1)) * 1000 ** 4 / 1024 ** 2)
                elif size.group(2) == 'G':
                    entry['content_size'] = int(float(size.group(1)) * 1000 ** 3 / 1024 ** 2)
                elif size.group(2) == 'M':
                    entry['content_size'] = int(float(size.group(1)) * 1000 ** 2 / 1024 ** 2)
                elif size.group(2) == 'K':
                    entry['content_size'] = int(float(size.group(1)) * 1000 / 1024 ** 2)
                else:
                    entry['content_size'] = int(float(size.group(1)) / 1024 ** 2)
            entries.add(entry)

    return sorted(entries, reverse=True, key=lambda x: x.get('search_sort'))
def token_oauth(data):
    """POST *data* to trakt's ``oauth/token`` endpoint and return the parsed JSON.

    Raises ``plugin.PluginError`` if the HTTP exchange fails.
    """
    try:
        result = requests.post(get_api_url('oauth/token'), data=data).json()
    except requests.RequestException as e:
        raise plugin.PluginError('Token exchange with trakt failed: {0}'.format(e))
    return result
def on_task_input(self, task, config):
    """
    Produce entries from a Plex Media Server TV section.

    Optionally authenticates against my.plexapp.com to obtain an access token,
    resolves a section name to its numeric key, then emits one entry per show
    (viewGroup "show") or per media part (viewGroup "episode").

    :param task: the running flexget task
    :param dict config: plugin config (server, port, section, selection,
        username/password, fetch, strip_year, lowercase_title,
        original_filename, unwatched_only)
    :return: list of entries
    :raises plugin.PluginError: on login/network failure, unknown section,
        or a non-TV section
    """
    config = self.prepare_config(config)
    accesstoken = ""
    urlconfig = {}
    urlappend = "?"
    if (config['unwatched_only'] and config['section'] != 'recentlyViewedShows'
            and config['section'] != 'all'):
        urlconfig['unwatched'] = '1'
    plexserver = config['server']
    # Resolve hostname to an IP so it can be matched against the server
    # addresses my.plexapp.com reports below.
    if gethostbyname(config['server']) != config['server']:
        config['server'] = gethostbyname(config['server'])

    if config['username'] and config['password'] and config['server'] != '127.0.0.1':
        header = {'X-Plex-Client-Identifier': 'flexget'}
        log.debug("Trying to to connect to myplex.")
        try:
            r = requests.post('https://my.plexapp.com/users/sign_in.xml',
                              auth=(config['username'], config['password']),
                              headers=header)
        except requests.RequestException as e:
            # Do NOT echo the password back: the original message embedded the
            # plaintext password, leaking the credential into logs/reports.
            raise plugin.PluginError(
                'Could not login to my.plexapp.com: %s. Username: %s'
                % (e, config['username']))
        log.debug("Connected to myplex.")
        if 'Invalid email' in r.text:
            raise plugin.PluginError(
                'Could not login to my.plexapp.com: invalid username and/or password!')
        log.debug("Managed to login to myplex.")
        dom = parseString(r.text)
        plextoken = dom.getElementsByTagName('authentication-token')[0].firstChild.nodeValue
        log.debug("Got plextoken: %s" % plextoken)
        try:
            r = requests.get("https://my.plexapp.com/pms/servers?X-Plex-Token=%s" % plextoken)
        except requests.RequestException as e:
            raise plugin.PluginError(
                'Could not get servers from my.plexapp.com using authentication-token: %s.'
                % plextoken)
        dom = parseString(r.text)
        for node in dom.getElementsByTagName('Server'):
            if node.getAttribute('address') == config['server']:
                accesstoken = node.getAttribute('accessToken')
                log.debug("Got accesstoken: %s" % accesstoken)
                urlconfig['X-Plex-Token'] = accesstoken
        if accesstoken == "":
            raise plugin.PluginError('Could not retrieve accesstoken for %s.'
                                     % config['server'])

    for key in urlconfig:
        urlappend += '%s=%s&' % (key, urlconfig[key])

    # Resolve a section *name* to its numeric key.
    if not isinstance(config['section'], int):
        try:
            r = requests.get("http://%s:%d/library/sections/%s"
                             % (config['server'], config['port'], urlappend))
        except requests.RequestException as e:
            raise plugin.PluginError('Error retrieving source: %s' % e)
        dom = parseString(r.text.encode("utf-8"))
        for node in dom.getElementsByTagName('Directory'):
            if node.getAttribute('title') == config['section']:
                config['section'] = int(node.getAttribute('key'))
    if not isinstance(config['section'], int):
        raise plugin.PluginError('Could not find section \'%s\'' % config['section'])

    log.debug("Fetching http://%s:%d/library/sections/%s/%s%s"
              % (config['server'], config['port'], config['section'],
                 config['selection'], urlappend))
    try:
        r = requests.get("http://%s:%d/library/sections/%s/%s%s"
                         % (config['server'], config['port'], config['section'],
                            config['selection'], urlappend))
    except requests.RequestException as e:
        raise plugin.PluginError('Error retrieving source: %s' % e)
    dom = parseString(r.text.encode("utf-8"))

    entries = []
    container = dom.getElementsByTagName('MediaContainer')[0]
    plexsectionname = container.getAttribute('title1')
    viewgroup = container.getAttribute('viewGroup')

    if viewgroup == "show":
        for node in dom.getElementsByTagName('Directory'):
            title = node.getAttribute('title')
            if config['strip_year']:
                title = re.sub(r'^(.*)\(\d+\)$', r'\1', title)
            title = re.sub(r'[\(\)]', r'', title)
            title = re.sub(r'\&', r'And', title)
            title = re.sub(r'[^A-Za-z0-9- ]', r'', title)
            if config['lowercase_title']:
                title = title.lower()
            e = Entry()
            e['title'] = title
            e['url'] = "NULL"
            e['plex_server'] = plexserver
            e['plex_port'] = config['port']
            e['plex_section'] = config['section']
            e['plex_section_name'] = plexsectionname
            entries.append(e)
    elif viewgroup == "episode":
        for node in dom.getElementsByTagName('Video'):
            title = node.getAttribute('grandparentTitle')
            season = int(node.getAttribute('parentIndex'))
            episodethumb = "http://%s:%d%s%s" % (config['server'], config['port'],
                                                 node.getAttribute('thumb'), urlappend)
            seriesart = "http://%s:%d%s%s" % (config['server'], config['port'],
                                              node.getAttribute('art'), urlappend)
            seasoncover = "http://%s:%d%s%s" % (config['server'], config['port'],
                                                node.getAttribute('parentThumb'), urlappend)
            # NOTE: the original also built a grandparentThumb URL into an
            # unused local (seriescover); removed as dead code.
            episodetitle = node.getAttribute('title')
            episodesummary = node.getAttribute('summary')
            # Date-based shows report the year as parentIndex; use the air
            # date as the "season" and skip the numeric episode formatting.
            if node.getAttribute('parentIndex') == node.getAttribute('year'):
                season = node.getAttribute('originallyAvailableAt')
                filenamemap = "%s_%s%s_%s_%s_%s.%s"
                episode = ""
            elif node.getAttribute('index'):
                episode = int(node.getAttribute('index'))
                filenamemap = "%s_%02dx%02d_%s_%s_%s.%s"
            else:
                log.debug("Could not get episode number for '%s' (Hint, ratingKey: %s)"
                          % (title, node.getAttribute('ratingKey')))
                break
            for media in node.getElementsByTagName('Media'):
                vcodec = media.getAttribute('videoCodec')
                acodec = media.getAttribute('audioCodec')
                # Anything other than the media file itself is fetched as a jpg.
                if config['fetch'] == "file" or not config['fetch']:
                    container_ext = media.getAttribute('container')
                else:
                    container_ext = "jpg"
                resolution = media.getAttribute('videoResolution') + "p"
                for part in media.getElementsByTagName('Part'):
                    key = part.getAttribute('key')
                    duration = part.getAttribute('duration')
                    e = Entry()
                    if config['original_filename']:
                        filename, fileext = os.path.splitext(
                            basename(part.getAttribute('file')))
                        if config['fetch'] != 'file':
                            e['title'] = "%s.jpg" % filename
                        else:
                            # splitext keeps the leading dot in fileext; the
                            # original "%s.%s" produced a doubled dot
                            # ("name..mkv").
                            e['title'] = "%s%s" % (filename, fileext)
                    else:
                        if config['strip_year']:
                            title = re.sub(r'^(.*)\(\d+\)$', r'\1', title)
                        title = re.sub(r'[\(\)]', r'', title)
                        title = re.sub(r'\&', r'And', title).strip()
                        title = re.sub(r'[^A-Za-z0-9- ]', r'', title).replace(" ", ".")
                        if config['lowercase_title']:
                            title = title.lower()
                        e['title'] = filenamemap % (title, season, episode, resolution,
                                                    vcodec, acodec, container_ext)
                    e['filename'] = e['title']
                    e['plex_url'] = "http://%s:%d%s%s" % (config['server'], config['port'],
                                                          key, urlappend)
                    e['url'] = "http://%s:%d%s%s" % (config['server'], config['port'],
                                                     key, urlappend)
                    e['plex_server'] = plexserver
                    e['plex_server_ip'] = config['server']
                    e['plex_port'] = config['port']
                    e['plex_section'] = config['section']
                    e['plex_section_name'] = plexsectionname
                    e['plex_path'] = key
                    e['plex_duration'] = duration
                    e['plex_episode_thumb'] = episodethumb
                    e['plex_series_art'] = seriesart
                    e['plex_season_cover'] = seasoncover
                    e['plex_episode_title'] = episodetitle
                    e['plex_episode_summary'] = episodesummary
                    if config['fetch'] == "file" or not config['fetch']:
                        e['url'] = e['plex_url']
                    elif config['fetch'] == "episode_thumb":
                        e['url'] = e['plex_episode_thumb']
                    elif config['fetch'] == "series_art":
                        e['url'] = e['plex_series_art']
                    elif config['fetch'] == "season_cover":
                        e['url'] = e['plex_season_cover']
                    log.debug("Setting url to %s since %s was selected."
                              % (e['url'], config['fetch']))
                    # Items without a /library/ path aren't actually in the PMS.
                    if e['url'].find('/library/') == -1:
                        log.debug('Seems like the chosen item could not be found in the PMS. Oh, well. NEXT!')
                        break
                    entries.append(e)
    else:
        raise plugin.PluginError('Selected section is not a TV section.')
    return entries
def search(self, entry, config=None):
    """
    Search torrentleech.org for the entry's title.

    If no results are found, retries with the trailing ``-suffix`` stripped
    from the query; raises ``PluginWarning`` if nothing matches at all.

    :param entry: entry (or mapping with a ``title`` key) to find results for
    :param dict config: plugin config with ``rss_key``, ``username``,
        ``password`` and optional ``category`` (name or numeric id)
    :return: list of result entries, sorted by ``search_sort`` descending
    """
    # Normalize config *before* reading required keys. The original read
    # config['rss_key'] first, so the dict-guard below it was dead code.
    if not isinstance(config, dict):
        config = {}
    rss_key = config['rss_key']

    # build the login form request:
    data = {'username': config['username'], 'password': config['password'],
            'remember_me': 'on', 'submit': 'submit'}
    # POST the login form; the returned cookies authenticate the search below.
    login = requests.post('http://torrentleech.org/', data=data)

    # sort = SORT.get(config.get('sort_by', 'seeds'))
    # if config.get('sort_reverse'):
    #     sort += 1
    if isinstance(config.get('category'), int):
        category = config['category']
    else:
        category = CATEGORIES.get(config.get('category', 'all'))
    filter_url = '/categories/%d' % (category)

    query = normalize_unicode(entry['title'])
    # urllib.quote will crash if the unicode string has non ascii characters,
    # so encode in utf-8 beforehand
    url = ('http://torrentleech.org/torrents/browse/index/query/'
           + urllib.quote(query.encode('utf-8')) + filter_url)
    log.debug('Using %s as torrentleech search url' % url)
    page = requests.get(url, cookies=login.cookies).content
    soup = get_soup(page)

    entries = []
    for tr in soup.find_all('tr', ['even', 'odd']):
        # within each even or odd row, find the torrent names
        link = tr.find('a', attrs={'href': re.compile(r'/torrent/\d+')})
        log.debug('link phase: %s' % link.contents[0])
        entry = Entry()
        # extracts the contents of the <a>titlename</a> tag
        entry['title'] = link.contents[0]
        # find download link
        torrent_url = tr.find('a', attrs={'href': re.compile(r'/download/\d+/.*')}).get('href')
        # parse link and split along /download/12345 and /name.torrent
        download_url = re.search(r'(/download/\d+)/(.+\.torrent)', torrent_url)
        # change link to rss and splice in rss_key
        torrent_url = ('http://torrentleech.org/rss' + download_url.group(1)
                       + '/' + rss_key + '/' + download_url.group(2))
        log.debug('RSS-ified download link: %s' % torrent_url)
        entry['url'] = torrent_url
        # use tr object for seeders/leechers
        seeders, leechers = tr.find_all('td', ['seeders', 'leechers'])
        entry['torrent_seeds'] = int(seeders.contents[0])
        entry['torrent_leeches'] = int(leechers.contents[0])
        entry['search_sort'] = torrent_availability(entry['torrent_seeds'],
                                                    entry['torrent_leeches'])
        # use tr object for size; site reports decimal units, flexget wants MiB
        size = tr.find('td', text=re.compile(r'([\.\d]+) ([GMK])B')).contents[0]
        size = re.search(r'([\.\d]+) ([GMK])B', size)
        if size:
            if size.group(2) == 'G':
                entry['content_size'] = int(float(size.group(1)) * 1000 ** 3 / 1024 ** 2)
            elif size.group(2) == 'M':
                entry['content_size'] = int(float(size.group(1)) * 1000 ** 2 / 1024 ** 2)
            else:
                entry['content_size'] = int(float(size.group(1)) * 1000 / 1024 ** 2)
        entries.append(entry)

    if not entries:
        dashindex = query.rfind('-')
        if dashindex != -1:
            # Retry with the trailing '-suffix' stripped. The original passed
            # the bare string (entry['title'] on a str raises TypeError) and
            # dropped config (crashing on config['rss_key'] in the retry).
            return self.search({'title': query[:dashindex]}, config)
        else:
            raise PluginWarning('No close matches for %s' % query, log, log_once=True)

    entries.sort(reverse=True, key=lambda x: x.get('search_sort'))
    return entries
def search(self, entry, config=None):
    """
    Search torrentleech.org for the entry's title.

    If no results are found, retries with the trailing ``-suffix`` stripped
    from the query; raises ``PluginWarning`` if nothing matches at all.

    :param entry: entry (or mapping with a ``title`` key) to find results for
    :param dict config: plugin config with ``rss_key``, ``username``,
        ``password`` and optional ``category`` (name or numeric id)
    :return: list of result entries, sorted by ``search_sort`` descending
    """
    # Normalize config *before* reading required keys. The original read
    # config['rss_key'] first, so the dict-guard below it was dead code.
    if not isinstance(config, dict):
        config = {}
    rss_key = config['rss_key']

    # build the login form request:
    data = {
        'username': config['username'],
        'password': config['password'],
        'remember_me': 'on',
        'submit': 'submit'
    }
    # POST the login form; the returned cookies authenticate the search below.
    login = requests.post('http://torrentleech.org/', data=data)

    # sort = SORT.get(config.get('sort_by', 'seeds'))
    # if config.get('sort_reverse'):
    #     sort += 1
    if isinstance(config.get('category'), int):
        category = config['category']
    else:
        category = CATEGORIES.get(config.get('category', 'all'))
    filter_url = '/categories/%d' % (category)

    query = normalize_unicode(entry['title'])
    # urllib.quote will crash if the unicode string has non ascii characters,
    # so encode in utf-8 beforehand
    url = ('http://torrentleech.org/torrents/browse/index/query/'
           + urllib.quote(query.encode('utf-8')) + filter_url)
    log.debug('Using %s as torrentleech search url' % url)
    page = requests.get(url, cookies=login.cookies).content
    soup = get_soup(page)

    entries = []
    for tr in soup.find_all('tr', ['even', 'odd']):
        # within each even or odd row, find the torrent names
        link = tr.find('a', attrs={'href': re.compile(r'/torrent/\d+')})
        log.debug('link phase: %s' % link.contents[0])
        entry = Entry()
        # extracts the contents of the <a>titlename</a> tag
        entry['title'] = link.contents[0]
        # find download link
        torrent_url = tr.find('a', attrs={'href': re.compile(r'/download/\d+/.*')}).get('href')
        # parse link and split along /download/12345 and /name.torrent
        download_url = re.search(r'(/download/\d+)/(.+\.torrent)', torrent_url)
        # change link to rss and splice in rss_key
        torrent_url = ('http://torrentleech.org/rss' + download_url.group(1)
                       + '/' + rss_key + '/' + download_url.group(2))
        log.debug('RSS-ified download link: %s' % torrent_url)
        entry['url'] = torrent_url
        # use tr object for seeders/leechers
        seeders, leechers = tr.find_all('td', ['seeders', 'leechers'])
        entry['torrent_seeds'] = int(seeders.contents[0])
        entry['torrent_leeches'] = int(leechers.contents[0])
        entry['search_sort'] = torrent_availability(entry['torrent_seeds'],
                                                    entry['torrent_leeches'])
        # use tr object for size; site reports decimal units, flexget wants MiB
        size = tr.find('td', text=re.compile(r'([\.\d]+) ([GMK])B')).contents[0]
        size = re.search(r'([\.\d]+) ([GMK])B', size)
        if size:
            if size.group(2) == 'G':
                entry['content_size'] = int(float(size.group(1)) * 1000 ** 3 / 1024 ** 2)
            elif size.group(2) == 'M':
                entry['content_size'] = int(float(size.group(1)) * 1000 ** 2 / 1024 ** 2)
            else:
                entry['content_size'] = int(float(size.group(1)) * 1000 / 1024 ** 2)
        entries.append(entry)

    if not entries:
        dashindex = query.rfind('-')
        if dashindex != -1:
            # Retry with the trailing '-suffix' stripped. The original passed
            # the bare string (entry['title'] on a str raises TypeError) and
            # dropped config (crashing on config['rss_key'] in the retry).
            return self.search({'title': query[:dashindex]}, config)
        else:
            raise PluginWarning('No close matches for %s' % query, log, log_once=True)

    entries.sort(reverse=True, key=lambda x: x.get('search_sort'))
    return entries