示例#1
0
def mass_upsert_sql(table_name, value_dict, key_dict, sanitise=True):
    # type: (AnyStr, Dict, Dict, bool) -> List[List[AnyStr]]
    """
    Build an "upsert" as two SQL commands: an UPDATE, then an INSERT guarded
    by `WHERE changes() = 0` (SQLite-specific: the INSERT only takes effect
    when the preceding UPDATE modified no row).

    use with cl.extend(mass_upsert_sql(tableName, valueDict, keyDict))

    :param table_name: table name
    :param value_dict: dict of values to be set {'table_fieldname': value}
    :param key_dict: dict of restrains for update {'table_fieldname': value}
    :param sanitise: True to remove k, v pairs in keyDict from valueDict as they must not exist in both.
    This option has a performance hit so it's best to remove key_dict keys from value_dict and set this False instead.
    :type sanitise: Boolean
    :return: list of 2 sql command
    """
    cl = []

    # build ['col = ?', ...] placeholder fragments from a dict's keys
    gen_params = (lambda my_dict: [x + ' = ?' for x in iterkeys(my_dict)])

    # sanity: remove k, v pairs in keyDict from valueDict
    if sanitise:
        value_dict = dict(filter_iter(lambda k: k[0] not in key_dict, iteritems(value_dict)))

    # UPDATE uses '?' placeholders; values travel separately as parameters
    # noinspection SqlResolve
    cl.append(['UPDATE [%s] SET %s WHERE %s' %
               (table_name, ', '.join(gen_params(value_dict)), ' AND '.join(gen_params(key_dict))),
               list_values(value_dict) + list_values(key_dict)])

    # INSERT embeds both column names and values inline (single quotes
    # doubled for escaping), so this entry carries no parameter list.
    # NOTE(review): single-quoted identifiers in the column list rely on a
    # SQLite compatibility quirk -- confirm before porting to another engine.
    # noinspection SqlResolve
    cl.append(['INSERT INTO [' + table_name + '] (' +
               ', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in
                          itertools.chain(iterkeys(value_dict), iterkeys(key_dict))]) + ')' +
               ' SELECT ' +
               ', '.join(["'%s'" % ('%s' % v).replace("'", "''") for v in
                          itertools.chain(itervalues(value_dict), itervalues(key_dict))]) +
               ' WHERE changes() = 0'])
    return cl
示例#2
0
 def search_sources(self):
     # type: () -> Dict[int, AnyStr]
     """Return {id: name} for sources usable in searches: active, not
     mapped-only, not defunct and not fallback sources."""
     sources = {}
     for src in list_values(tvinfo_config):
         if src['mapped_only'] or not src.get('active'):
             continue
         if src.get('defunct') or True is src.get('fallback'):
             continue
         sources[int(src['id'])] = src['name']
     return sources
示例#3
0
 def fallback_sources(self):
     # type: () -> Dict[int, AnyStr]
     """
     :return: return all fallback indexers
     """
     return {int(src['id']): src['name']
             for src in list_values(tvinfo_config)
             if src.get('fallback') is True}
示例#4
0
 def all_sources(self):
     # type: () -> Dict[int, AnyStr]
     """
     :return: return all indexers including mapped only indexers excluding fallback indexers
     """
     return {int(src['id']): src['name']
             for src in list_values(tvinfo_config)
             if src.get('fallback') is not True}
示例#5
0
    def search(self, term=None, key=None):
        """Search every episode of every season in this show.

        Delegates to Season.search() per season and concatenates the hits.
        Always returns a list (possibly empty) of Episode() instances, so
        search_results[0]['episodename'] yields the episode name of the
        first match. Search terms are converted to lower case (unicode)
        strings; pass key (for example 'episodename') to restrict matching
        to a single field.

        Examples (t is an instance of Tvdb()):

        >> t['Scrubs'].search("my first day")
        [<Episode 01x01 - My First Day>]

        >> t['My Name Is Earl'].search('Faked His Own Death', key = 'episodename')
        [<Episode 01x04 - Faked His Own Death>]

        >> t['scrubs'].search('mentor', key = 'episodename')
        [<Episode 01x02 - My Mentor>, <Episode 03x15 - My Tormented Mentor>]

        Using search results:

        >> results = t['Scrubs'].search("my first")
        >> print results[0]['episodename']
        My First Day
        """
        matches = []
        for season in list_values(self):
            found = season.search(term=term, key=key)
            if found:
                matches.extend(found)

        return matches
示例#6
0
 def enabled(self):
     """
     Generator to yield iterable IDs for enabled notifiers
     :return: ID String
     :rtype: String
     """
     active = filter_iter(lambda x: x.is_enabled(), list_values(self.notifiers))
     for notifier in active:
         yield notifier.id()
示例#7
0
    def upsert(self, table_name, value_dict, key_dict):
        # type: (AnyStr, Dict, Dict) -> None
        """Update the row matched by key_dict; insert it when no row changed."""
        changes_at_start = self.connection.total_changes

        def assignments(src):
            # 'col = ?' fragment for each column in src
            return [col + ' = ?' for col in iterkeys(src)]

        # noinspection SqlResolve
        update_query = 'UPDATE [%s] SET %s WHERE %s' % (
            table_name, ', '.join(assignments(value_dict)), ' AND '.join(assignments(key_dict)))

        self.action(update_query, list_values(value_dict) + list_values(key_dict))

        # total_changes unchanged means the UPDATE matched nothing: insert instead
        if self.connection.total_changes == changes_at_start:
            columns = ', '.join(itertools.chain(iterkeys(value_dict), iterkeys(key_dict)))
            placeholders = ', '.join(['?'] * (len(value_dict) + len(key_dict)))
            # noinspection SqlResolve
            insert_query = 'INSERT INTO [' + table_name + ']' \
                           + ' (%s)' % columns \
                           + ' VALUES (%s)' % placeholders
            self.action(insert_query, list_values(value_dict) + list_values(key_dict))
示例#8
0
    def search(self, term=None, key=None):
        """Return a list of Episode instances in this season that match.

        >> t = Tvdb()
        >> t['scrubs'][1].search('first day')
        [<Episode 01x01 - My First Day>]

        See Show.search documentation for further information on search
        """
        matches = []
        for episode in list_values(self):
            found = episode.search(term=term, key=key)
            if found is not None:
                matches.append(found)
        return matches
示例#9
0
    def search(self, series):
        # type: (AnyStr) -> List
        """Query TheTVDB.com for a series name and return the result list."""
        if PY2:
            series = series.encode('utf-8')
        self.config['params_search_series']['name'] = series
        log.debug('Searching for show %s' % series)

        try:
            response = self._getetsrc(
                self.config['url_search_series'],
                params=self.config['params_search_series'],
                language=self.config['language'])
            if response:
                return list_values(response)[0]
        except (BaseException, Exception):
            # best-effort: any failure falls through to an empty result
            pass

        return []
示例#10
0
 def xem_supported_sources(self):
     # type: () -> Dict[int, AnyStr]
     """Return {id: name} for every source flagged with xem_origin."""
     supported = {}
     for src in list_values(tvinfo_config):
         if src.get('xem_origin'):
             supported[int(src['id'])] = src['name']
     return supported
示例#11
0
    def _search_provider(self, search_params, **kwargs):
        """Search this provider for each mode/term and collect torrent items.

        :param search_params: dict of search mode ('Cache', 'Season',
            'Episode', 'Propers') to a list of search strings
        :return: list of result items, seeding-sorted per mode
        """

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        # compiled regexes; rc['get'] matches 'magnet' case-insensitively
        rc = dict([(k, re.compile('(?i)' + v))
                   for (k, v) in iteritems({'get': 'magnet'})])
        urls = []
        for mode in search_params:
            for search_string in search_params[mode]:
                if 'Cache' == mode:
                    search_url = self.urls['browse']
                else:
                    search_string = unidecode(search_string)
                    # match the first word of the term against known show names
                    show_name = filter_list(
                        lambda x: x.lower() == re.sub(r'\s.*', '',
                                                      search_string.lower()),
                        list_values(self.shows))
                    if not show_name:
                        continue
                    search_url = self.urls['search'] % list_keys(
                        self.shows)[list_values(self.shows).index(
                            show_name[0])]

                # skip URLs already fetched during this run
                if search_url in urls:
                    continue
                urls += [search_url]

                html = self.get_url(search_url)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html) as soup:
                        tbl_rows = soup.select('ul.user-timeline > li')

                        if not len(tbl_rows):
                            raise generic.HaltParseException

                        for tr in tbl_rows:
                            try:
                                # per-row parse errors just skip the row
                                anchor = tr.find('a', href=rc['get'])
                                title = self.regulate_title(anchor)
                                download_url = self._link(anchor['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue

                            if title and download_url:
                                items[mode].append(
                                    (title, download_url, None, None))

                except generic.HaltParseException:
                    pass
                except (BaseException, Exception):
                    logger.log(
                        u'Failed to parse. Traceback: %s' %
                        traceback.format_exc(), logger.ERROR)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results
示例#12
0
def _get_proper_list(aired_since_shows,  # type: datetime.datetime
                     recent_shows,  # type: List[Tuple[int, int]]
                     recent_anime,  # type:  List[Tuple[int, int]]
                     proper_dict=None  # type:  Dict[AnyStr, List[Proper]]
                     ):
    # type: (...) -> List[Proper]
    """
    Gather candidate Proper releases from providers and filter them down to
    the ones worth downloading for episodes already in recent history.

    :param aired_since_shows: date since aired
    :param recent_shows: list of recent shows
    :param recent_anime: list of recent anime shows
    :param proper_dict: dict with provider keys containing Proper objects;
        None triggers a threaded per-provider search to populate it
    :return: list of propers
    """
    propers = {}
    # make sure the episode has been downloaded before
    history_limit = datetime.datetime.now() - datetime.timedelta(days=30)

    my_db = db.DBConnection()
    # for each provider get a list of arbitrary Propers
    orig_thread_name = threading.currentThread().name
    # filter provider list for:
    # 1. from recent search: recent search enabled providers
    # 2. native proper search: active search enabled providers
    provider_list = filter_list(
        lambda p: p.is_active() and (p.enable_recentsearch, p.enable_backlog)[None is proper_dict],
        sickbeard.providers.sortedProviderList())
    search_threads = []

    if None is proper_dict:
        # if not a recent proper search create a thread per provider to search for Propers
        proper_dict = {}
        for cur_provider in provider_list:
            if not recent_anime and cur_provider.anime_only:
                continue

            provider_id = cur_provider.get_id()

            logger.log('Searching for new Proper releases at [%s]' % cur_provider.name)
            proper_dict[provider_id] = []

            # each thread appends into proper_dict[provider_id] in place
            search_threads.append(threading.Thread(target=_search_provider,
                                                   kwargs={'cur_provider': cur_provider,
                                                           'provider_propers': proper_dict[provider_id],
                                                           'aired_since_shows': aired_since_shows,
                                                           'recent_shows': recent_shows,
                                                           'recent_anime': recent_anime},
                                                   name='%s :: [%s]' % (orig_thread_name, cur_provider.name)))

            search_threads[-1].start()

        # wait for all searches to finish
        for cur_thread in search_threads:
            cur_thread.join()

    for cur_provider in provider_list:
        if not recent_anime and cur_provider.anime_only:
            continue

        found_propers = proper_dict.get(cur_provider.get_id(), [])
        if not found_propers:
            continue

        # if they haven't been added by a different provider than add the Proper to the list
        for cur_proper in found_propers:
            name = _generic_name(cur_proper.name)
            if name in propers:
                continue

            try:
                np = NameParser(False, show_obj=cur_proper.parsed_show_obj, indexer_lookup=False)
                parse_result = np.parse(cur_proper.name)
            except (InvalidNameException, InvalidShowException, Exception):
                continue

            # get the show object
            cur_proper.parsed_show_obj = (cur_proper.parsed_show_obj
                                          or helpers.find_show_by_id(parse_result.show_obj.tvid_prodid))
            if None is cur_proper.parsed_show_obj:
                logger.log('Skip download; cannot find show with ID [%s] at %s' %
                           (cur_proper.prodid, sickbeard.TVInfoAPI(cur_proper.tvid).name), logger.ERROR)
                continue

            cur_proper.tvid = cur_proper.parsed_show_obj.tvid
            cur_proper.prodid = cur_proper.parsed_show_obj.prodid

            # only consider shows/episodes that were recently snatched/aired
            if not (-1 != cur_proper.prodid and parse_result.series_name and parse_result.episode_numbers
                    and (cur_proper.tvid, cur_proper.prodid) in recent_shows + recent_anime):
                continue

            # only get anime Proper if it has release group and version
            if parse_result.is_anime and not parse_result.release_group and -1 == parse_result.version:
                logger.log('Ignored Proper with no release group and version in name [%s]' % cur_proper.name,
                           logger.DEBUG)
                continue

            if not show_name_helpers.pass_wordlist_checks(cur_proper.name, parse=False, indexer_lookup=False,
                                                          show_obj=cur_proper.parsed_show_obj):
                logger.log('Ignored unwanted Proper [%s]' % cur_proper.name, logger.DEBUG)
                continue

            # apply the show's ignore/require word lists
            re_x = dict(re_prefix='.*', re_suffix='.*')
            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_ignore_words,
                                                    rx=cur_proper.parsed_show_obj.rls_ignore_words_regex, **re_x)
            if None is not result and result:
                logger.log('Ignored Proper containing ignore word [%s]' % cur_proper.name, logger.DEBUG)
                continue

            result = show_name_helpers.contains_any(cur_proper.name, cur_proper.parsed_show_obj.rls_require_words,
                                                    rx=cur_proper.parsed_show_obj.rls_require_words_regex, **re_x)
            if None is not result and not result:
                logger.log('Ignored Proper for not containing any required word [%s]' % cur_proper.name, logger.DEBUG)
                continue

            cur_size = getattr(cur_proper, 'size', None)
            if failed_history.has_failed(cur_proper.name, cur_size, cur_provider.name):
                continue

            cur_proper.season = parse_result.season_number if None is not parse_result.season_number else 1
            cur_proper.episode = parse_result.episode_numbers[0]
            # check if we actually want this Proper (if it's the right quality)
            sql_result = my_db.select(
                'SELECT release_group, status, version, release_name'
                ' FROM tv_episodes'
                ' WHERE indexer = ? AND showid = ?'
                ' AND season = ? AND episode = ?'
                ' LIMIT 1',
                [cur_proper.tvid, cur_proper.prodid,
                 cur_proper.season, cur_proper.episode])
            if not sql_result:
                continue

            # only keep the Proper if we already retrieved the same quality ep (don't get better/worse ones)
            # check if we want this release: same quality as current, current has correct status
            # restrict other release group releases to Proper's
            old_status, old_quality = Quality.splitCompositeStatus(int(sql_result[0]['status']))
            cur_proper.quality = Quality.nameQuality(cur_proper.name, parse_result.is_anime)
            cur_proper.is_repack, cur_proper.properlevel = Quality.get_proper_level(
                parse_result.extra_info_no_name(), parse_result.version, parse_result.is_anime, check_is_repack=True)
            cur_proper.proper_level = cur_proper.properlevel    # local non global value
            if old_status in SNATCHED_ANY:
                # still snatched: take the release group from the snatch history entry
                old_release_group = ''
                # noinspection SqlResolve
                history_results = my_db.select(
                    'SELECT resource FROM history'
                    ' WHERE indexer = ? AND showid = ?'
                    ' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
                    ' AND (%s) ORDER BY date DESC LIMIT 1' % ' OR '.join(
                        ['action = "%d%02d"' % (old_quality, x) for x in SNATCHED_ANY]),
                    [cur_proper.tvid, cur_proper.prodid,
                     cur_proper.season, cur_proper.episode, cur_proper.quality,
                     history_limit.strftime(history.dateFormat)])
                if len(history_results):
                    try:
                        old_release_group = np.parse(history_results[0]['resource']).release_group
                    except (BaseException, Exception):
                        pass
            else:
                old_release_group = sql_result[0]['release_group']
            try:
                same_release_group = parse_result.release_group.lower() == old_release_group.lower()
            except (BaseException, Exception):
                same_release_group = parse_result.release_group == old_release_group
            if old_status not in SNATCHED_ANY + [DOWNLOADED, ARCHIVED] \
                    or cur_proper.quality != old_quality \
                    or (cur_proper.is_repack and not same_release_group):
                continue

            np = NameParser(False, show_obj=cur_proper.parsed_show_obj, indexer_lookup=False)
            try:
                extra_info = np.parse(sql_result[0]['release_name']).extra_info_no_name()
            except (BaseException, Exception):
                extra_info = None
            # don't take Proper of the same level we already downloaded
            old_proper_level, old_extra_no_name, old_name = \
                get_old_proper_level(cur_proper.parsed_show_obj, cur_proper.tvid, cur_proper.prodid,
                                     cur_proper.season, parse_result.episode_numbers,
                                     old_status, cur_proper.quality, extra_info,
                                     parse_result.version, parse_result.is_anime)
            if cur_proper.proper_level <= old_proper_level:
                continue

            is_web = (old_quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD4KWEB) or
                      (old_quality == Quality.SDTV and
                       isinstance(sql_result[0]['release_name'], string_types) and
                       re.search(r'\Wweb.?(dl|rip|.([hx]\W?26[45]|hevc))\W', sql_result[0]['release_name'], re.I)))

            if is_web:
                # webdl sources (e.g. different streaming services) must match
                old_name = (old_name, sql_result[0]['release_name'])[old_name in ('', None)]
                old_webdl_type = get_webdl_type(old_extra_no_name, old_name)
                new_webdl_type = get_webdl_type(parse_result.extra_info_no_name(), cur_proper.name)
                if old_webdl_type != new_webdl_type:
                    logger.log('Ignored Proper webdl source [%s], does not match existing webdl source [%s] for [%s]'
                               % (old_webdl_type, new_webdl_type, cur_proper.name), logger.DEBUG)
                    continue

            # for webdls, prevent Propers from different groups
            log_same_grp = 'Ignored Proper from release group [%s] does not match existing group [%s] for [%s]' \
                           % (parse_result.release_group, old_release_group, cur_proper.name)
            if sickbeard.PROPERS_WEBDL_ONEGRP and is_web and not same_release_group:
                logger.log(log_same_grp, logger.DEBUG)
                continue

            # check if we actually want this Proper (if it's the right release group and a higher version)
            if parse_result.is_anime:
                old_version = int(sql_result[0]['version'])
                if not (-1 < old_version < parse_result.version):
                    continue
                if not same_release_group:
                    logger.log(log_same_grp, logger.DEBUG)
                    continue
                found_msg = 'Found anime Proper v%s to replace v%s' % (parse_result.version, old_version)
            else:
                found_msg = 'Found Proper [%s]' % cur_proper.name

            # noinspection SqlResolve
            history_results = my_db.select(
                'SELECT resource FROM history'
                ' WHERE indexer = ? AND showid = ?'
                ' AND season = ? AND episode = ? AND quality = ? AND date >= ?'
                ' AND (%s)' % ' OR '.join(['action LIKE "%%%02d"' % x for x in SNATCHED_ANY + [DOWNLOADED, ARCHIVED]]),
                [cur_proper.tvid, cur_proper.prodid,
                 cur_proper.season, cur_proper.episode, cur_proper.quality,
                 history_limit.strftime(history.dateFormat)])

            # skip if the episode has never downloaded, because a previous quality is required to match the Proper
            if not len(history_results):
                logger.log('Ignored Proper cannot find a recent history item for [%s]' % cur_proper.name, logger.DEBUG)
                continue

            # make sure that none of the existing history downloads are the same Proper as the download candidate
            clean_proper_name = _generic_name(helpers.remove_non_release_groups(
                cur_proper.name, cur_proper.parsed_show_obj.is_anime))
            is_same = False
            for hitem in history_results:
                # if the result exists in history already we need to skip it
                if clean_proper_name == _generic_name(helpers.remove_non_release_groups(
                        ek.ek(os.path.basename, hitem['resource']))):
                    is_same = True
                    break
            if is_same:
                logger.log('Ignored Proper already in history [%s]' % cur_proper.name)
                continue

            logger.log(found_msg, logger.DEBUG)

            # finish populating the Proper instance
            # cur_proper.show_obj = cur_proper.parsed_show_obj.prodid
            cur_proper.provider = cur_provider
            cur_proper.extra_info = parse_result.extra_info
            cur_proper.extra_info_no_name = parse_result.extra_info_no_name
            cur_proper.release_group = parse_result.release_group

            cur_proper.is_anime = parse_result.is_anime
            cur_proper.version = parse_result.version

            propers[name] = cur_proper

        cur_provider.log_result('Propers', len(propers), '%s' % cur_provider.name)

    return list_values(propers)
示例#13
0
 def enabled_library(self):
     """Yield the ID of each notifier enabled for library updates."""
     active = filter_iter(
         lambda x: x.is_enabled() and x.is_enabled_library(),
         list_values(self.notifiers))
     for notifier in active:
         yield notifier.id()
示例#14
0
 def enabled_onsubtitledownload(self):
     """Yield the ID of each notifier enabled for subtitle downloads."""
     active = filter_iter(
         lambda x: x.is_enabled() and x.is_enabled_onsubtitledownload(),
         list_values(self.notifiers))
     for notifier in active:
         yield notifier.id()
示例#15
0
文件: plex.py 项目: valnar1/SickGear
    def update_library(self,
                       ep_obj=None,
                       host=None,
                       username=None,
                       password=None,
                       location=None,
                       **kwargs):
        """Handles updating the Plex Media Server host via HTTP API

        Plex Media Server currently only supports updating the whole video library and not a specific path.

        :param ep_obj: episode object whose location narrows which TV sections to refresh
        :param host: Plex host(s); falls back to sickbeard.PLEX_SERVER_HOST
        :param username: plex.tv username for token auth; falls back to settings
        :param password: plex.tv password for token auth; falls back to settings
        :param location: explicit library path overriding ep_obj.location

        Returns:
            Returns None for no issue, else a string of host with connection issues

        """
        host = self._choose(host, sickbeard.PLEX_SERVER_HOST)
        if not host:
            msg = u'No Plex Media Server host specified, check your settings'
            self._log_debug(msg)
            return '%sFail: %s' % (('', '<br>')[self._testing], msg)

        username = self._choose(username, sickbeard.PLEX_USERNAME)
        password = self._choose(password, sickbeard.PLEX_PASSWORD)

        # if username and password were provided, fetch the auth token from plex.tv
        token_arg = None
        if username and password:

            # FIX(review): this line was corrupted by a redaction artifact
            # ('******' replacing two statements); reconstructed from the
            # surrounding req.add_header(...)/urlopen(req) usage.
            self._log_debug(u'Fetching plex.tv credentials for user: ' + username)
            req = urllib.request.Request('https://plex.tv/users/sign_in.xml',
                                         data=b'')
            req.add_header(
                'Authorization',
                'Basic %s' % b64encodestring('%s:%s' % (username, password)))
            req.add_header('X-Plex-Device-Name', 'SickGear')
            req.add_header('X-Plex-Product', 'SickGear Notifier')
            req.add_header('X-Plex-Client-Identifier',
                           '5f48c063eaf379a565ff56c9bb2b401e')
            req.add_header('X-Plex-Version', '1.0')
            # False marks "auth attempted but failed" for the report below
            token_arg = False

            try:
                http_response_obj = urllib.request.urlopen(
                    req)  # PY2 http_response_obj has no `with` context manager
                auth_tree = XmlEtree.parse(http_response_obj)
                http_response_obj.close()
                token = auth_tree.findall('.//authentication-token')[0].text
                token_arg = '?X-Plex-Token=' + token

            except urllib.error.URLError as e:
                self._log(
                    u'Error fetching credentials from plex.tv for user %s: %s'
                    % (username, ex(e)))

            except (ValueError, IndexError) as e:
                self._log(u'Error parsing plex.tv response: ' + ex(e))

        file_location = location if None is not location else '' if None is ep_obj else ep_obj.location
        host_validate = self._get_host_list(host, all([token_arg]))
        hosts_all = {}
        hosts_match = {}
        hosts_failed = []
        # probe every configured host for its library sections
        for cur_host in host_validate:
            response = sickbeard.helpers.get_url('%s/library/sections%s' %
                                                 (cur_host, token_arg or ''),
                                                 timeout=10,
                                                 mute_connect_err=True,
                                                 mute_read_timeout=True,
                                                 mute_connect_timeout=True)
            if response:
                response = sickbeard.helpers.parse_xml(response)
            if None is response or not len(response):
                hosts_failed.append(cur_host)
                continue

            sections = response.findall('.//Directory')
            if not sections:
                self._log(u'Plex Media Server not running on: ' + cur_host)
                hosts_failed.append(cur_host)
                continue

            # collect TV ('show' type) sections; remember those whose paths
            # contain the downloaded episode's location
            for section in filter_iter(lambda x: 'show' == x.attrib['type'],
                                       sections):
                if str(section.attrib['key']) in hosts_all:
                    continue
                keyed_host = [(str(section.attrib['key']), cur_host)]
                hosts_all.update(keyed_host)
                if not file_location:
                    continue

                for section_location in section.findall('.//Location'):
                    # normalise both paths (forward slashes, lower case,
                    # drive/leading part stripped) before comparing
                    section_path = re.sub(
                        r'[/\\]+', '/',
                        section_location.attrib['path'].lower())
                    section_path = re.sub(r'^(.{,2})[/\\]', '', section_path)
                    location_path = re.sub(r'[/\\]+', '/',
                                           file_location.lower())
                    location_path = re.sub(r'^(.{,2})[/\\]', '', location_path)

                    if section_path in location_path:
                        hosts_match.update(keyed_host)
                        break

        if not self._testing:
            # prefer matching sections; refresh all TV sections otherwise
            hosts_try = (hosts_all.copy(),
                         hosts_match.copy())[any(hosts_match)]
            host_list = []
            for section_key, cur_host in iteritems(hosts_try):
                refresh_result = None
                if not self._testing:
                    refresh_result = sickbeard.helpers.get_url(
                        '%s/library/sections/%s/refresh%s' %
                        (cur_host, section_key, token_arg or ''))
                if (not self._testing
                        and '' == refresh_result) or self._testing:
                    host_list.append(cur_host)
                else:
                    hosts_failed.append(cur_host)
                    self._log_error(
                        u'Error updating library section for Plex Media Server: %s'
                        % cur_host)

            if len(hosts_failed) == len(host_validate):
                self._log(u'No successful Plex host updated')
                return 'Fail no successful Plex host updated: %s' % ', '.join(
                    [host for host in hosts_failed])
            else:
                hosts = ', '.join(set(host_list))
                if len(hosts_match):
                    self._log(
                        u'Hosts updating where TV section paths match the downloaded show: %s'
                        % hosts)
                else:
                    self._log(u'Updating all hosts with TV sections: %s' %
                              hosts)
                return ''

        # testing mode: build an HTML report of reachable/failed hosts split
        # by plain http vs https connections
        hosts = [
            host.replace('http://', '') for host in filter_iter(
                lambda x: x.startswith('http:'), list_values(hosts_all))
        ]
        secured = [
            host.replace('https://', '') for host in filter_iter(
                lambda x: x.startswith('https:'), list_values(hosts_all))
        ]
        failed = ', '.join([
            host.replace('http://', '') for host in filter_iter(
                lambda x: x.startswith('http:'), hosts_failed)
        ])
        failed_secured = ', '.join(
            filter_iter(lambda x: x not in hosts, [
                host.replace('https://', '') for host in filter_iter(
                    lambda x: x.startswith('https:'), hosts_failed)
            ]))

        return '<br>' + '<br>'.join([
            result for result in
            [('',
              'Fail: username/password when fetching credentials from plex.tv'
              )[False is token_arg],
             ('',
              'OK (secure connect): %s' % ', '.join(secured))[any(secured)],
             ('', 'OK%s: %s' %
              ((' (legacy connect)', '')[None is token_arg], ', '.join(hosts))
              )[any(hosts)], ('', 'Fail (secure connect): %s' %
                              failed_secured)[any(failed_secured)],
             ('', 'Fail%s: %s' % ((' (legacy connect)', '')[None is token_arg],
                                  failed))[bool(failed)]] if result
        ])