Example #1
    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            self._change_missing_episodes()

            self.update_providers()

            show_list = sickbeard.showList
            fromDate = datetime.date.fromordinal(1)
            for curShow in show_list:
                if curShow.paused:
                    continue

                self.episodes.extend(wantedEpisodes(curShow, fromDate))

            if not self.episodes:
                logger.log(u'No search of cache for episodes required')
                self.success = True
            else:
                num_shows = len(set([ep.show.name for ep in self.episodes]))
                logger.log(u'Found %d needed episode%s spanning %d show%s' %
                           (len(self.episodes),
                            helpers.maybe_plural(len(self.episodes)),
                            num_shows, helpers.maybe_plural(num_shows)))

                try:
                    logger.log(u'Beginning recent search for episodes')
                    found_results = search.searchForNeededEpisodes(
                        self.episodes)

                    if not len(found_results):
                        logger.log(u'No needed episodes found')
                    else:
                        for result in found_results:
                            # just use the first result for now
                            logger.log(u'Downloading %s from %s' %
                                       (result.name, result.provider.name))
                            self.success = search.snatchEpisode(result)

                            # give the CPU a break
                            time.sleep(
                                common.cpu_presets[sickbeard.CPU_PRESET])

                except Exception:
                    logger.log(traceback.format_exc(), logger.DEBUG)

                if self.success is None:
                    self.success = False

        finally:
            self.finish()
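Every example on this page feeds counts through helpers.maybe_plural to pick the right log suffix. The helper itself is not among the excerpts; a minimal, hypothetical sketch of the behaviour these call sites assume (inferred only from how it is called with both numbers and lists) would be:

def maybe_plural(subject=1):
    """Return 's' when subject, a number or a sized collection, is not 1."""
    # sketch only: the real SickGear helper may differ in detail
    try:
        number = len(subject)
    except TypeError:
        number = subject
    return ('s', '')[1 == number]

So maybe_plural(1) yields '', while maybe_plural(3) and maybe_plural(['a', 'b']) yield 's', which is what the '%d episode%s' templates in these examples expect.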
Example #2
def _xem_get_ids(indexer_name, xem_origin):
    xem_ids = []

    url = 'http://thexem.de/map/havemap?origin=%s' % xem_origin

    task = 'Fetching show ids with%s xem scene mapping%s for origin'
    logger.log(u'%s %s' % (task % ('', 's'), indexer_name))
    parsed_json = helpers.getURL(url, json=True, timeout=90)
    if not parsed_json:
        logger.log(u'Failed %s %s, Unable to get URL: %s'
                   % (task.lower() % ('', 's'), indexer_name, url), logger.ERROR)
    else:
        if 'result' in parsed_json and 'success' == parsed_json['result'] and 'data' in parsed_json:
            try:
                for indexerid in parsed_json['data']:
                    xem_id = helpers.tryInt(indexerid)
                    if xem_id and xem_id not in xem_ids:
                        xem_ids.append(xem_id)
            except:
                pass
            if 0 == len(xem_ids):
                logger.log(u'Failed %s %s, no data items parsed from URL: %s'
                           % (task.lower() % ('', 's'), indexer_name, url), logger.WARNING)

    logger.log(u'Finished %s %s' % (task.lower() % (' %s' % len(xem_ids), helpers.maybe_plural(len(xem_ids))),
                                    indexer_name))
    return xem_ids
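_xem_get_ids relies on helpers.tryInt to turn the JSON keys into ids without raising. A plausible sketch, assuming the helper follows the usual tolerant-cast pattern (the fallback of 0 is conveniently falsy, so bad ids fail the if xem_id test above):

def tryInt(value, fallback=0):
    # hypothetical sketch of a tolerant integer cast
    try:
        return int(value)
    except (TypeError, ValueError):
        return fallback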
Example #3
def _log_result(mode='cache', count=0, url='url missing'):
    """
    Simple function to log the result of a search
    :param count: count of successfully processed items
    :param url: source url of item(s)
    """
    mode = mode.lower()
    logger.log(u'%s in response from %s' % (('No %s items' % mode,
                                             '%s %s item%s' % (count, mode, maybe_plural(count)))[0 < count], url))
Example #4
def wanted_episodes(show, from_date, make_dict=False, unaired=False):

    ep_count, ep_count_scene, sql_results_org = get_aired_in_season(show, return_sql=True)

    from_date_ord = from_date.toordinal()
    if unaired:
        sql_results = [s for s in sql_results_org if s['airdate'] > from_date_ord or s['airdate'] == 1]
    else:
        sql_results = [s for s in sql_results_org if s['airdate'] > from_date_ord]

    if make_dict:
        wanted = {}
    else:
        wanted = []

    total_wanted = total_replacing = total_unaired = 0

    if 0 < len(sql_results) and 2 < len(sql_results) - len(show.episodes):
        myDB = db.DBConnection()
        show_ep_sql = myDB.select('SELECT * FROM tv_episodes WHERE showid = ? AND indexer = ?',
                                  [show.indexerid, show.indexer])
    else:
        show_ep_sql = None

    for result in sql_results:
        ep_obj = show.getEpisode(int(result['season']), int(result['episode']), ep_sql=show_ep_sql)
        cur_status, cur_quality = common.Quality.splitCompositeStatus(ep_obj.status)
        ep_obj.wantedQuality = get_wanted_qualities(ep_obj, cur_status, cur_quality, unaired=unaired)
        if not ep_obj.wantedQuality:
            continue

        ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
        ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
            helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season
        if make_dict:
            wanted.setdefault(ep_obj.scene_season if ep_obj.show.is_scene else ep_obj.season, []).append(ep_obj)
        else:
            wanted.append(ep_obj)

        if cur_status in (common.WANTED, common.FAILED):
            total_wanted += 1
        elif cur_status in (common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN):
            total_unaired += 1
        else:
            total_replacing += 1

    if 0 < total_wanted + total_replacing + total_unaired:
        actions = []
        for msg, total in ['%d episode%s', total_wanted], \
                          ['to upgrade %d episode%s', total_replacing], \
                          ['%d unaired episode%s', total_unaired]:
            if 0 < total:
                actions.append(msg % (total, helpers.maybe_plural(total)))
        logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))

    return wanted
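The closing block of wanted_episodes assembles its summary from (template, count) pairs and keeps only the non-zero ones. The same pattern in isolation, with a hypothetical stand-in for helpers.maybe_plural:

def maybe_plural(number=1):  # stand-in, see the sketch under Example #1
    return ('s', '')[1 == number]

actions = []
for msg, total in [('%d episode%s', 2),
                   ('to upgrade %d episode%s', 0),
                   ('%d unaired episode%s', 1)]:
    if 0 < total:
        actions.append(msg % (total, maybe_plural(total)))
print(' and '.join(actions))
# prints: 2 episodes and 1 unaired episode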
Example #5
    def log_result(self, mode='Cache', count=0, url='url missing'):
        """
        Simple function to log the result of any search
        :param count: count of successfully processed items
        :param url: source url of item(s)
        """
        str1, thing, str3 = (('', '%s item' % mode.lower(), ''),
                             (' usable', 'proper',
                              ' found'))['Propers' == mode]
        logger.log(
            u'%s %s in response from %s' %
            (('No' + str1, count)[0 < count],
             ('%s%s%s%s' %
              (('', 'freeleech ')[getattr(self, 'freeleech', False)], thing,
               maybe_plural(count), str3)), re.sub('(\s)\s+', r'\1', url)))
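The last argument above runs the url through re.sub to squeeze runs of whitespace down to their first character before logging. On its own, with a demo value:

import re

url = 'http://example.invalid/feed   with  stray   spaces'
print(re.sub(r'(\s)\s+', r'\1', url))
# prints: http://example.invalid/feed with stray spaces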
Example #6
    def _cache_image_from_indexer(self, show_obj, img_type, num_files=0, max_files=500):
        """
        Retrieves an image of the type specified from indexer and saves it to the cache folder

        returns: bool representing success

        show_obj: TVShow object that we want to cache an image for
        img_type: BANNER, POSTER, or FANART
        """

        # generate the path based on the type & indexer_id
        if img_type == self.POSTER:
            img_type_name = 'poster'
            dest_path = self.poster_path(show_obj.indexerid)
        elif img_type == self.BANNER:
            img_type_name = 'banner'
            dest_path = self.banner_path(show_obj.indexerid)
        elif img_type == self.FANART:
            img_type_name = 'fanart_all'
            dest_path = self.fanart_path(show_obj.indexerid).replace('fanart.jpg', '*')
        elif img_type == self.POSTER_THUMB:
            img_type_name = 'poster_thumb'
            dest_path = self.poster_thumb_path(show_obj.indexerid)
        elif img_type == self.BANNER_THUMB:
            img_type_name = 'banner_thumb'
            dest_path = self.banner_thumb_path(show_obj.indexerid)
        else:
            logger.log(u'Invalid cache image type: ' + str(img_type), logger.ERROR)
            return False

        # retrieve the image from indexer using the generic metadata class
        metadata_generator = GenericMetadata()
        if img_type == self.FANART:
            image_urls = metadata_generator.retrieve_show_image(img_type_name, show_obj)
            if None is image_urls:
                return False

            crcs = []
            for cache_file_name in ek.ek(glob.glob, dest_path):
                with open(cache_file_name, mode='rb') as resource:
                    crc = '%05X' % (zlib.crc32(resource.read()) & 0xFFFFFFFF)
                if crc not in crcs:
                    crcs += [crc]

            success = 0
            count_urls = len(image_urls)
            sources = []
            for image_url in image_urls or []:
                img_data = helpers.getURL(image_url, nocache=True)
                if None is img_data:
                    continue
                crc = '%05X' % (zlib.crc32(img_data) & 0xFFFFFFFF)
                if crc in crcs:
                    count_urls -= 1
                    continue
                crcs += [crc]
                img_source = (((('', 'tvdb')['thetvdb.com' in image_url],
                                'tvrage')['tvrage.com' in image_url],
                               'fatv')['fanart.tv' in image_url],
                              'tmdb')['tmdb' in image_url]
                img_xtra = ''
                if 'tmdb' == img_source:
                    match = re.search(r'(?:.*\?(\d+$))?', image_url, re.I | re.M)
                    if match and None is not match.group(1):
                        img_xtra = match.group(1)
                file_desc = '%s.%03d%s.%s' % (
                    show_obj.indexerid, num_files, ('.%s%s' % (img_source, img_xtra), '')['' == img_source], crc)
                cur_file_path = self.fanart_path(file_desc)
                result = metadata_generator.write_image(img_data, cur_file_path)
                if result and self.FANART != self.which_type(cur_file_path):
                    try:
                        ek.ek(os.remove, cur_file_path)
                    except OSError as e:
                        logger.log(u'Unable to remove %s: %s / %s' % (cur_file_path, repr(e), str(e)), logger.WARNING)
                    continue
                if img_source:
                    sources += [img_source]
                num_files += (0, 1)[result]
                success += (0, 1)[result]
                if num_files > max_files:
                    break
            if count_urls:
                total = len(ek.ek(glob.glob, dest_path))
                logger.log(u'Saved %s of %s fanart images%s. Cached %s of max %s fanart file%s'
                           % (success, count_urls,
                              ('', ' from ' + ', '.join([x for x in list(set(sources))]))[0 < len(sources)],
                              total, sickbeard.FANART_LIMIT, helpers.maybe_plural(total)))
            return bool(count_urls) and not bool(count_urls - success)

        img_data = metadata_generator.retrieve_show_image(img_type_name, show_obj)
        if None is img_data:
            return False
        result = metadata_generator.write_image(img_data, dest_path)
        if result:
            logger.log(u'Saved image type %s' % img_type_name)
        return result
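The fanart branch de-duplicates images by content rather than by name: each payload is reduced to an unsigned 32-bit CRC (the & 0xFFFFFFFF mask matters on Python 2, where zlib.crc32 can return negative values). The fingerprint idiom on its own:

import zlib

def fingerprint(data):
    # '%05X' only sets a minimum width; longer CRCs print all 8 hex digits
    return '%05X' % (zlib.crc32(data) & 0xFFFFFFFF)

seen = set()
for blob in (b'image-bytes-1', b'image-bytes-2', b'image-bytes-1'):
    crc = fingerprint(blob)
    if crc in seen:
        continue  # same payload already cached, skip the duplicate
    seen.add(crc)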
Example #7
def wanted_episodes(show, from_date, make_dict=False):
    initial_qualities, archive_qualities = common.Quality.splitQuality(
        show.quality)
    all_qualities = list(set(initial_qualities + archive_qualities))

    my_db = db.DBConnection()

    if show.air_by_date:
        sql_string = 'SELECT ep.status, ep.season, ep.episode, ep.airdate FROM [tv_episodes] AS ep, [tv_shows] AS show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.showid = ? AND show.air_by_date = 1'
    else:
        sql_string = 'SELECT status, season, episode, airdate FROM [tv_episodes] WHERE showid = ? AND season > 0'

    if sickbeard.SEARCH_UNAIRED:
        status_list = [common.WANTED, common.FAILED, common.UNAIRED]
        sql_string += ' AND ( airdate > ? OR airdate = 1 )'
    else:
        status_list = [common.WANTED, common.FAILED]
        sql_string += ' AND airdate > ?'

    sql_results = my_db.select(
        sql_string, [show.indexerid, from_date.toordinal()])

    # check through the list of statuses to see if we want any
    if make_dict:
        wanted = {}
    else:
        wanted = []
    total_wanted = total_replacing = total_unaired = 0
    downloaded_status_list = (common.DOWNLOADED, common.SNATCHED,
                              common.SNATCHED_PROPER, common.SNATCHED_BEST)
    for result in sql_results:
        not_downloaded = True
        cur_composite_status = int(result['status'])
        cur_status, cur_quality = common.Quality.splitCompositeStatus(
            cur_composite_status)

        if show.archive_firstmatch and cur_status in downloaded_status_list and cur_quality in archive_qualities:
            continue

        # special case: already downloaded quality is not in any of the wanted Qualities
        other_quality_downloaded = False
        if cur_status in downloaded_status_list and cur_quality not in all_qualities:
            other_quality_downloaded = True
            wanted_qualities = all_qualities
        else:
            wanted_qualities = archive_qualities

        if archive_qualities:
            highest_wanted_quality = max(wanted_qualities)
        else:
            if other_quality_downloaded:
                highest_wanted_quality = max(initial_qualities)
            else:
                highest_wanted_quality = 0

        # if we need a better one then say yes
        if (cur_status in downloaded_status_list and cur_quality < highest_wanted_quality) or \
            cur_status in status_list or \
                (sickbeard.SEARCH_UNAIRED and 1 == result['airdate'] and cur_status in (common.SKIPPED, common.IGNORED,
                                                                                        common.UNAIRED, common.UNKNOWN,
                                                                                        common.FAILED)):

            if cur_status in (common.WANTED, common.FAILED):
                total_wanted += 1
            elif cur_status in (common.UNAIRED, common.SKIPPED, common.IGNORED,
                                common.UNKNOWN):
                total_unaired += 1
            else:
                total_replacing += 1
                not_downloaded = False

            ep_obj = show.getEpisode(int(result['season']),
                                     int(result['episode']))
            if make_dict:
                wanted.setdefault(ep_obj.season, []).append(ep_obj)
            else:
                ep_obj.wantedQuality = [
                    i for i in
                    (initial_qualities if not_downloaded else wanted_qualities)
                    if (i > cur_quality and i != common.Quality.UNKNOWN)
                ]
                wanted.append(ep_obj)

    if 0 < total_wanted + total_replacing + total_unaired:
        actions = []
        for msg, total in ['%d episode%s', total_wanted], \
                          ['to upgrade %d episode%s', total_replacing], \
                          ['%d unaired episode%s', total_unaired]:
            if 0 < total:
                actions.append(msg % (total, helpers.maybe_plural(total)))
        logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))

    return wanted
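The wantedQuality list comprehension keeps only qualities that beat what is already on disk, and it must also drop UNKNOWN because that flag sits above every real quality value. A worked fragment with hypothetical flag values (the real constants live in sickbeard.common, where UNKNOWN is the top bit):

SDTV, HDTV, FULLHD = 1, 2, 4
UNKNOWN = 1 << 15  # compares above everything, hence the explicit exclusion

cur_quality = SDTV
wanted = [i for i in (SDTV, HDTV, FULLHD, UNKNOWN)
          if i > cur_quality and i != UNKNOWN]
print(wanted)
# prints: [2, 4]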
Example #8
    def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None,
                    failed=False, pp_type='auto', cleanup=False, showObj=None):
        """
        Scans through the files in dir_name and processes whatever media files it finds

        dir_name: The folder name to look in
        nzb_name: The NZB name which resulted in this folder being downloaded
        force: True to postprocess already postprocessed files
        failed: Boolean for whether or not the download failed
        pp_type: Type of postprocessing auto or manual
        """

        # if they passed us a real directory then assume it's the one we want
        if dir_name and ek.ek(os.path.isdir, dir_name):
            dir_name = ek.ek(os.path.realpath, dir_name)

        # if the client and SickGear are not on the same machine translate the directory in a network directory
        elif dir_name and sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR)\
                and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
            dir_name = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR,
                             ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
            self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickbeard.TV_DOWNLOAD_DIR)

        if dir_name:
            self._log_helper(u'Checking folder... ' + dir_name)

        # if we didn't find a real directory then process "failed" or just quit
        if not dir_name or not ek.ek(os.path.isdir, dir_name):
            if nzb_name and failed:
                self._process_failed(dir_name, nzb_name, showObj=showObj)
            else:
                self._log_helper(u'Unable to figure out what folder to process. ' +
                                 u'If your downloader and SickGear aren\'t on the same PC then make sure ' +
                                 u'you fill out your completed TV download folder in the PP config.')
            return self.result

        parent = self.find_parent(dir_name)
        if parent:
            self._log_helper('Dir %s is subdir of show root dir: %s, not processing.' % (dir_name, parent))
            return self.result

        if dir_name == sickbeard.TV_DOWNLOAD_DIR:
            self.is_basedir = True

        if None is showObj:
            if isinstance(nzb_name, basestring):
                showObj = self.check_name(re.sub(r'\.(nzb|torrent)$', '', nzb_name, flags=re.I))

            if None is showObj and dir_name:
                showObj = self.check_name(ek.ek(os.path.basename, dir_name))

        path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)

        if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
            self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
            return self.result

        if not process_method:
            process_method = sickbeard.PROCESS_METHOD

        self._log_helper(u'Processing folder... %s' % path)

        work_files = []
        joined = self.join(path)
        if joined:
            work_files += [joined]

        rar_files, rarfile_history = self.unused_archives(
            path, filter(helpers.is_first_rar_volume, files), pp_type, process_method)
        rar_content = self._unrar(path, rar_files, force)
        if self.fail_detected:
            self._process_failed(dir_name, nzb_name, showObj=showObj)
            return self.result
        rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, path, x))]
        path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
        files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, path, x))]
        video_files = filter(helpers.has_media_ext, files)
        video_in_rar = filter(helpers.has_media_ext, rar_content)
        work_files += [ek.ek(os.path.join, path, item) for item in rar_content]

        if 0 < len(files):
            self._log_helper(u'Process file%s: %s' % (helpers.maybe_plural(files), str(files)))
        if 0 < len(video_files):
            self._log_helper(u'Process video file%s: %s' % (helpers.maybe_plural(video_files), str(video_files)))
        if 0 < len(rar_content):
            self._log_helper(u'Process rar content: ' + str(rar_content))
        if 0 < len(video_in_rar):
            self._log_helper(u'Process video%s in rar: %s' % (helpers.maybe_plural(video_in_rar), str(video_in_rar)))

        # If nzb_name is set and there's more than one videofile in the folder, files will be lost (overwritten).
        nzb_name_original = nzb_name
        if 2 <= len(video_files):
            nzb_name = None

        if None is showObj and 0 < len(video_files):
            showObj = self.check_video_filenames(path, video_files)

        # self._set_process_success()

        # Don't Link media when the media is extracted from a rar in the same path
        if process_method in ('hardlink', 'symlink') and video_in_rar:
            soh = showObj
            if None is showObj:
                soh = self.check_video_filenames(path, video_in_rar)
            self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace, showObj=soh)
            self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
            video_batch = set(video_files) - set(video_in_rar)
        else:
            video_batch = video_files

        try:
            while 0 < len(video_batch):
                video_pick = ['']
                video_size = 0
                for cur_video_file in video_batch:
                    cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file))
                    if 0 == video_size or cur_video_size > video_size:
                        video_size = cur_video_size
                        video_pick = [cur_video_file]

                video_batch = set(video_batch) - set(video_pick)

                self._process_media(path, video_pick, nzb_name, process_method, force, force_replace,
                                    use_trash=cleanup, showObj=showObj)

        except OSError as e:
            logger.log('Batch skipped, %s%s' %
                       (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)

        # Process video files in TV subdirectories
        for directory in [x for x in dirs if self._validate_dir(
                path, x, nzb_name_original, failed,
                showObj=self.showObj_helper(showObj, dir_name, x, nzb_name, pp_type))]:

            # self._set_process_success(reset=True)

            for walk_path, walk_dir, files in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):

                if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
                    self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
                    return self.result

                parent = self.find_parent(walk_path)
                if parent:
                    self._log_helper('Dir %s is subdir of show root dir: %s, not processing files.' %
                                     (walk_path, parent))
                    continue

                # Ignore any symlinks at this stage to avoid the potential for unraring a symlinked archive
                files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, walk_path, x))]

                rar_files, rarfile_history = self.unused_archives(
                    walk_path, filter(helpers.is_first_rar_volume, files), pp_type, process_method, rarfile_history)
                rar_content = self._unrar(walk_path, rar_files, force)
                work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content]
                if self.fail_detected:
                    self._process_failed(dir_name, nzb_name, showObj=self.showObj_helper(showObj, directory))
                    continue
                rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, walk_path, x))]
                files = list(set(files + rar_content))
                video_files = filter(helpers.has_media_ext, files)
                video_in_rar = filter(helpers.has_media_ext, rar_content)
                notwanted_files = [x for x in files if x not in video_files]

                # Don't Link media when the media is extracted from a rar in the same path
                if process_method in ('hardlink', 'symlink') and video_in_rar:
                    self._process_media(walk_path, video_in_rar, nzb_name, 'move', force, force_replace,
                                        showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type,
                                                                    self.check_video_filenames(walk_dir, video_in_rar)))
                    video_batch = set(video_files) - set(video_in_rar)
                else:
                    video_batch = video_files

                try:
                    while 0 < len(video_batch):
                        video_pick = ['']
                        video_size = 0
                        for cur_video_file in video_batch:
                            cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, walk_path, cur_video_file))

                            if 0 == video_size or cur_video_size > video_size:
                                video_size = cur_video_size
                                video_pick = [cur_video_file]

                        video_batch = set(video_batch) - set(video_pick)

                        self._process_media(
                            walk_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup,
                            showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type,
                                                        self.check_video_filenames(walk_dir, video_pick)))

                except OSError as e:
                    logger.log('Batch skipped, %s%s' %
                               (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)

                if process_method in ('hardlink', 'symlink') and video_in_rar:
                    self._delete_files(walk_path, rar_content)
                else:
                    # Delete all file not needed
                    if not self.any_vid_processed\
                        or 'move' != process_method\
                            or ('manual' == pp_type and not cleanup):  # Avoid deleting files if Manual Postprocessing
                        continue

                    self._delete_files(walk_path, notwanted_files, use_trash=cleanup)

                    if 'move' == process_method\
                            and ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR) != ek.ek(os.path.normpath, walk_path):
                        self._delete_folder(walk_path, check_empty=False)

        if 'copy' == process_method and work_files:
            self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
            for f in sorted(list(set([ek.ek(os.path.dirname, item) for item in work_files]) - {path}),
                            key=len, reverse=True):
                self._delete_folder(f)

        def _bottom_line(text, log_level=logger.DEBUG):
            self._buffer('-' * len(text))
            self._log_helper(text, log_level)

        notifiers.notify_update_library(ep_obj=None, flush_q=True)

        if self.any_vid_processed:
            if not self.files_failed:
                _bottom_line(u'Successfully processed.', logger.MESSAGE)
            else:
                _bottom_line(u'Successfully processed at least one video file%s.' %
                             (', others were skipped', ' and skipped another')[1 == self.files_failed], logger.MESSAGE)
        else:
            _bottom_line(u'Failed! Did not process any files.', logger.WARNING)

        return self.result
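Both batch loops in process_dir repeatedly pull the largest remaining video out of the set, so the main feature file is handled before samples and extras. The selection reduced to a sketch, using max() in place of the explicit size-scan loop and plain os.path instead of the ek wrappers:

import os

def largest_first(path, video_files):
    # hypothetical reduction of the batch loops in process_dir
    remaining = set(video_files)
    while remaining:
        pick = max(remaining,
                   key=lambda name: os.path.getsize(os.path.join(path, name)))
        remaining -= {pick}
        yield pick  # process this file, e.g. via _process_media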
Example #9
def search_for_needed_episodes(episodes):
    found_results = {}

    search_done = False

    orig_thread_name = threading.currentThread().name

    providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]

    for cur_provider in providers:
        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)

        cur_found_results = cur_provider.search_rss(episodes)

        search_done = True

        # pick a single result for each episode, respecting existing results
        for cur_ep in cur_found_results:

            if cur_ep.show.paused:
                logger.log(u'Show %s is paused, ignoring all RSS items for %s' %
                           (cur_ep.show.name, cur_ep.prettyName()), logger.DEBUG)
                continue

            # find the best result for the current episode
            best_result = pick_best_result(cur_found_results[cur_ep], cur_ep.show, filter_rls=orig_thread_name)

            # if all results were rejected move on to the next episode
            if not best_result:
                logger.log(u'All found results for %s were rejected.' % cur_ep.prettyName(), logger.DEBUG)
                continue

            # if it's already in the list (from another provider) and the newly found quality is no better then skip it
            if cur_ep in found_results and best_result.quality <= found_results[cur_ep].quality:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
                best_result.content = None
                if not best_result.url.startswith('magnet'):
                    best_result.content = best_result.provider.get_url(best_result.url)
                    if best_result.provider.should_skip():
                        break
                    if not best_result.content:
                        continue

            found_results[cur_ep] = best_result

            try:
                cur_provider.save_list()
            except (StandardError, Exception):
                pass

    threading.currentThread().name = orig_thread_name

    if not len(providers):
        logger.log('No NZB/Torrent providers in Media Providers/Options are enabled to match recent episodes', logger.WARNING)
    elif not search_done:
        logger.log('Failed recent search of %s enabled provider%s. More info in debug log.' % (
            len(providers), helpers.maybe_plural(len(providers))), logger.ERROR)

    return found_results.values()
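The thread rename at the top of the provider loop is what prefixes every log line with the provider in play, and the original name is restored once the loop ends. The same pattern as a reusable sketch, with the restore made exception-safe via try/finally (threading.current_thread is the modern spelling of currentThread):

import threading

def with_provider_context(provider_name, job):
    # hypothetical helper mirroring the rename/restore in search_for_needed_episodes
    thread = threading.current_thread()
    orig_name = thread.name
    thread.name = '%s :: [%s]' % (orig_name, provider_name)
    try:
        return job()  # log lines emitted inside job() carry the provider tag
    finally:
        thread.name = orig_name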
Example #10
def search_for_needed_episodes(episodes):
    found_results = {}

    search_done = False

    orig_thread_name = threading.currentThread().name

    providers = [
        x for x in sickbeard.providers.sortedProviderList()
        if x.is_active() and x.enable_recentsearch
    ]

    for cur_provider in providers:
        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name,
                                                         cur_provider.name)

        cur_found_results = cur_provider.search_rss(episodes)

        search_done = True

        # pick a single result for each episode, respecting existing results
        for cur_ep in cur_found_results:

            if cur_ep.show.paused:
                logger.log(
                    u'Show %s is paused, ignoring all RSS items for %s' %
                    (cur_ep.show.name, cur_ep.prettyName()), logger.DEBUG)
                continue

            # find the best result for the current episode
            best_result = pick_best_result(cur_found_results[cur_ep],
                                           cur_ep.show)

            # if all results were rejected move on to the next episode
            if not best_result:
                logger.log(
                    u'All found results for %s were rejected.' %
                    cur_ep.prettyName(), logger.DEBUG)
                continue

            # if it's already in the list (from another provider) and the newly found quality is no better then skip it
            if cur_ep in found_results and best_result.quality <= found_results[
                    cur_ep].quality:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType and 'blackhole' != sickbeard.TORRENT_METHOD:
                best_result.content = None
                if not best_result.url.startswith('magnet'):
                    best_result.content = best_result.provider.get_url(
                        best_result.url)
                    if not best_result.content:
                        continue

            found_results[cur_ep] = best_result

    threading.currentThread().name = orig_thread_name

    if not len(providers):
        logger.log(
            'No NZB/Torrent sources enabled in Search Provider options to do recent searches',
            logger.WARNING)
    elif not search_done:
        logger.log(
            'Failed recent search of %s enabled provider%s. More info in debug log.'
            % (len(providers), helpers.maybe_plural(len(providers))),
            logger.ERROR)

    return found_results.values()
Example #11
    def _search_provider(self, search_params, **kwargs):

        api_key = self._check_auth()

        base_params = {
            "t": "tvsearch",
            "maxage": sickbeard.USENET_RETENTION or 0,
            "limit": 100,
            "attrs": "rageid",
            "offset": 0,
        }

        if isinstance(api_key, basestring):
            base_params["apikey"] = api_key

        results = []
        total, cnt, search_url, exit_log = 0, len(results), "", False

        for mode in search_params.keys():
            for i, params in enumerate(search_params[mode]):

                # category ids
                cat = []
                cat_anime = ("5070", "6070")["nzbs_org" == self.get_id()]
                cat_sport = "5060"
                if "Episode" == mode or "Season" == mode:
                    if not ("rid" in params or "tvdbid" in params or "q" in params or not self.supports_tvdbid()):
                        logger.log("Error no rid, tvdbid, or search term available for search.")
                        continue

                    if self.show:
                        if self.show.is_sports:
                            cat = [cat_sport]
                        elif self.show.is_anime:
                            cat = [cat_anime]
                else:
                    cat = [cat_sport, cat_anime]

                if self.cat_ids or len(cat):
                    base_params["cat"] = ",".join(sorted(set(self.cat_ids.split(",") + cat)))

                request_params = base_params.copy()
                request_params.update(params)

                offset = 0
                batch_count = not 0

                # hardcoded to stop after a max of 4 hits (400 items) per query
                while (offset <= total) and (offset < (200, 400)[self.supports_tvdbid()]) and batch_count:
                    cnt = len(results)
                    search_url = "%sapi?%s" % (self.url, urllib.urlencode(request_params))

                    data = self.cache.getRSSFeed(search_url)
                    i and time.sleep(1.1)

                    if not data or not self.check_auth_from_data(data):
                        break

                    for item in data.entries:

                        title, url = self._title_and_url(item)
                        if title and url:
                            results.append(item)
                        else:
                            logger.log(
                                u"The data returned from %s is incomplete, this result is unusable" % self.name,
                                logger.DEBUG,
                            )

                    # get total and offset attribs
                    try:
                        if 0 == total:
                            total = int(data.feed.newznab_response["total"] or 0)
                            hits = total / 100 + int(0 < (total % 100))
                            hits += int(0 == hits)
                        offset = int(data.feed.newznab_response["offset"] or 0)
                    except AttributeError:
                        break

                    # No items found, prevent from doing another search
                    if 0 == total:
                        break

                    # Cache mode, prevent from doing another search
                    if "Cache" == mode:
                        exit_log = True
                        break

                    if offset != request_params["offset"]:
                        logger.log("Tell your newznab provider to fix their bloody newznab responses")
                        break

                    request_params["offset"] += request_params["limit"]
                    if total <= request_params["offset"]:
                        exit_log = True
                        logger.log(
                            "%s item%s found that will be used for episode matching"
                            % (total, helpers.maybe_plural(total)),
                            logger.DEBUG,
                        )
                        break

                    # there are more items available than the amount given in one call, grab some more
                    items = total - request_params["offset"]
                    logger.log(
                        "%s more item%s to fetch from a batch of up to %s items."
                        % (items, helpers.maybe_plural(items), request_params["limit"]),
                        logger.DEBUG,
                    )

                    batch_count = self._log_result(results, mode, cnt, search_url)

                if exit_log:
                    self._log_result(results, mode, cnt, search_url)
                    exit_log = False

                if "tvdbid" in request_params and len(results):
                    break

        return results
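The paging math in both newznab searches turns the reported total into a number of 100-item requests; the division plus remainder test is a ceiling division, clamped to at least one request. Standalone (the provider code writes total / 100, which truncates under Python 2; // makes that explicit):

def expected_hits(total, limit=100):
    hits = total // limit + int(0 < (total % limit))  # ceil(total / limit)
    return hits + int(0 == hits)  # never fewer than one request

assert 1 == expected_hits(0)
assert 1 == expected_hits(100)
assert 2 == expected_hits(101)
assert 4 == expected_hits(400)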
Example #12
    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            self._change_missing_episodes()

            show_list = sickbeard.showList
            from_date = datetime.date.fromordinal(1)
            needed = common.neededQualities()
            for curShow in show_list:
                if curShow.paused:
                    continue

                wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)

                if wanted_eps:
                    if not needed.all_needed:
                        if not needed.all_types_needed:
                            needed.check_needed_types(curShow)
                        if not needed.all_qualities_needed:
                            for w in wanted_eps:
                                if needed.all_qualities_needed:
                                    break
                                if not w.show.is_anime and not w.show.is_sports:
                                    needed.check_needed_qualities(w.wantedQuality)

                    self.episodes.extend(wanted_eps)

            if sickbeard.DOWNLOAD_PROPERS:
                properFinder.get_needed_qualites(needed)

            self.update_providers(needed=needed)
            self._check_for_propers(needed)

            if not self.episodes:
                logger.log(u'No search of cache for episodes required')
                self.success = True
            else:
                num_shows = len(set([ep.show.name for ep in self.episodes]))
                logger.log(u'Found %d needed episode%s spanning %d show%s'
                           % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                              num_shows, helpers.maybe_plural(num_shows)))

                try:
                    logger.log(u'Beginning recent search for episodes')
                    found_results = search.search_for_needed_episodes(self.episodes)

                    if not len(found_results):
                        logger.log(u'No needed episodes found')
                    else:
                        for result in found_results:
                            # just use the first result for now
                            logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                            self.success = search.snatch_episode(result)
                            if self.success:
                                for ep in result.episodes:
                                    self.snatched_eps.add((ep.show.indexer, ep.show.indexerid, ep.season, ep.episode))

                            helpers.cpu_sleep()

                except (StandardError, Exception):
                    logger.log(traceback.format_exc(), logger.ERROR)

                if None is self.success:
                    self.success = False

        finally:
            self.finish()
Example #13
    def _do_search(self,
                   search_params,
                   search_mode='eponly',
                   epcount=0,
                   age=0):

        api_key = self._check_auth()

        if 'rid' not in search_params and 'q' not in search_params:
            logger.log('Error no rid or search term given.')
            return []

        params = {
            't': 'tvsearch',
            'maxage': sickbeard.USENET_RETENTION,
            'limit': 100,
            'attrs': 'rageid',
            'offset': 0
        }

        # category ids
        cat = []
        if self.show:
            if self.show.is_sports:
                cat = ['5060']
            elif self.show.is_anime:
                cat = ['5070']
        params['cat'] = ','.join([self.cat_ids] + cat)

        # if max_age is set, use it, don't allow it to be missing
        if not params['maxage'] or age:
            params['maxage'] = age

        if search_params:
            params.update(search_params)

        if isinstance(api_key, basestring):
            params['apikey'] = api_key

        results = []
        offset = total = 0

        # hardcoded to stop after a max of 4 hits (400 items) per query
        while (offset <= total) and (offset < 400):
            search_url = '%sapi?%s' % (self.url, urllib.urlencode(params))
            logger.log(u'Search url: ' + search_url, logger.DEBUG)

            data = self.cache.getRSSFeed(search_url)
            time.sleep(1.1)
            if not data or not self.check_auth_from_data(data):
                break

            for item in data.entries:

                title, url = self._get_title_and_url(item)
                if title and url:
                    results.append(item)
                else:
                    logger.log(
                        u'The data returned from %s is incomplete, this result is unusable'
                        % self.name, logger.DEBUG)

            # get total and offset attribs
            try:
                if 0 == total:
                    total = int(data.feed.newznab_response['total'] or 0)
                    hits = (total / 100 + int(0 < (total % 100)))
                    hits += int(0 == hits)
                offset = int(data.feed.newznab_response['offset'] or 0)
            except AttributeError:
                break

            # No items found, prevent from doing another search
            if 0 == total:
                break

            if offset != params['offset']:
                logger.log(
                    'Tell your newznab provider to fix their bloody newznab responses'
                )
                break

            params['offset'] += params['limit']
            if total <= params['offset']:
                logger.log(
                    '%s item%s found that will be used for episode matching' %
                    (total, helpers.maybe_plural(total)), logger.DEBUG)
                break

            # there are more items available than the amount given in one call, grab some more
            items = total - params['offset']
            logger.log(
                '%s more item%s to fetch from a batch of up to %s items.' %
                (items, helpers.maybe_plural(items), params['limit']),
                logger.DEBUG)
        return results
Example #14
def wantedEpisodes(show, fromDate, make_dict=False):
    initialQualities, archiveQualities = common.Quality.splitQuality(show.quality)
    allQualities = list(set(initialQualities + archiveQualities))

    myDB = db.DBConnection()

    if show.air_by_date:
        sqlString = 'SELECT ep.status, ep.season, ep.episode, ep.airdate FROM [tv_episodes] AS ep, [tv_shows] AS show WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 AND ep.showid = ? AND show.air_by_date = 1'
    else:
        sqlString = 'SELECT status, season, episode, airdate FROM [tv_episodes] WHERE showid = ? AND season > 0'

    if sickbeard.SEARCH_UNAIRED:
        statusList = [common.WANTED, common.FAILED, common.UNAIRED]
        sqlString += ' AND ( airdate > ? OR airdate = 1 )'
    else:
        statusList = [common.WANTED, common.FAILED]
        sqlString += ' AND airdate > ?'

    sqlResults = myDB.select(sqlString, [show.indexerid, fromDate.toordinal()])

    # check through the list of statuses to see if we want any
    if make_dict:
        wanted = {}
    else:
        wanted = []
    total_wanted = total_replacing = total_unaired = 0
    downloadedStatusList = (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST)
    for result in sqlResults:
        not_downloaded = True
        curCompositeStatus = int(result["status"])
        curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)

        if show.archive_firstmatch and curStatus in downloadedStatusList and curQuality in archiveQualities:
            continue

        # special case: already downloaded quality is not in any of the wanted Qualities
        other_quality_downloaded = False
        if curStatus in downloadedStatusList and curQuality not in allQualities:
            other_quality_downloaded = True
            wantedQualities = allQualities
        else:
            wantedQualities = archiveQualities

        if archiveQualities:
            highestWantedQuality = max(wantedQualities)
        else:
            if other_quality_downloaded:
                highestWantedQuality = max(initialQualities)
            else:
                highestWantedQuality = 0

        # if we need a better one then say yes
        if (curStatus in downloadedStatusList and curQuality < highestWantedQuality) or curStatus in statusList or (sickbeard.SEARCH_UNAIRED and result['airdate'] == 1 and curStatus in (common.SKIPPED, common.IGNORED, common.UNAIRED, common.UNKNOWN, common.FAILED)):

            if curStatus in (common.WANTED, common.FAILED):
                total_wanted += 1
            elif curStatus in (common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN):
                total_unaired += 1
            else:
                total_replacing += 1
                not_downloaded = False

            epObj = show.getEpisode(int(result["season"]), int(result["episode"]))
            if make_dict:
                wanted.setdefault(epObj.season, []).append(epObj)
            else:
                epObj.wantedQuality = [i for i in (initialQualities if not_downloaded else wantedQualities) if (i > curQuality and i != common.Quality.UNKNOWN)]
                wanted.append(epObj)

    if 0 < total_wanted + total_replacing + total_unaired:
        actions = []
        for msg, total in ['%d episode%s', total_wanted], ['to upgrade %d episode%s', total_replacing], ['%d unaired episode%s', total_unaired]:
            if 0 < total:
                actions.append(msg % (total, helpers.maybe_plural(total)))
        logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))

    return wanted
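curStatus and curQuality come packed into a single integer that splitCompositeStatus takes apart. The encoding is not shown in these excerpts; a rough, hypothetical sketch consistent with how it is used here would scale the quality into the hundreds (the real Quality class is more involved, its quality values being bit flags):

def composite_status(status, quality):
    # hypothetical packing: hundreds carry the quality, the remainder the status
    return status + 100 * quality

def split_composite_status(composite, known_qualities):
    for quality in sorted(known_qualities, reverse=True):
        if composite > 100 * quality:
            return composite - 100 * quality, quality
    return composite, 0  # no quality component present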
Ejemplo n.º 20
0
    def _search_provider(self, search_params, **kwargs):

        api_key = self._check_auth()

        base_params = {
            't': 'tvsearch',
            'maxage': sickbeard.USENET_RETENTION or 0,
            'limit': 100,
            'attrs': 'rageid',
            'offset': 0
        }

        if isinstance(api_key, basestring):
            base_params['apikey'] = api_key

        results = []
        total, cnt, search_url, exit_log = 0, len(results), '', False

        for mode in search_params.keys():
            for i, params in enumerate(search_params[mode]):

                # category ids
                cat = []
                cat_anime = ('5070', '6070')['nzbs_org' == self.get_id()]
                cat_sport = '5060'
                if 'Episode' == mode or 'Season' == mode:
                    if not ('rid' in params or 'tvdbid' in params
                            or 'q' in params or not self.supports_tvdbid()):
                        logger.log(
                            'Error no rid, tvdbid, or search term available for search.'
                        )
                        continue

                    if self.show:
                        if self.show.is_sports:
                            cat = [cat_sport]
                        elif self.show.is_anime:
                            cat = [cat_anime]
                else:
                    cat = [cat_sport, cat_anime]

                if self.cat_ids or len(cat):
                    base_params['cat'] = ','.join(
                        sorted(set(self.cat_ids.split(',') + cat)))

                request_params = base_params.copy()
                request_params.update(params)

                offset = 0
                batch_count = not 0

                # hardcoded to stop after a max of 4 hits (400 items) per query
                while (offset <= total) and (
                        offset <
                    (200, 400)[self.supports_tvdbid()]) and batch_count:
                    cnt = len(results)
                    search_url = '%sapi?%s' % (
                        self.url, urllib.urlencode(request_params))

                    data = self.cache.getRSSFeed(search_url)
                    i and time.sleep(1.1)

                    if not data or not self.check_auth_from_data(data):
                        break

                    for item in data.entries:

                        title, url = self._title_and_url(item)
                        if title and url:
                            results.append(item)
                        else:
                            logger.log(
                                u'The data returned from %s is incomplete, this result is unusable'
                                % self.name, logger.DEBUG)

                    # get total and offset attributes
                    try:
                        if 0 == total:
                            total = int(data.feed.newznab_response['total']
                                        or 0)
                            hits = (total / 100 + int(0 < (total % 100)))
                            hits += int(0 == hits)
                        offset = int(data.feed.newznab_response['offset'] or 0)
                    except AttributeError:
                        break

                    # No items found, prevent doing another search
                    if 0 == total:
                        break

                    # Cache mode, prevent doing another search
                    if 'Cache' == mode:
                        exit_log = True
                        break

                    if offset != request_params['offset']:
                        logger.log(
                            'Tell your newznab provider to fix their bloody newznab responses'
                        )
                        break

                    request_params['offset'] += request_params['limit']
                    if total <= request_params['offset']:
                        exit_log = True
                        logger.log(
                            '%s item%s found that will be used for episode matching'
                            % (total, helpers.maybe_plural(total)),
                            logger.DEBUG)
                        break

                    # there are more items available than the amount given in one call, grab some more
                    items = total - request_params['offset']
                    logger.log(
                        '%s more item%s to fetch from a batch of up to %s items.'
                        % (items, helpers.maybe_plural(items),
                           request_params['limit']), logger.DEBUG)

                    batch_count = self._log_result(results, mode, cnt,
                                                   search_url)

                if exit_log:
                    self._log_result(results, mode, cnt, search_url)
                    exit_log = False

                if 'tvdbid' in request_params and len(results):
                    break

        return results
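The while loop above implements newznab paging by feeding the response's total and offset attributes back into the request params. The same arithmetic in isolation, assuming a fetch(offset, limit) callable that returns an (items, reported_total) pair:

def fetch_all(fetch, limit=100, max_items=400):
    # Page through a result set the way the provider loop above does:
    # advance 'offset' by 'limit' and stop at the reported total, at
    # max_items, or when a batch comes back empty.
    results, offset, total = [], 0, 1
    while offset <= total and offset < max_items:
        items, total = fetch(offset, limit)
        if not items or 0 == total:
            break
        results.extend(items)
        offset += limit
    return results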
Ejemplo n.º 21
0
    def process_dir(self, dir_name, nzb_name=None, process_method=None, force=False, force_replace=None,
                    failed=False, pp_type='auto', cleanup=False, showObj=None):
        """
        Scans through the files in dir_name and processes whatever media files it finds

        dir_name: The folder name to look in
        nzb_name: The NZB name which resulted in this folder being downloaded
        force: True to postprocess already postprocessed files
        failed: Boolean for whether or not the download failed
        pp_type: Type of postprocessing auto or manual
        """

        # if they passed us a real directory then assume it's the one we want
        if dir_name and ek.ek(os.path.isdir, dir_name):
            dir_name = ek.ek(os.path.realpath, dir_name)

        # if the client and SickGear are not on the same machine, translate the directory into a network directory
        elif dir_name and sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR)\
                and ek.ek(os.path.normpath, dir_name) != ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR):
            dir_name = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR,
                             ek.ek(os.path.abspath, dir_name).split(os.path.sep)[-1])
            self._log_helper(u'SickGear PP Config, completed TV downloads folder: ' + sickbeard.TV_DOWNLOAD_DIR)

        if dir_name:
            self._log_helper(u'Checking folder... ' + dir_name)

        # if we didn't find a real directory then process "failed" or just quit
        if not dir_name or not ek.ek(os.path.isdir, dir_name):
            if nzb_name and failed:
                self._process_failed(dir_name, nzb_name, showObj=showObj)
            else:
                self._log_helper(u'Unable to figure out what folder to process. ' +
                                 u'If your downloader and SickGear aren\'t on the same PC then make sure ' +
                                 u'you fill out your completed TV download folder in the PP config.')
            return self.result

        if dir_name == sickbeard.TV_DOWNLOAD_DIR:
            self.is_basedir = True

        if None is showObj:
            if isinstance(nzb_name, basestring):
                showObj = self.check_name(re.sub(r'\.(nzb|torrent)$', '', nzb_name, flags=re.I))

            if None is showObj and dir_name:
                showObj = self.check_name(ek.ek(os.path.basename, dir_name))

        path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)

        if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
            self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
            return self.result

        if not process_method:
            process_method = sickbeard.PROCESS_METHOD

        self._log_helper(u'Processing folder... %s' % path)

        work_files = []
        joined = self.join(path)
        if joined:
            work_files += [joined]

        rar_files, rarfile_history = self.unused_archives(
            path, filter(helpers.is_first_rar_volume, files), pp_type, process_method)
        rar_content = self._unrar(path, rar_files, force)
        if self.fail_detected:
            self._process_failed(dir_name, nzb_name, showObj=showObj)
            return self.result
        rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, path, x))]
        path, dirs, files = self._get_path_dir_files(dir_name, nzb_name, pp_type)
        files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, path, x))]
        video_files = filter(helpers.has_media_ext, files)
        video_in_rar = filter(helpers.has_media_ext, rar_content)
        work_files += [ek.ek(os.path.join, path, item) for item in rar_content]

        if 0 < len(files):
            self._log_helper(u'Process file%s: %s' % (helpers.maybe_plural(files), str(files)))
        if 0 < len(video_files):
            self._log_helper(u'Process video file%s: %s' % (helpers.maybe_plural(video_files), str(video_files)))
        if 0 < len(rar_content):
            self._log_helper(u'Process rar content: ' + str(rar_content))
        if 0 < len(video_in_rar):
            self._log_helper(u'Process video%s in rar: %s' % (helpers.maybe_plural(video_in_rar), str(video_in_rar)))

        # If nzb_name is set and there's more than one video file in the folder, files will be lost (overwritten).
        nzb_name_original = nzb_name
        if 2 <= len(video_files):
            nzb_name = None

        if None is showObj and 0 < len(video_files):
            showObj = self.check_video_filenames(path, video_files)

        # self._set_process_success()

        # Don't link media when the media is extracted from a rar in the same path
        if process_method in ('hardlink', 'symlink') and video_in_rar:
            soh = showObj
            if None is showObj:
                soh = self.check_video_filenames(path, video_in_rar)
            self._process_media(path, video_in_rar, nzb_name, 'move', force, force_replace, showObj=soh)
            self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
            video_batch = set(video_files) - set(video_in_rar)
        else:
            video_batch = video_files

        try:
            while 0 < len(video_batch):
                video_pick = ['']
                video_size = 0
                for cur_video_file in video_batch:
                    cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, path, cur_video_file))
                    if 0 == video_size or cur_video_size > video_size:
                        video_size = cur_video_size
                        video_pick = [cur_video_file]

                video_batch = set(video_batch) - set(video_pick)

                self._process_media(path, video_pick, nzb_name, process_method, force, force_replace,
                                    use_trash=cleanup, showObj=showObj)

        except OSError as e:
            logger.log('Batch skipped, %s%s' %
                       (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)

        # Process video files in TV subdirectories
        for directory in [x for x in dirs if self._validate_dir(
                path, x, nzb_name_original, failed,
                showObj=self.showObj_helper(showObj, dir_name, x, nzb_name, pp_type))]:

            # self._set_process_success(reset=True)

            for walk_path, walk_dir, files in ek.ek(os.walk, ek.ek(os.path.join, path, directory), topdown=False):

                if sickbeard.POSTPONE_IF_SYNC_FILES and any(filter(helpers.isSyncFile, files)):
                    self._log_helper(u'Found temporary sync files, skipping post process', logger.ERROR)
                    return self.result

                # Ignore any symlinks at this stage to avoid the potential for unraring a symlinked archive
                files = [x for x in files if not helpers.is_link(ek.ek(os.path.join, walk_path, x))]

                rar_files, rarfile_history = self.unused_archives(
                    walk_path, filter(helpers.is_first_rar_volume, files), pp_type, process_method, rarfile_history)
                rar_content = self._unrar(walk_path, rar_files, force)
                work_files += [ek.ek(os.path.join, walk_path, item) for item in rar_content]
                if self.fail_detected:
                    self._process_failed(dir_name, nzb_name, showObj=self.showObj_helper(showObj, directory))
                    continue
                rar_content = [x for x in rar_content if not helpers.is_link(ek.ek(os.path.join, walk_path, x))]
                files = list(set(files + rar_content))
                video_files = filter(helpers.has_media_ext, files)
                video_in_rar = filter(helpers.has_media_ext, rar_content)
                notwanted_files = [x for x in files if x not in video_files]

                # Don't link media when the media is extracted from a rar in the same path
                if process_method in ('hardlink', 'symlink') and video_in_rar:
                    self._process_media(walk_path, video_in_rar, nzb_name, 'move', force, force_replace,
                                        showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type,
                                                                    self.check_video_filenames(walk_dir, video_in_rar)))
                    video_batch = set(video_files) - set(video_in_rar)
                else:
                    video_batch = video_files

                try:
                    while 0 < len(video_batch):
                        video_pick = ['']
                        video_size = 0
                        for cur_video_file in video_batch:
                            cur_video_size = ek.ek(os.path.getsize, ek.ek(os.path.join, walk_path, cur_video_file))

                            if 0 == video_size or cur_video_size > video_size:
                                video_size = cur_video_size
                                video_pick = [cur_video_file]

                        video_batch = set(video_batch) - set(video_pick)

                        self._process_media(
                            walk_path, video_pick, nzb_name, process_method, force, force_replace, use_trash=cleanup,
                            showObj=self.showObj_helper(showObj, dir_name, directory, nzb_name, pp_type,
                                                        self.check_video_filenames(walk_dir, video_pick)))

                except OSError as e:
                    logger.log('Batch skipped, %s%s' %
                               (ex(e), e.filename and (' (file %s)' % e.filename) or ''), logger.WARNING)

                if process_method in ('hardlink', 'symlink') and video_in_rar:
                    self._delete_files(walk_path, rar_content)
                else:
                    # Delete all files not needed
                    if not self.any_vid_processed\
                        or 'move' != process_method\
                            or ('manual' == pp_type and not cleanup):  # Avoid deleting files if Manual Postprocessing
                        continue

                    self._delete_files(walk_path, notwanted_files, use_trash=cleanup)

                    if 'move' == process_method\
                            and ek.ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR) != ek.ek(os.path.normpath, walk_path):
                        self._delete_folder(walk_path, check_empty=False)

        if 'copy' == process_method and work_files:
            self._delete_files(path, [ek.ek(os.path.relpath, item, path) for item in work_files], force=True)
            for f in sorted(list(set([ek.ek(os.path.dirname, item) for item in work_files]) - {path}),
                            key=len, reverse=True):
                self._delete_folder(f)

        def _bottom_line(text, log_level=logger.DEBUG):
            self._buffer('-' * len(text))
            self._log_helper(text, log_level)

        notifiers.notify_update_library(ep_obj=None, flush_q=True)

        if self.any_vid_processed:
            if not self.files_failed:
                _bottom_line(u'Successfully processed.', logger.MESSAGE)
            else:
                _bottom_line(u'Successfully processed at least one video file%s.' %
                             (', others were skipped', ' and skipped another')[1 == self.files_failed], logger.MESSAGE)
        else:
            _bottom_line(u'Failed! Did not process any files.', logger.WARNING)

        return self.result
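process_dir drains each video_batch by repeatedly picking the largest remaining file, so the main feature is handled before samples and extras. That selection reduced to a sketch (plain os.path.getsize stands in for the ek.ek wrapper):

import os

def largest_first(path, video_files):
    # Yield video file names largest-first, mirroring the video_pick /
    # video_batch loop in process_dir above.
    remaining = set(video_files)
    while remaining:
        pick = max(remaining, key=lambda f: os.path.getsize(os.path.join(path, f)))
        remaining.discard(pick)
        yield pick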
Ejemplo n.º 22
0
    def _search_provider(self,
                         search_params,
                         needed=neededQualities(need_all=True),
                         max_items=400,
                         try_all_searches=False,
                         **kwargs):

        results, n_spaces = [], {}
        if self.should_skip():
            return results, n_spaces

        api_key = self._check_auth()
        if isinstance(api_key, bool) and not api_key:
            return results, n_spaces

        base_params = {
            't': 'tvsearch',
            'maxage': sickbeard.USENET_RETENTION or 0,
            'limit': self.limits,
            'attrs': ','.join([k for k, v in NewznabConstants.providerToIndexerMapping.iteritems()
                               if v in self.caps]),
            'offset': 0
        }

        uc_only = all([re.search('(?i)usenet_crawler', self.get_id())])
        base_params_uc = {'num': self.limits, 'dl': '1', 'i': '64660'}

        if isinstance(api_key, basestring) and api_key not in ('0', ''):
            base_params['apikey'] = api_key
            base_params_uc['r'] = api_key

        total, cnt, search_url, exit_log = 0, len(results), '', True

        cat_sport = self.cats.get(NewznabConstants.CAT_SPORT, ['5060'])
        cat_anime = self.cats.get(NewznabConstants.CAT_ANIME, ['5070'])
        cat_hd = self.cats.get(NewznabConstants.CAT_HD, ['5040'])
        cat_sd = self.cats.get(NewznabConstants.CAT_SD, ['5030'])
        cat_uhd = self.cats.get(NewznabConstants.CAT_UHD)
        cat_webdl = self.cats.get(NewznabConstants.CAT_WEBDL)

        for mode in search_params.keys():
            if self.should_skip(log_warning=False):
                break
            for i, params in enumerate(search_params[mode]):

                if self.should_skip(log_warning=False):
                    break

                # category ids
                cat = []
                if 'Episode' == mode or 'Season' == mode:
                    id_search_params = [v for c, v in self.caps.iteritems()
                                        if c not in (NewznabConstants.SEARCH_EPISODE,
                                                     NewznabConstants.SEARCH_SEASON)]
                    if not (any(x in params for x in id_search_params) or not self.supports_tvdbid()):
                        logger.log('Show is missing either an id or search term for search')
                        continue

                if needed.need_anime:
                    cat.extend(cat_anime)
                if needed.need_sports:
                    cat.extend(cat_sport)

                if needed.need_hd:
                    cat.extend(cat_hd)
                if needed.need_sd:
                    cat.extend(cat_sd)
                if needed.need_uhd and cat_uhd is not None:
                    cat.extend(cat_uhd)
                if needed.need_webdl and cat_webdl is not None:
                    cat.extend(cat_webdl)

                if self.cat_ids or len(cat):
                    base_params['cat'] = ','.join(sorted(set(
                        (self.cat_ids.split(',') if self.cat_ids else []) + cat)))
                    base_params_uc['t'] = base_params['cat']

                request_params = base_params.copy()
                # if ('Propers' == mode or 'nzbs_org' == self.get_id()) \
                if 'Propers' == mode \
                        and 'q' in params and not (any(x in params for x in ['season', 'ep'])):
                    request_params['t'] = 'search'
                request_params.update(params)

                # deprecated; kept as a bookmark: the newer haspretime:0|1 and nuked:0|1 params could be used here instead
                # if hasattr(self, 'filter'):
                #     if 'nzbs_org' == self.get_id():
                #         request_params['rls'] = ((0, 1)['so' in self.filter], 2)['snn' in self.filter]

                # workaround a strange glitch: for one specific provider (matched by the
                # character sum of its id), bump a maxage of 9 up to 10
                if sum(ord(i) for i in self.get_id()) in [383] and 5 == 14 - request_params['maxage']:
                    request_params['maxage'] += 1

                offset = 0
                batch_count = not 0  # i.e. True; refreshed below with each batch's new result count
                first_date = last_date = None

                # stop after a maximum number of items per query (max_items, default 400)
                while (offset <= total) and (offset < max_items) and batch_count:
                    cnt = len(results)

                    if 'Cache' == mode and uc_only:
                        search_url = '%srss?%s' % (
                            self.url, urllib.urlencode(base_params_uc))
                    else:
                        search_url = '%sapi?%s' % (
                            self.url, urllib.urlencode(request_params))
                    i and time.sleep(2.1)  # pause between successive param sets (skipped for the first)

                    data = self.get_url(search_url)

                    if self.should_skip() or not data:
                        break

                    # hack this in until it's fixed server side
                    if not data.startswith('<?xml'):
                        data = '<?xml version="1.0" encoding="ISO-8859-1" ?>%s' % data

                    try:
                        parsed_xml, n_spaces = self.cache.parse_and_get_ns(
                            data)
                        items = parsed_xml.findall('channel/item')
                    except (StandardError, Exception):
                        logger.log(
                            'Error trying to load %s RSS feed' % self.name,
                            logger.WARNING)
                        break

                    if not self._check_auth_from_data(parsed_xml, search_url):
                        break

                    if 'rss' != parsed_xml.tag:
                        logger.log(
                            'Resulting XML from %s isn\'t RSS, not parsing it'
                            % self.name, logger.WARNING)
                        break

                    i and time.sleep(2.1)

                    for item in items:

                        title, url = self._title_and_url(item)
                        if title and url:
                            results.append(item)
                        else:
                            logger.log(
                                'The data returned from %s is incomplete, this result is unusable'
                                % self.name, logger.DEBUG)

                    # get total and offset attributes
                    try:
                        if 0 == total:
                            total = (helpers.tryInt(parsed_xml.find(
                                './/%sresponse' % n_spaces['newznab']).get('total', 0)), 1000)['Cache' == mode]
                            hits = (total // self.limits + int(0 < (total % self.limits)))
                            hits += int(0 == hits)
                        offset = helpers.tryInt(parsed_xml.find(
                            './/%sresponse' % n_spaces['newznab']).get('offset', 0))
                    except (AttributeError, KeyError):
                        if not uc_only:
                            break
                        total = len(items)

                    # No items found, prevent doing another search
                    if 0 == total:
                        break

                    # Cache mode, prevent doing another search
                    if 'Cache' == mode:
                        if items and len(items):
                            if not first_date:
                                first_date = self._parse_pub_date(items[0])
                            last_date = self._parse_pub_date(items[-1])
                        if not first_date or not last_date or not self._last_recent_search or \
                                last_date <= self.last_recent_search or uc_only:
                            break

                    if offset != request_params['offset']:
                        logger.log(
                            'Ask your newznab provider to fix their newznab responses'
                        )
                        break

                    request_params['offset'] += request_params['limit']
                    if total <= request_params['offset']:
                        break

                    # there are more items available than the amount given in one call, grab some more
                    items = total - request_params['offset']
                    logger.log(
                        '%s more item%s to fetch from a batch of up to %s items.'
                        % (items, helpers.maybe_plural(items),
                           request_params['limit']), logger.DEBUG)

                    batch_count = self._log_result(results, mode, cnt,
                                                   search_url)
                    exit_log = False

                if 'Cache' == mode and first_date:
                    self.last_recent_search = first_date

                if exit_log:
                    self._log_search(mode, total, search_url)

                id_search_params = [v for c, v in self.caps.iteritems()
                                    if c not in (NewznabConstants.SEARCH_EPISODE,
                                                 NewznabConstants.SEARCH_SEASON,
                                                 NewznabConstants.SEARCH_TEXT)]
                if not try_all_searches and any(x in request_params for x in id_search_params) and len(results):
                    break

        return results, n_spaces
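Before each request, the loop above turns neededQualities flags into a comma-joined newznab cat parameter. Condensed into one function (the cats dict and its string keys are illustrative stand-ins for self.cats and the NewznabConstants keys):

def needed_cats(needed, cats):
    # Collect category ids only for the content types and qualities that
    # are actually needed, as the search loop above does.
    wanted = []
    for flag, ids in ((needed.need_anime, cats.get('anime', ['5070'])),
                      (needed.need_sports, cats.get('sport', ['5060'])),
                      (needed.need_hd, cats.get('hd', ['5040'])),
                      (needed.need_sd, cats.get('sd', ['5030'])),
                      (needed.need_uhd, cats.get('uhd')),
                      (needed.need_webdl, cats.get('webdl'))):
        if flag and ids is not None:
            wanted.extend(ids)
    return ','.join(sorted(set(wanted)))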
Ejemplo n.º 23
0
    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            self._change_missing_episodes()

            show_list = sickbeard.showList
            from_date = datetime.date.fromordinal(1)
            need_anime = need_sports = need_sd = need_hd = need_uhd = False
            max_sd = Quality.SDDVD
            hd_qualities = [Quality.HDTV, Quality.FULLHDTV, Quality.HDWEBDL, Quality.FULLHDWEBDL,
                            Quality.HDBLURAY, Quality.FULLHDBLURAY]
            max_hd = Quality.FULLHDBLURAY
            for curShow in show_list:
                if curShow.paused:
                    continue

                wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)
                if wanted_eps:
                    if not need_anime and curShow.is_anime:
                        need_anime = True
                    if not need_sports and curShow.is_sports:
                        need_sports = True
                    if not need_sd or not need_hd or not need_uhd:
                        for w in wanted_eps:
                            if need_sd and need_hd and need_uhd:
                                break
                            if not w.show.is_anime and not w.show.is_sports:
                                if Quality.UNKNOWN in w.wantedQuality:
                                    need_sd = need_hd = need_uhd = True
                                else:
                                    if not need_sd and max_sd >= min(w.wantedQuality):
                                        need_sd = True
                                    if not need_hd and any(i in hd_qualities for i in w.wantedQuality):
                                        need_hd = True
                                    if not need_uhd and max_hd < max(w.wantedQuality):
                                        need_uhd = True
                self.episodes.extend(wanted_eps)

            self.update_providers(need_anime=need_anime, need_sports=need_sports,
                                  need_sd=need_sd, need_hd=need_hd, need_uhd=need_uhd)

            if not self.episodes:
                logger.log(u'No search of cache for episodes required')
                self.success = True
            else:
                num_shows = len(set([ep.show.name for ep in self.episodes]))
                logger.log(u'Found %d needed episode%s spanning %d show%s'
                           % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                              num_shows, helpers.maybe_plural(num_shows)))

                try:
                    logger.log(u'Beginning recent search for episodes')
                    found_results = search.search_for_needed_episodes(self.episodes)

                    if not len(found_results):
                        logger.log(u'No needed episodes found')
                    else:
                        for result in found_results:
                            # just use the first result for now
                            logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                            self.success = search.snatch_episode(result)

                            helpers.cpu_sleep()

                except Exception:
                    logger.log(traceback.format_exc(), logger.DEBUG)

                if None is self.success:
                    self.success = False

        finally:
            self.finish()
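The inner loop above infers three quality flags from each wanted episode's wantedQuality list. Factored out, with the Quality constants passed in so the sketch stays self-contained:

def classify_quality_needs(wanted_quality, unknown, max_sd, hd_qualities, max_hd):
    # Mirror the per-episode checks in run(): UNKNOWN forces every flag on,
    # otherwise SD/HD/UHD needs are inferred from the quality values.
    if unknown in wanted_quality:
        return True, True, True
    need_sd = max_sd >= min(wanted_quality)
    need_hd = any(q in hd_qualities for q in wanted_quality)
    need_uhd = max_hd < max(wanted_quality)
    return need_sd, need_hd, need_uhd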
Ejemplo n.º 24
0
    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            self._change_missing_episodes()

            show_list = sickbeard.showList
            from_date = datetime.date.fromordinal(1)
            needed = common.neededQualities()
            for curShow in show_list:
                if curShow.paused:
                    continue

                wanted_eps = wanted_episodes(curShow, from_date, unaired=sickbeard.SEARCH_UNAIRED)

                if wanted_eps:
                    if not needed.all_needed:
                        if not needed.all_types_needed:
                            needed.check_needed_types(curShow)
                        if not needed.all_qualities_needed:
                            for w in wanted_eps:
                                if needed.all_qualities_needed:
                                    break
                                if not w.show.is_anime and not w.show.is_sports:
                                    needed.check_needed_qualities(
                                        w.wantedQuality)

                    self.episodes.extend(wanted_eps)

            if sickbeard.DOWNLOAD_PROPERS:
                properFinder.get_needed_qualites(needed)

            self.update_providers(needed=needed)
            self._check_for_propers(needed)

            if not self.episodes:
                logger.log(u'No search of cache for episodes required')
                self.success = True
            else:
                num_shows = len(set([ep.show.name for ep in self.episodes]))
                logger.log(u'Found %d needed episode%s spanning %d show%s' %
                           (len(self.episodes),
                            helpers.maybe_plural(len(self.episodes)),
                            num_shows, helpers.maybe_plural(num_shows)))

                try:
                    logger.log(u'Beginning recent search for episodes')
                    found_results = search.search_for_needed_episodes(
                        self.episodes)

                    if not len(found_results):
                        logger.log(u'No needed episodes found')
                    else:
                        for result in found_results:
                            # just use the first result for now
                            logger.log(u'Downloading %s from %s' %
                                       (result.name, result.provider.name))
                            self.success = search.snatch_episode(result)

                            helpers.cpu_sleep()

                except (StandardError, Exception):
                    logger.log(traceback.format_exc(), logger.ERROR)

                if None is self.success:
                    self.success = False

        finally:
            self.finish()
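Ejemplo n.º 24 replaces the hand-rolled flags of n.º 23 with a common.neededQualities object. A hedged sketch of the shape such an accumulator could take (attribute and method names come from the calls above; the bodies are assumptions):

class NeededQualities(object):
    def __init__(self, need_all=False):
        self.need_anime = self.need_sports = need_all
        self.need_sd = self.need_hd = self.need_uhd = need_all

    def check_needed_types(self, show):
        # Accumulate content-type needs across shows.
        self.need_anime |= bool(show.is_anime)
        self.need_sports |= bool(show.is_sports)

    @property
    def all_types_needed(self):
        return self.need_anime and self.need_sports

    @property
    def all_qualities_needed(self):
        return self.need_sd and self.need_hd and self.need_uhd

    @property
    def all_needed(self):
        return self.all_types_needed and self.all_qualities_needed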
Ejemplo n.º 25
0
    def _do_search(self, search_params, search_mode='eponly', epcount=0, age=0):

        api_key = self._check_auth()

        if 'rid' not in search_params and 'q' not in search_params:
            logger.log('Error: no rid or search term given.')
            return []

        params = {'t': 'tvsearch',
                  'maxage': sickbeard.USENET_RETENTION,
                  'limit': 100,
                  'attrs': 'rageid',
                  'offset': 0}

        # category ids
        cat = []
        if self.show:
            if self.show.is_sports:
                cat = ['5060']
            elif self.show.is_anime:
                cat = ['5070']
        params['cat'] = ','.join([self.cat_ids] + cat)

        # if an age was passed in, or retention is unset, use the passed age value
        if not params['maxage'] or age:
            params['maxage'] = age

        if search_params:
            params.update(search_params)

        if isinstance(api_key, basestring):
            params['apikey'] = api_key

        results = []
        offset = total = 0

        # hardcoded to stop after a max of 4 hits (400 items) per query
        while (offset <= total) and (offset < 400):
            search_url = '%sapi?%s' % (self.url, urllib.urlencode(params))
            logger.log(u'Search url: ' + search_url, logger.DEBUG)

            data = self.cache.getRSSFeed(search_url)
            time.sleep(1.1)
            if not data or not self.check_auth_from_data(data):
                break

            for item in data.entries:

                title, url = self._get_title_and_url(item)
                if title and url:
                    results.append(item)
                else:
                    logger.log(u'The data returned from %s is incomplete, this result is unusable' % self.name,
                               logger.DEBUG)

            # get total and offset attributes
            try:
                if 0 == total:
                    total = int(data.feed.newznab_response['total'] or 0)
                    hits = (total / 100 + int(0 < (total % 100)))
                    hits += int(0 == hits)
                offset = int(data.feed.newznab_response['offset'] or 0)
            except AttributeError:
                break

            # No items found, prevent doing another search
            if 0 == total:
                break

            if offset != params['offset']:
                logger.log('Tell your newznab provider to fix their bloody newznab responses')
                break

            params['offset'] += params['limit']
            if total <= params['offset']:
                logger.log('%s item%s found that will be used for episode matching' % (total, helpers.maybe_plural(total)),
                           logger.DEBUG)
                break

            # there are more items available than the amount given in one call, grab some more
            items = total - params['offset']
            logger.log('%s more item%s to fetch from a batch of up to %s items.'
                       % (items, helpers.maybe_plural(items), params['limit']), logger.DEBUG)
        return results
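Both newznab variants size the expected number of page fetches with the same integer arithmetic. As a worked helper, expected_hits(250) is 3, and expected_hits(0) is forced to 1 so an empty feed still counts as one batch:

def expected_hits(total, limit=100):
    # Ceiling division as written above, floored at one hit.
    hits = total // limit + int(0 < total % limit)
    return hits + int(0 == hits)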
Ejemplo n.º 26
0
def wanted_episodes(show, from_date, make_dict=False, unaired=False):
    initial_qualities, upgrade_qualities = common.Quality.splitQuality(
        show.quality)
    all_qualities = list(set(initial_qualities + upgrade_qualities))

    my_db = db.DBConnection()

    if show.air_by_date:
        sql_string = 'SELECT ep.status, ep.season, ep.scene_season, ep.episode, ep.airdate ' + \
                     'FROM [tv_episodes] AS ep, [tv_shows] AS show ' + \
                     'WHERE season != 0 AND ep.showid = show.indexer_id AND show.paused = 0 ' + \
                     'AND ep.showid = ? AND ep.indexer = ? AND show.air_by_date = 1'
    else:
        sql_string = 'SELECT status, season, scene_season, episode, airdate ' + \
                     'FROM [tv_episodes] ' + \
                     'WHERE showid = ? AND indexer = ? AND season > 0'

    sql_results = my_db.select(sql_string, [show.indexerid, show.indexer])
    ep_count = {}
    ep_count_scene = {}
    tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
    for result in sql_results:
        if 1 < helpers.tryInt(result['airdate']) <= tomorrow:
            cur_season = helpers.tryInt(result['season'])
            ep_count[cur_season] = ep_count.setdefault(cur_season, 0) + 1
            cur_scene_season = helpers.tryInt(result['scene_season'], -1)
            if -1 != cur_scene_season:
                ep_count_scene[cur_scene_season] = ep_count_scene.setdefault(cur_scene_season, 0) + 1

    if unaired:
        status_list = [common.WANTED, common.FAILED, common.UNAIRED]
        sql_string += ' AND ( airdate > ? OR airdate = 1 )'
    else:
        status_list = [common.WANTED, common.FAILED]
        sql_string += ' AND airdate > ?'

    sql_results = my_db.select(
        sql_string, [show.indexerid, show.indexer,
                     from_date.toordinal()])

    # check through the list of statuses to see if we want any
    if make_dict:
        wanted = {}
    else:
        wanted = []
    total_wanted = total_replacing = total_unaired = 0
    downloaded_status_list = (common.DOWNLOADED, common.SNATCHED,
                              common.SNATCHED_PROPER, common.SNATCHED_BEST)
    for result in sql_results:
        not_downloaded = True
        cur_composite_status = int(result['status'])
        cur_status, cur_quality = common.Quality.splitCompositeStatus(
            cur_composite_status)
        cur_snatched = cur_status in downloaded_status_list

        if show.archive_firstmatch and cur_snatched and cur_quality in upgrade_qualities:
            continue

        # special case: the already downloaded quality is not among the upgrade-to qualities
        other_quality_downloaded = False
        if len(upgrade_qualities) and cur_snatched and cur_quality not in all_qualities:
            other_quality_downloaded = True
            wanted_qualities = all_qualities
        else:
            wanted_qualities = upgrade_qualities

        if upgrade_qualities:
            highest_wanted_quality = max(wanted_qualities)
        else:
            if other_quality_downloaded:
                highest_wanted_quality = max(initial_qualities)
            else:
                highest_wanted_quality = 0

        # if we need a better one then say yes
        if (cur_snatched and cur_quality < highest_wanted_quality) \
                or cur_status in status_list \
                or (sickbeard.SEARCH_UNAIRED and 1 == result['airdate']
                    and cur_status in (common.SKIPPED, common.IGNORED, common.UNAIRED, common.UNKNOWN, common.FAILED)):

            if cur_status in (common.WANTED, common.FAILED):
                total_wanted += 1
            elif cur_status in (common.UNAIRED, common.SKIPPED, common.IGNORED,
                                common.UNKNOWN):
                total_unaired += 1
            else:
                total_replacing += 1
                not_downloaded = False

            ep_obj = show.getEpisode(int(result['season']), int(result['episode']))
            ep_obj.wantedQuality = [i for i in (wanted_qualities, initial_qualities)[not_downloaded]
                                    if cur_quality < i]
            ep_obj.eps_aired_in_season = ep_count.get(helpers.tryInt(result['season']), 0)
            ep_obj.eps_aired_in_scene_season = ep_count_scene.get(
                helpers.tryInt(result['scene_season']), 0) if result['scene_season'] else ep_obj.eps_aired_in_season
            if make_dict:
                wanted.setdefault(ep_obj.scene_season if ep_obj.show.is_scene else ep_obj.season, []).append(ep_obj)
            else:
                wanted.append(ep_obj)

    if 0 < total_wanted + total_replacing + total_unaired:
        actions = []
        for msg, total in ['%d episode%s', total_wanted], \
                          ['to upgrade %d episode%s', total_replacing], \
                          ['%d unaired episode%s', total_unaired]:
            if 0 < total:
                actions.append(msg % (total, helpers.maybe_plural(total)))
        logger.log(u'We want %s for %s' % (' and '.join(actions), show.name))

    return wanted
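wanted_episodes hinges on common.Quality.splitCompositeStatus unpacking a single status integer into a (status, quality) pair. A hedged sketch of that decoding (the hundreds-based packing is an assumption from SickBeard-era code, not something this listing confirms):

def split_composite_status(composite, qualities):
    # Assumed encoding: composite = status + 100 * quality, so peel off
    # the largest quality multiple to recover both values.
    for quality in sorted(qualities, reverse=True):
        if quality and composite > quality * 100:
            return composite - quality * 100, quality
    return composite, 0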
Ejemplo n.º 27
0
    def _cache_image_from_indexer(self, show_obj, img_type, num_files=0, max_files=500):
        """
        Retrieves an image of the type specified from indexer and saves it to the cache folder

        returns: bool representing success

        show_obj: TVShow object that we want to cache an image for
        img_type: BANNER, POSTER, or FANART
        """

        # generate the path based on the type & indexer_id
        if img_type == self.POSTER:
            img_type_name = 'poster'
            dest_path = self.poster_path(show_obj.indexerid)
        elif img_type == self.BANNER:
            img_type_name = 'banner'
            dest_path = self.banner_path(show_obj.indexerid)
        elif img_type == self.FANART:
            img_type_name = 'fanart_all'
            dest_path = self.fanart_path(show_obj.indexerid).replace(
                'fanart.jpg', '*')
        elif img_type == self.POSTER_THUMB:
            img_type_name = 'poster_thumb'
            dest_path = self.poster_thumb_path(show_obj.indexerid)
        elif img_type == self.BANNER_THUMB:
            img_type_name = 'banner_thumb'
            dest_path = self.banner_thumb_path(show_obj.indexerid)
        else:
            logger.log(u'Invalid cache image type: ' + str(img_type),
                       logger.ERROR)
            return False

        # retrieve the image from indexer using the generic metadata class
        metadata_generator = GenericMetadata()
        if img_type == self.FANART:
            image_urls = metadata_generator.retrieve_show_image(
                img_type_name, show_obj)
            if None is image_urls:
                return False

            crcs = []
            for cache_file_name in ek.ek(glob.glob, dest_path):
                with open(cache_file_name, mode='rb') as resource:
                    crc = '%05X' % (zlib.crc32(resource.read()) & 0xFFFFFFFF)
                if crc not in crcs:
                    crcs += [crc]

            success = 0
            count_urls = len(image_urls)
            sources = []
            for image_url in image_urls or []:
                img_data = helpers.getURL(image_url, nocache=True)
                if None is img_data:
                    continue
                crc = '%05X' % (zlib.crc32(img_data) & 0xFFFFFFFF)
                if crc in crcs:
                    count_urls -= 1
                    continue
                crcs += [crc]
                # map the image url to a short source tag via nested index-by-bool lookups
                img_source = (((('', 'tvdb')['thetvdb.com' in image_url],
                                'tvrage')['tvrage.com' in image_url],
                               'fatv')['fanart.tv' in image_url],
                              'tmdb')['tmdb' in image_url]
                img_xtra = ''
                if 'tmdb' == img_source:
                    match = re.search(r'(?:.*\?(\d+$))?', image_url,
                                      re.I | re.M)
                    if match and None is not match.group(1):
                        img_xtra = match.group(1)
                file_desc = '%s.%03d%s.%s' % (show_obj.indexerid, num_files,
                                              ('.%s%s' % (img_source, img_xtra), '')['' == img_source], crc)
                cur_file_path = self.fanart_path(file_desc)
                result = metadata_generator.write_image(
                    img_data, cur_file_path)
                if result and self.FANART != self.which_type(cur_file_path):
                    try:
                        ek.ek(os.remove, cur_file_path)
                    except OSError as e:
                        logger.log(
                            u'Unable to remove %s: %s / %s' %
                            (cur_file_path, repr(e), str(e)), logger.WARNING)
                    continue
                if img_source:
                    sources += [img_source]
                num_files += (0, 1)[result]
                success += (0, 1)[result]
                if num_files > max_files:
                    break
            if count_urls:
                total = len(ek.ek(glob.glob, dest_path))
                logger.log(u'Saved %s of %s fanart images%s. Cached %s of max %s fanart file%s'
                           % (success, count_urls,
                              ('', ' from ' + ', '.join(list(set(sources))))[0 < len(sources)],
                              total, sickbeard.FANART_LIMIT, helpers.maybe_plural(total)))
            return bool(count_urls) and not bool(count_urls - success)

        img_data = metadata_generator.retrieve_show_image(
            img_type_name, show_obj)
        if None is img_data:
            return False
        result = metadata_generator.write_image(img_data, dest_path)
        if result:
            logger.log(u'Saved image type %s' % img_type_name)
        return result
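The fanart branch above dedupes images by an unsigned CRC32 of the raw bytes, formatted as uppercase hex. The same fingerprinting in isolation:

import zlib

def image_crc(img_data):
    # Unsigned CRC32 rendered the way the cache code above names files.
    return '%05X' % (zlib.crc32(img_data) & 0xFFFFFFFF)

def is_new_image(img_data, seen_crcs):
    # True when this image has not been seen yet; records it when new.
    crc = image_crc(img_data)
    if crc in seen_crcs:
        return False
    seen_crcs.append(crc)
    return True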
Ejemplo n.º 28
0
def search_providers(show,
                     episodes,
                     manual_search=False,
                     torrent_only=False,
                     try_other_searches=False):
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

    provider_list = [
        x for x in sickbeard.providers.sortedProviderList()
        if x.is_active() and x.enable_backlog and (
            not torrent_only or x.providerType == GenericProvider.TORRENT)
    ]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name,
                       logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name,
                                                         cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = cur_provider.search_mode

        while True:
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(
                    show,
                    episodes,
                    search_mode,
                    manual_search,
                    try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join([
                        '%s %s candidate%s' %
                        (len(v), (('multiep', 'season')[SEASON_RESULT == k],
                                  'episode')['ep' in search_mode],
                         helpers.maybe_plural(len(v)))
                        for (k, v) in search_results.iteritems()
                    ]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(
                    u'Error while searching %s, skipping: %s' %
                    (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(ep_item.name, parse=False)
                        and ep_item.show == show, search_results[cur_ep])

                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not cur_provider.search_fallback or search_count == 2:
                break

            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' %
                       ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(
                found_results[provider_id][SEASON_RESULT], show,
                any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(
            u'%s is the highest quality of any match' %
            Quality.qualityStrings[highest_quality_overall], logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(
                u'%s is the quality of the season %s' %
                (Quality.qualityStrings[season_qual],
                 best_season_result.provider.providerType), logger.DEBUG)

            my_db = db.DBConnection()
            sql = 'SELECT episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [int(x['episode']) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                for season in set([x.season for x in episodes]):
                    if not show.wantEpisode(season, ep_num, season_qual):
                        all_wanted = False
                    else:
                        any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(
                    u'Every episode in this season is needed, downloading the whole %s %s'
                    % (best_season_result.provider.providerType,
                       best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    for season in set([x.season for x in episodes]):
                        ep_objs.append(show.getEpisode(season, ep_num))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(
                    u'No episodes from this season are wanted at this quality, ignoring the result of '
                    + best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(
                        u'Breaking apart the NZB and adding the individual ones to our results',
                        logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(
                        best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False) and r.show == show,
                        individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent all we can do is leech the entire torrent,
                # user will have to select which eps not do download in his torrent client
                else:

                    # Season result from Torrent Provider must be a full-season torrent, creating multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set '
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        for season in set([x.season for x in episodes]):
                            ep_objs.append(show.getEpisode(season, ep_num))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(
                    u'Checking usefulness of multi episode result %s' %
                    multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.hasFailed(
                        multi_result.name, multi_result.size,
                        multi_result.provider.name):
                    logger.log(
                        u'%s has previously failed, rejecting this multi episode result'
                        % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(
                            found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(
                    u'Single episode check result is... needed episodes: %s, not needed episodes: %s'
                    % (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, '
                               u'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: %s, '
                           u'multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, '
                               u'ignoring this multi episode result', logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(
                            u'A needed multi episode result overlaps with a single episode result for episode '
                            + '#%s, removing the single episode results from the list' % ep_num,
                            logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep],
                                           show)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    td = best_result.provider.get_url(best_result.url)
                    if not td:
                        continue
                    if getattr(best_result.provider, 'chk_td', None):
                        name = None
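                        # note: a .torrent file is bencoded - length-prefixed keys such as
                        # '8:announce' or '4:name' alternate with their values. This scan
                        # walks the key headers near the start of the file looking for the
                        # 'name' key, then reads the length-prefixed string after it as the
                        # release name; any parse error is caught below and the result skipped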
                        try:
                            hdr = re.findall(r'(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            for _ in range(0, 12):
                                y = x + v
                                name = 'name' == td[x:y]
                                w = re.findall(r'((?:i\d+e|d|l)?(\d+):)',
                                               td[y:y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x:x + v]
                                    break
                        except Exception:
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(
                                    name):
                                logger.log(
                                    u'Ignored: %s (debug log has detail)' %
                                    name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if it's not a duplicate; replace any lower quality duplicate
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log(
            'No NZB/Torrent sources enabled in Search Provider options to do backlog searches',
            logger.WARNING)
    elif not search_done:
        logger.log(
            'Failed backlog search of %s enabled provider%s. More info in debug log.'
            % (len(provider_list), helpers.maybe_plural(len(provider_list))),
            logger.ERROR)

    return final_results
Example no. 29
0
def search_providers(show, episodes, manual_search=False, torrent_only=False, try_other_searches=False, old_status=None, scheduled=False):
    found_results = {}
    final_results = []

    search_done = False

    orig_thread_name = threading.currentThread().name

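    # if the first episode already carries a snatched/downloaded status (anything other
    # than wanted/failed/unaired/skipped/ignored/unknown), later code restricts this
    # search to the show's preferred qualities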
    use_quality_list = None
    if episodes:
        old_status = old_status or failed_history.find_old_status(episodes[0]) or episodes[0].status
        if old_status:
            status, quality = Quality.splitCompositeStatus(old_status)
            use_quality_list = (status not in (
                common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))

    provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
                     (not torrent_only or x.providerType == GenericProvider.TORRENT) and
                     (not scheduled or x.enable_scheduled_backlog)]
    for cur_provider in provider_list:
        if cur_provider.anime_only and not show.is_anime:
            logger.log(u'%s is not an anime, skipping' % show.name, logger.DEBUG)
            continue

        threading.currentThread().name = '%s :: [%s]' % (orig_thread_name, cur_provider.name)
        provider_id = cur_provider.get_id()

        found_results[provider_id] = {}

        search_count = 0
        search_mode = getattr(cur_provider, 'search_mode', 'eponly')

        while True:
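            # first pass uses the provider's preferred mode ('eponly' = per episode,
            # 'sponly' = season pack); if the provider enables search_fallback, a
            # second pass retries in the other mode before giving up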
            search_count += 1

            if 'eponly' == search_mode:
                logger.log(u'Performing episode search for %s' % show.name)
            else:
                logger.log(u'Performing season pack search for %s' % show.name)

            try:
                cur_provider.cache._clearCache()
                search_results = cur_provider.find_search_results(show, episodes, search_mode, manual_search,
                                                                  try_other_searches=try_other_searches)
                if any(search_results):
                    logger.log(', '.join(['%s %s candidate%s' % (
                        len(v), (('multiep', 'season')[SEASON_RESULT == k], 'episode')['ep' in search_mode],
                        helpers.maybe_plural(len(v))) for (k, v) in search_results.iteritems()]))
            except exceptions.AuthException as e:
                logger.log(u'Authentication error: %s' % ex(e), logger.ERROR)
                break
            except Exception as e:
                logger.log(u'Error while searching %s, skipping: %s' % (cur_provider.name, ex(e)), logger.ERROR)
                logger.log(traceback.format_exc(), logger.ERROR)
                break
            finally:
                threading.currentThread().name = orig_thread_name

            search_done = True

            if len(search_results):
                # make a list of all the results for this provider
                for cur_ep in search_results:
                    # skip non-tv crap
                    search_results[cur_ep] = filter(
                        lambda ep_item: show_name_helpers.pass_wordlist_checks(
                            ep_item.name, parse=False, indexer_lookup=False) and
                                        ep_item.show == show, search_results[cur_ep])

                    if cur_ep in found_results[provider_id]:
                        found_results[provider_id][cur_ep] += search_results[cur_ep]
                    else:
                        found_results[provider_id][cur_ep] = search_results[cur_ep]

                break
            elif not getattr(cur_provider, 'search_fallback', False) or 2 == search_count:
                break

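            # flip the mode for the fallback pass: 'eponly' <-> 'sponly'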
            search_mode = '%sonly' % ('ep', 'sp')['ep' in search_mode]
            logger.log(u'Falling back to %s search ...' % ('season pack', 'episode')['ep' in search_mode])

        # skip to next provider if we have no results to process
        if not len(found_results[provider_id]):
            continue

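        # split the show's composite quality setting into allowed ('any') and preferred ('best') lists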
        any_qualities, best_qualities = Quality.splitQuality(show.quality)

        # pick the best season NZB
        best_season_result = None
        if SEASON_RESULT in found_results[provider_id]:
            best_season_result = pick_best_result(found_results[provider_id][SEASON_RESULT], show,
                                                  any_qualities + best_qualities)

        highest_quality_overall = 0
        for cur_episode in found_results[provider_id]:
            for cur_result in found_results[provider_id][cur_episode]:
                if Quality.UNKNOWN != cur_result.quality and highest_quality_overall < cur_result.quality:
                    highest_quality_overall = cur_result.quality
        logger.log(u'%s is the highest quality of any match' % Quality.qualityStrings[highest_quality_overall],
                   logger.DEBUG)

        # see if every episode is wanted
        if best_season_result:
            # get the quality of the season nzb
            season_qual = best_season_result.quality
            logger.log(u'%s is the quality of the season %s' % (Quality.qualityStrings[season_qual],
                                                                best_season_result.provider.providerType), logger.DEBUG)

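            # list every (season, episode) the season pack would cover, then test each
            # against wantEpisode to decide whether to grab the whole pack, split it,
            # or ignore it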
            my_db = db.DBConnection()
            sql = 'SELECT season, episode FROM tv_episodes WHERE showid = %s AND (season IN (%s))' %\
                  (show.indexerid, ','.join([str(x.season) for x in episodes]))
            ep_nums = [(int(x['season']), int(x['episode'])) for x in my_db.select(sql)]

            logger.log(u'Executed query: [%s]' % sql)
            logger.log(u'Episode list: %s' % ep_nums, logger.DEBUG)

            all_wanted = True
            any_wanted = False
            for ep_num in ep_nums:
                if not show.wantEpisode(ep_num[0], ep_num[1], season_qual):
                    all_wanted = False
                else:
                    any_wanted = True

            # if we need every ep in the season and there's nothing better then just download this and
            # be done with it (unless single episodes are preferred)
            if all_wanted and highest_quality_overall == best_season_result.quality:
                logger.log(u'Every episode in this season is needed, downloading the whole %s %s' %
                           (best_season_result.provider.providerType, best_season_result.name))
                ep_objs = []
                for ep_num in ep_nums:
                    ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                best_season_result.episodes = ep_objs

                return [best_season_result]

            elif not any_wanted:
                logger.log(u'No episodes from this season are wanted at this quality, ignoring the result of ' +
                           best_season_result.name, logger.DEBUG)
            else:
                if GenericProvider.NZB == best_season_result.provider.providerType:
                    logger.log(u'Breaking apart the NZB and adding the individual ones to our results', logger.DEBUG)

                    # if not, break it apart and add them as the lowest priority results
                    individual_results = nzbSplitter.splitResult(best_season_result)

                    individual_results = filter(
                        lambda r: show_name_helpers.pass_wordlist_checks(
                            r.name, parse=False, indexer_lookup=False) and r.show == show, individual_results)

                    for cur_result in individual_results:
                        if 1 == len(cur_result.episodes):
                            ep_num = cur_result.episodes[0].episode
                        elif 1 < len(cur_result.episodes):
                            ep_num = MULTI_EP_RESULT

                        if ep_num in found_results[provider_id]:
                            found_results[provider_id][ep_num].append(cur_result)
                        else:
                            found_results[provider_id][ep_num] = [cur_result]

                # If this is a torrent, all we can do is leech the entire torrent;
                # the user will have to deselect the unwanted eps in their torrent client
                else:

                    # a season result from a torrent provider must be a full-season torrent, so create a multi-ep result for it
                    logger.log(u'Adding multi episode result for full season torrent. In your torrent client, set ' +
                               u'the episodes that you do not want to "don\'t download"')
                    ep_objs = []
                    for ep_num in ep_nums:
                        ep_objs.append(show.getEpisode(ep_num[0], ep_num[1]))
                    best_season_result.episodes = ep_objs

                    ep_num = MULTI_EP_RESULT
                    if ep_num in found_results[provider_id]:
                        found_results[provider_id][ep_num].append(best_season_result)
                    else:
                        found_results[provider_id][ep_num] = [best_season_result]

        # go through multi-ep results and see if we really want them or not, get rid of the rest
        multi_results = {}
        if MULTI_EP_RESULT in found_results[provider_id]:
            for multi_result in found_results[provider_id][MULTI_EP_RESULT]:

                logger.log(u'Checking usefulness of multi episode result [%s]' % multi_result.name, logger.DEBUG)

                if sickbeard.USE_FAILED_DOWNLOADS and failed_history.has_failed(multi_result.name, multi_result.size,
                                                                                multi_result.provider.name):
                    logger.log(u'Rejecting previously failed multi episode result [%s]' % multi_result.name)
                    continue

                # see how many of the eps that this result covers aren't covered by single results
                needed_eps = []
                not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    # if we have results for the episode
                    if ep_num in found_results[provider_id] and 0 < len(found_results[provider_id][ep_num]):
                        needed_eps.append(ep_num)
                    else:
                        not_needed_eps.append(ep_num)

                logger.log(u'Single episode check result is... needed episodes: %s, not needed episodes: %s' %
                           (needed_eps, not_needed_eps), logger.DEBUG)

                if not not_needed_eps:
                    logger.log(u'All of these episodes were covered by single episode results, ' +
                               'ignoring this multi episode result', logger.DEBUG)
                    continue

                # check if these eps are already covered by another multi-result
                multi_needed_eps = []
                multi_not_needed_eps = []
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in multi_results:
                        multi_not_needed_eps.append(ep_num)
                    else:
                        multi_needed_eps.append(ep_num)

                logger.log(u'Multi episode check result is... multi needed episodes: ' +
                           '%s, multi not needed episodes: %s' % (multi_needed_eps, multi_not_needed_eps), logger.DEBUG)

                if not multi_needed_eps:
                    logger.log(u'All of these episodes were covered by another multi episode nzb, ' +
                               'ignoring this multi episode result',
                               logger.DEBUG)
                    continue

                # if we're keeping this multi-result then remember it
                for ep_obj in multi_result.episodes:
                    multi_results[ep_obj.episode] = multi_result

                # don't bother with the single result if we're going to get it with a multi result
                for ep_obj in multi_result.episodes:
                    ep_num = ep_obj.episode
                    if ep_num in found_results[provider_id]:
                        logger.log(u'A needed multi episode result overlaps with a single episode result for episode ' +
                                   '#%s, removing the single episode results from the list' % ep_num, logger.DEBUG)
                        del found_results[provider_id][ep_num]

        # of all the single ep results narrow it down to the best one for each episode
        final_results += set(multi_results.values())
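        # reads as: best_qualities if (use_quality_list and any(best_qualities)) else None,
        # i.e. only constrain pick_best_result when a quality restriction applies and the
        # show has preferred qualities configured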
        quality_list = use_quality_list and (None, best_qualities)[any(best_qualities)] or None
        for cur_ep in found_results[provider_id]:
            if cur_ep in (MULTI_EP_RESULT, SEASON_RESULT):
                continue

            if 0 == len(found_results[provider_id][cur_ep]):
                continue

            best_result = pick_best_result(found_results[provider_id][cur_ep], show, quality_list,
                                           filter_rls=orig_thread_name)

            # if all results were rejected move on to the next episode
            if not best_result:
                continue

            # filter out possible bad torrents from providers
            if 'torrent' == best_result.resultType:
                if not best_result.url.startswith('magnet') and None is not best_result.get_data_func:
                    best_result.url = best_result.get_data_func(best_result.url)
                    best_result.get_data_func = None  # consume only once
                    if not best_result.url:
                        continue
                if best_result.url.startswith('magnet'):
                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = None
                else:
                    cache_file = ek.ek(os.path.join, sickbeard.CACHE_DIR or helpers._getTempDir(),
                                       '%s.torrent' % (helpers.sanitizeFileName(best_result.name)))
                    if not helpers.download_file(best_result.url, cache_file, session=best_result.provider.session):
                        continue

                    try:
                        with open(cache_file, 'rb') as fh:
                            td = fh.read()
                        setattr(best_result, 'cache_file', cache_file)
                    except (StandardError, Exception):
                        continue

                    if getattr(best_result.provider, 'chk_td', None):
                        name = None
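                        # walk the bencoded key headers of the torrent file until the
                        # 'name' key is found, then read the length-prefixed string that
                        # follows as the release name; any parse error skips this result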
                        try:
                            hdr = re.findall(r'(\w+(\d+):)', td[0:6])[0]
                            x, v = len(hdr[0]), int(hdr[1])
                            while x < len(td):
                                y = x + v
                                name = 'name' == td[x: y]
                                w = re.findall(r'((?:i-?\d+e|e+|d|l+)*(\d+):)', td[y: y + 32])[0]
                                x, v = y + len(w[0]), int(w[1])
                                if name:
                                    name = td[x: x + v]
                                    break
                        except (StandardError, Exception):
                            continue
                        if name:
                            if not pass_show_wordlist_checks(name, show):
                                continue
                            if not show_name_helpers.pass_wordlist_checks(name, indexer_lookup=False):
                                logger.log('Ignored: %s (debug log has detail)' % name)
                                continue
                            best_result.name = name

                    if 'blackhole' != sickbeard.TORRENT_METHOD:
                        best_result.content = td

            # add result if it's not a duplicate; replace any lower quality duplicate
            found = False
            for i, result in enumerate(final_results):
                for best_result_ep in best_result.episodes:
                    if best_result_ep in result.episodes:
                        if best_result.quality > result.quality:
                            final_results.pop(i)
                        else:
                            found = True
            if not found:
                final_results += [best_result]

        # check that we got all the episodes we wanted first before doing a match and snatch
        wanted_ep_count = 0
        for wanted_ep in episodes:
            for result in final_results:
                if wanted_ep in result.episodes and is_final_result(result):
                    wanted_ep_count += 1

        # make sure we search every provider for results unless we found everything we wanted
        if len(episodes) == wanted_ep_count:
            break

    if not len(provider_list):
        logger.log('No NZB/Torrent providers in Media Providers/Options are allowed for active searching', logger.WARNING)
    elif not search_done:
        logger.log('Failed active search of %s enabled provider%s. More info in debug log.' % (
            len(provider_list), helpers.maybe_plural(len(provider_list))), logger.ERROR)
    elif not any(final_results):
        logger.log('No suitable candidates')

    return final_results