Example #1
def run_subs_extra_scripts(episode, subtitle, video, single=False):
    for script_name in settings.SUBTITLES_EXTRA_SCRIPTS:
        script_cmd = [
            piece for piece in re.split("( |\\\".*?\\\"|'.*?')", script_name)
            if piece.strip()
        ]
        script_cmd[0] = os.path.abspath(script_cmd[0])
        logger.debug('Absolute path to script: {0}'.format(script_cmd[0]))

        subtitle_path = subliminal.subtitle.get_subtitle_path(
            video.name, None if single else subtitle.language)

        inner_cmd = script_cmd + [
            video.name, subtitle_path, subtitle.language.opensubtitles,
            episode.show.name,
            str(episode.season),
            str(episode.episode), episode.name,
            str(episode.show.indexerid)
        ]

        # use subprocess to run the command and capture output
        logger.info('Executing command: {0}'.format(inner_cmd))
        try:
            process = subprocess.Popen(inner_cmd,
                                       stdin=subprocess.PIPE,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT,
                                       cwd=settings.DATA_DIR,
                                       universal_newlines=True)

            stdout, stderr = process.communicate()
            logger.debug('Script result: {0}'.format(
                str(stdout or stderr).strip()))

        except Exception as error:
            logger.info('Unable to run subs_extra_script: {0}'.format(
                str(error)))
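The quote-aware splitting above keeps quoted script paths intact. A standalone sketch of the same technique (the command string is made up; shlex.split is shown as the standard-library alternative, though it strips the quotes instead of keeping them):

import re
import shlex

cmd = 'python "/path with spaces/script.py" --flag'

# Same split as above: the capturing group keeps quoted chunks as single pieces.
pieces = [piece for piece in re.split(r"( |\".*?\"|'.*?')", cmd) if piece.strip()]
print(pieces)            # ['python', '"/path with spaces/script.py"', '--flag']
print(shlex.split(cmd))  # ['python', '/path with spaces/script.py', '--flag']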
Example #2
    def login(self):
        cookie_dict = dict_from_cookiejar(self.session.cookies)
        if cookie_dict.get("uid") and cookie_dict.get("pass"):
            return True

        if self.cookies:
            success, status = self.add_cookies_from_ui()
            if not success:
                logger.info(status)
                return False

            login_params = {"username": self.username, "password": self.password, "submit.x": 0, "submit.y": 0}
            login_url = self.urls["login"]
            if self.custom_url:
                if not validators.url(self.custom_url):
                    logger.warning("Invalid custom url: {0}".format(self.custom_url))
                    return False

                login_url = urljoin(self.custom_url, self.urls["login"].split(self.url)[1])

            response = self.get_url(login_url, post_data=login_params, returns="response")
            if not response or response.status_code != 200:
                logger.warning("Unable to connect to provider")
                return False

            if re.search("You tried too often", response.text):
                logger.warning("Too many login access attempts")
                return False

            if dict_from_cookiejar(self.session.cookies).get("uid") in response.text:
                return True
            else:
                logger.warning("Failed to login, check your cookies")
                return False
        else:
            logger.info("You need to set your cookies to use torrentday")
            return False
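The early return above avoids a login round-trip when the session already carries auth cookies. A minimal sketch of that check, assuming the same "uid"/"pass" cookie names:

import requests
from requests.utils import dict_from_cookiejar

def already_logged_in(session):
    # Skip the login request if the session cookies already hold the auth pair.
    cookies = dict_from_cookiejar(session.cookies)
    return bool(cookies.get("uid") and cookies.get("pass"))

session = requests.Session()
print(already_logged_in(session))  # False until the cookies are set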
Example #3
    def login(self):
        cookie_dict = dict_from_cookiejar(self.session.cookies)
        if cookie_dict.get('uid') and cookie_dict.get('pass'):
            return True

        if self.cookies:
            success, status = self.add_cookies_from_ui()
            if not success:
                logger.info(status)
                return False

            login_params = {'username': self.username, 'password': self.password, 'submit.x': 0, 'submit.y': 0}
            login_url = self.urls['login']
            if self.custom_url:
                if not validators.url(self.custom_url):
                    logger.warning("Invalid custom url: {0}".format(self.custom_url))
                    return False

                login_url = urljoin(self.custom_url, self.urls['login'].split(self.url)[1])

            response = self.get_url(login_url, post_data=login_params, returns='response')
            if not response or response.status_code != 200:
                logger.warning('Unable to connect to provider')
                return False

            if re.search('You tried too often', response.text):
                logger.warning('Too many login access attempts')
                return False

            if dict_from_cookiejar(self.session.cookies).get('uid') in response.text:
                return True
            else:
                logger.warning('Failed to login, check your cookies')
                return False
        else:
            logger.info('You need to set your cookies to use torrentday')
            return False
Example #4
    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if 'NO_DOWNLOAD_NAME' in url:
                continue

            if isinstance(url, tuple):
                referer = url[1]
                url = url[0]
            else:
                referer = '/'.join(url.split('/')[:3]) + '/'

            if url.startswith('http'):
                self.headers.update({
                    'Referer': referer
                })

            logger.info('Downloading a result from {0} at {1}'.format(self.name, url))

            downloaded_filename = download_file(url, filename, session=self.session, headers=self.headers,
                                                hooks={'response': self.get_url_hook}, return_filename=True)
            if downloaded_filename:
                if self._verify_download(downloaded_filename):
                    logger.info('Saved result to {0}'.format(downloaded_filename))
                    return True

                logger.warning('Could not download {0}'.format(url))
                remove_file_failed(downloaded_filename)

        if urls:
            logger.warning('Failed to download any results')

        return False
Example #5
    def fix_duplicate_episodes(self):

        sql_results = self.connection.select(
            "SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")

        for cur_duplicate in sql_results:
            dupe_id = cur_duplicate["showid"]
            dupe_season = cur_duplicate["season"]
            dupe_episode = cur_duplicate["episode"],
            dupe_count = cur_duplicate["count"]
            logger.debug(_("Duplicate episode detected! showid: {dupe_id} season: {dupe_season} episode {dupe_episode} count: {dupe_count}".format(
                dupe_id=dupe_id, dupe_season=dupe_season, dupe_episode=dupe_episode, dupe_count=dupe_count))
            )

            cur_dupe_results = self.connection.select(
                "SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
                [cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"],
                 int(cur_duplicate["count"]) - 1]
            )

            for cur_dupe_id in cur_dupe_results:
                current_episode_id = cur_dupe_id["episode_id"]
                logger.info(_("Deleting duplicate episode with episode_id: {current_episode_id}".format(current_episode_id=current_episode_id)))
                self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [current_episode_id])
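The detect-then-delete pattern above (GROUP BY ... HAVING for detection, ORDER BY ... LIMIT for deleting all but the oldest row) can be exercised against an in-memory SQLite database; a sketch with the schema reduced to the relevant columns:

import sqlite3

con = sqlite3.connect(":memory:")
con.row_factory = sqlite3.Row
con.execute("CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid INT, season INT, episode INT)")
con.executemany("INSERT INTO tv_episodes (showid, season, episode) VALUES (?, ?, ?)",
                [(1, 1, 1), (1, 1, 1), (1, 1, 2)])

dupes = con.execute(
    "SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes "
    "GROUP BY showid, season, episode HAVING count > 1").fetchall()
for dupe in dupes:
    # Keep the lowest episode_id; delete the (count - 1) newest duplicates.
    con.execute(
        "DELETE FROM tv_episodes WHERE episode_id IN ("
        "SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? "
        "ORDER BY episode_id DESC LIMIT ?)",
        (dupe["showid"], dupe["season"], dupe["episode"], dupe["count"] - 1))
print(con.execute("SELECT COUNT(*) FROM tv_episodes").fetchone()[0])  # 2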
Example #6
    def download_result(self, result):
        if not self.login():
            return False

        urls, filename = self._make_url(result)

        for url in urls:
            if "NO_DOWNLOAD_NAME" in url:
                continue

            if isinstance(url, tuple):
                referer = url[1]
                url = url[0]
            else:
                referer = "/".join(url.split("/")[:3]) + "/"

            if url.startswith("http"):
                self.headers.update({"Referer": referer})

            logger.info(f"Downloading a result from {self.name} at {url}")

            downloaded_filename = download_file(
                url, filename, session=self.session, headers=self.headers, hooks={"response": self.get_url_hook}, return_filename=True
            )
            if downloaded_filename:
                if self._verify_download(downloaded_filename):
                    logger.info(f"Saved result to {downloaded_filename}")
                    return True

                logger.warning(f"Could not download {url}")
                remove_file_failed(downloaded_filename)

        if urls:
            logger.warning(f"{self.name} : Failed to download any results")

        return False
Example #7
def change_version_notify(version_notify):
    """
    Enable/Disable versioncheck thread

    :param version_notify: New desired state
    """
    version_notify = checkbox_to_value(version_notify)

    if settings.VERSION_NOTIFY == version_notify:
        return True

    settings.VERSION_NOTIFY = version_notify
    if settings.VERSION_NOTIFY:
        if not settings.versionCheckScheduler.enable:
            logger.info("Starting VERSIONCHECK thread")
            settings.versionCheckScheduler.silent = False
            settings.versionCheckScheduler.enable = True
            settings.versionCheckScheduler.forceRun()
    else:
        settings.versionCheckScheduler.enable = False
        settings.versionCheckScheduler.silent = True
        logger.info("Stopping VERSIONCHECK thread")

    return True
Example #8
    def play_episode(self, episode, connection_index=0):
        """Handles playing videos on a KODI host via HTTP JSON-RPC

        Attempts to play an episode on a KODI host.

        Args:
            episode: The episode to play
            connection_index: Index of the selected host to play the episode on

        Returns:
            True or False

        """
        try:
            connection = self.connections[int(connection_index)]
        except IndexError:
            logger.warning('Incorrect KODI host passed to play an episode, aborting play')
            return False

        logger.debug("Trying to play episode on Kodi for host: " + connection.host)

        response = connection.VideoLibrary.GetTVShows(filter={"field": "title", "operator": "is", "value": episode.show.name})

        shows = []
        tvshowid = None
        if response and "result" in response and "tvshows" in response["result"]:
            shows = response["result"]["tvshows"]

        check = (episode.show.name, unquote_plus(episode.show.name))
        for show in shows:
            if ("label" in show and show["label"] in check) or ("title" in show and show["title"] in check):
                tvshowid = show["tvshowid"]

        del shows

        if tvshowid is None:
            logger.info('Could not play the item, could not find the show on Kodi')
            return False

        response = connection.VideoLibrary.GetEpisodes(
            filter={"field": "title", "operator": "is", "value": episode.name},
            season=episode.season,
            tvshowid=tvshowid,
            properties=["file"]
        )

        if response and "result" in response and "episodes" in response["result"]:
            episodes = response["result"]["episodes"]

            if len(episodes) > 1:
                logger.info('Could not play the item, too many files were returned as options and we could not choose')

            if episodes:
                connection.Player.Open(item={'file': episodes[0]['file']})
            else:
                logger.info('Could not find the episode on Kodi to play')
Example #9
def setEpisodeToWanted(show, s, e):
    """
    Sets an episode to wanted, only if it is currently skipped
    """
    epObj = show.getEpisode(s, e)
    if epObj:

        with epObj.lock:
            if epObj.status != SKIPPED or epObj.airdate == datetime.date.min:
                return

            logger.info("Setting episode {show} {ep} to wanted".format(
                show=show.name, ep=episode_num(s, e)))
            # figure out what segment the episode is in and remember it so we can backlog it

            epObj.status = WANTED
            epObj.saveToDB()

        cur_backlog_queue_item = search_queue.BacklogQueueItem(show, [epObj])
        settings.searchQueueScheduler.action.add_item(cur_backlog_queue_item)

        logger.info(
            "Starting backlog search for {show} {ep} because some episodes were set to wanted"
            .format(show=show.name, ep=episode_num(s, e)))
Example #10
    def _compile_regexes(self, regexMode):
        if regexMode == self.ANIME_REGEX:
            dbg_str = "ANIME"
            uncompiled_regex = [regexes.anime_regexes]
        elif regexMode == self.NORMAL_REGEX:
            dbg_str = "NORMAL"
            uncompiled_regex = [regexes.normal_regexes]
        else:
            dbg_str = "ALL"
            uncompiled_regex = [regexes.normal_regexes, regexes.anime_regexes]

        self.compiled_regexes = []
        for regexItem in uncompiled_regex:
            for cur_pattern_num, (cur_pattern_name,
                                  cur_pattern) in enumerate(regexItem):
                try:
                    cur_regex = re.compile(cur_pattern, re.VERBOSE | re.I)
                except re.error as errormsg:
                    logger.info(
                        f"WARNING: Invalid episode_pattern using {dbg_str} regexs, {errormsg}. {cur_pattern}"
                    )
                else:
                    self.compiled_regexes.append(
                        (cur_pattern_num, cur_pattern_name, cur_regex))
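The compile-and-skip loop above tolerates broken patterns instead of failing outright. A self-contained sketch with made-up pattern names, one deliberately invalid:

import re

patterns = [
    ("season_episode", r"s(?P<season>\d+)e(?P<episode>\d+)"),
    ("broken", r"(unclosed"),  # fails to compile and is skipped
]

compiled = []
for number, (name, pattern) in enumerate(patterns):
    try:
        regex = re.compile(pattern, re.VERBOSE | re.I)
    except re.error as errormsg:
        print(f"Invalid pattern {name}: {errormsg}")
    else:
        compiled.append((number, name, regex))

print([name for _, name, _ in compiled])  # ['season_episode']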
Example #11
    def run(self):
        super().run()
        self.started = True

        try:
            for epObj in self.segment:

                logger.info(f"Marking episode as bad: [{epObj.pretty_name()}]")

                failed_history.markFailed(epObj)

                (release, provider) = failed_history.findRelease(epObj)
                if release:
                    failed_history.logFailed(release)
                    history.logFailed(epObj, release, provider)

                failed_history.revertEpisode(epObj)
                logger.info(
                    f"Beginning failed download search for: [{epObj.pretty_name()}]"
                )

            # If it is wanted, self.downCurQuality doesn't matter.
            # If it isn't wanted, we need to make sure not to overwrite the existing episode that we reverted to!
            search_result = search.searchProviders(self.show, self.segment,
                                                   True)

            if search_result:
                for result in search_result:
                    # just use the first result for now
                    logger.info(
                        f"Downloading {result.name} from {result.provider.name}"
                    )
                    search.snatchEpisode(result)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[settings.CPU_PRESET])
            else:
                pass
                # logger.info(f"No valid episode found to retry for: [{self.segment.pretty_name()}]")
        except Exception:
            logger.debug(traceback.format_exc())

        # ## Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        super().finish()
        self.finish()
Example #12
    def migrate_config(self):
        """
        Calls each successive migration until the config is the same version as SB expects
        """

        if self.config_version > self.expected_config_version:
            logger.log_error_and_exit(
                """Your config version ({0:d}) has been incremented past what this version of SickChill supports ({1:d}).
                If you have used other forks or a newer version of SickChill, your config file may be unusable due to their modifications."""
                .format(self.config_version, self.expected_config_version))

        settings.CONFIG_VERSION = self.config_version

        while self.config_version < self.expected_config_version:
            next_version = self.config_version + 1

            if next_version in self.migration_names:
                migration_name = ": " + self.migration_names[next_version]
            else:
                migration_name = ""

            logger.info("Backing up config before upgrade")
            if not helpers.backupVersionedFile(settings.CONFIG_FILE,
                                               self.config_version):
                logger.log_error_and_exit(
                    "Config backup failed, abort upgrading config")
            else:
                logger.info("Proceeding with upgrade")

            # do the migration, expect a method named _migrate_v<num>
            logger.info("Migrating config up to version " + str(next_version) +
                        migration_name)
            getattr(self, "_migrate_v" + str(next_version))()
            self.config_version = next_version

            # save new config after migration
            settings.CONFIG_VERSION = self.config_version
            logger.info("Saving config file to disk")
            sickchill.start.save_config()
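The getattr dispatch to _migrate_v<num> methods is the core of the loop above. A minimal, hypothetical migrator showing just that mechanism:

class ConfigMigrator:
    def __init__(self, version, expected):
        self.config_version = version
        self.expected_config_version = expected

    def migrate(self):
        # Run the _migrate_v<num> methods one version at a time until current.
        while self.config_version < self.expected_config_version:
            next_version = self.config_version + 1
            getattr(self, "_migrate_v" + str(next_version))()
            self.config_version = next_version

    def _migrate_v2(self):
        print("migrating to v2")

    def _migrate_v3(self):
        print("migrating to v3")

ConfigMigrator(1, 3).migrate()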
Example #13
def determine_release_name(directory=None, release_name=None):
    """Determine a release name from an nzb and/or folder name"""

    if release_name is not None:
        if validators.url(release_name):
            logger.info(_('Downloader returned a download url rather than a release name'))
            return release_name

        logger.info(_("Using release for release name."))
        return release_name.rpartition('.')[0]

    if directory is None:
        return None

    # try to get the release name from nzb/nfo
    file_types = ["*.nzb", "*.nfo"]

    for search in file_types:

        reg_expr = re.compile(fnmatch.translate(search), re.I)
        files = [
            filename for filename in os.listdir(directory)
            if os.path.isfile(os.path.join(directory, filename))
        ]

        results = [f for f in files if reg_expr.search(f)]

        if len(results) == 1:
            found_file = os.path.basename(results[0])
            found_file = found_file.rpartition('.')[0]
            if filter_bad_releases(found_file):
                logger.info("Release name (" + found_file +
                            ") found from file (" + results[0] + ")")
                return found_file.rpartition('.')[0]

    # If that fails, we try the folder
    folder = os.path.basename(directory)
    if filter_bad_releases(folder):
        # NOTE: Multiple failed downloads will change the folder name.
        # (e.g., appending #s)
        # Should we handle that?
        logger.debug("Folder name (" + folder +
                     ") appears to be a valid release name. Using it.")
        return folder

    return None
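fnmatch.translate, used above, turns a shell glob into a regex so the match can be made case-insensitive. A self-contained sketch with made-up filenames:

import fnmatch
import re

files = ["Show.NZB", "show.nfo", "sample.mkv"]
for pattern in ("*.nzb", "*.nfo"):
    reg_expr = re.compile(fnmatch.translate(pattern), re.I)
    print(pattern, [f for f in files if reg_expr.search(f)])
# *.nzb ['Show.NZB']
# *.nfo ['show.nfo']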
Example #14
    def run(self):
        if self.enable_https:
            protocol = "https"
            ssl_options = {"certfile": self.https_cert, "keyfile": self.https_key}
        else:
            protocol = "http"
            ssl_options = None

        logger.info("Starting SickChill on " + protocol + "://" + str(self.options['host']) + ":" + str(
            self.options['port']) + "/")

        try:
            self.server = self.app.listen(self.options['port'], self.options['host'], ssl_options=ssl_options,
                                          xheaders=settings.HANDLE_REVERSE_PROXY, protocol=protocol)
        except socket_error as ex:
            err_msg = ""
            if ex.errno == errno.EADDRINUSE:  # Address/port combination already in use
                if settings.LAUNCH_BROWSER and not self.daemon:
                    sickchill.start.launchBrowser('https' if settings.ENABLE_HTTPS else 'http', self.options['port'], settings.WEB_ROOT)
                    logger.info("Launching browser and exiting")
                err_msg = "already in use!"

            logger.info(f"Could not start webserver on port {self.options['port']}: {err_msg or ex}")
            # noinspection PyProtectedMember
            os._exit(1)
        except Exception as ex:
            logger.info(f"Could not start webserver on port {self.options['port']}: {ex}")

            # noinspection PyProtectedMember
            os._exit(1)

        try:
            IOLoop.current().start()
            IOLoop.current().close(True)
        except (IOError, ValueError):
            # Ignore errors like "ValueError: I/O operation on closed kqueue fd". These might be thrown during a reload.
            pass
Example #15
    def _send_discord(self):
        discord_webhook = settings.DISCORD_WEBHOOK
        discord_name = settings.DISCORD_NAME
        avatar_icon = settings.DISCORD_AVATAR_URL
        discord_tts = bool(settings.DISCORD_TTS)

        logger.info("Sending discord message: " +
                    ', '.join(f['value'] for f in self.embed['fields']))
        logger.info("Sending discord message to url: " + discord_webhook)

        headers = {"Content-Type": "application/json"}
        try:
            r = requests.post(discord_webhook,
                              data=json.dumps(
                                  dict(embeds=[self.embed],
                                       username=discord_name,
                                       avatar_url=avatar_icon,
                                       tts=discord_tts)),
                              headers=headers)
            r.raise_for_status()
        except HTTPError as error:
            if error.response.status_code != 429 or int(
                    error.response.headers.get('X-RateLimit-Remaining')) != 0:
                raise error

            logger.info(
                'Discord rate limiting, retrying after {} seconds'.format(
                    error.response.headers.get('X-RateLimit-Reset-After')))
            time.sleep(
                int(error.response.headers.get('X-RateLimit-Reset-After')) + 1)
            r = requests.post(discord_webhook,
                              data=json.dumps(
                                  dict(embeds=[self.embed],
                                       username=discord_name,
                                       avatar_url=avatar_icon,
                                       tts=discord_tts)),
                              headers=headers)
            r.raise_for_status()
        except Exception as error:
            logger.exception("Error Sending Discord message: " + str(error))

            return False

        return True
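The 429 handling above waits out Discord's rate limit and retries once. A minimal sketch of that pattern, assuming the caller supplies the webhook URL and payload (the header name follows the example above):

import time
import requests
from requests.exceptions import HTTPError

def post_with_rate_limit_retry(url, payload):
    # Retry once after a 429, honoring X-RateLimit-Reset-After; any other
    # HTTP error is re-raised to the caller.
    response = requests.post(url, json=payload)
    try:
        response.raise_for_status()
    except HTTPError:
        if response.status_code != 429:
            raise
        time.sleep(float(response.headers.get("X-RateLimit-Reset-After", 1)) + 1)
        response = requests.post(url, json=payload)
        response.raise_for_status()
    return response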
Example #16
    def _check_response(self, data=None, files=None):
        """
        Checks the response from Download Station, and logs any errors
        params: :data: post data sent in the original request, in case we need to send it with adjusted parameters
                :files: file data being sent with the post request, if any
        """
        try:
            jdata = self.response.json()
        except (ValueError, AttributeError):
            logger.info(
                "Could not convert response to json, check the host:port: {0!r}"
                .format(self.response))
            return False

        if not jdata.get("success"):
            error_code = jdata.get("error", {}).get("code")
            if error_code == 403:
                destination = (data or {}).get("destination")
                if destination and os.path.isabs(destination):
                    data["destination"] = re.sub(r"^/volume\d/", "",
                                                 destination).lstrip("/")
                    self._request(method="post", data=data, files=files)

                    try:
                        jdata = self.response.json()
                    except ValueError:
                        return False

                    if jdata.get("success"):
                        if destination == settings.SYNOLOGY_DSM_PATH:
                            settings.SYNOLOGY_DSM_PATH = data["destination"]
                        elif destination == settings.TORRENT_PATH:
                            settings.TORRENT_PATH = data["destination"]

        if not jdata.get("success"):
            error_code = jdata.get("error", {}).get("code")
            api_method = (data or {}).get("method", "login")
            log_string = self.error_map.get(api_method, {}).get(error_code)
            if not log_string:
                logger.info(jdata)
            else:
                logger.info("{0}".format(log_string))

        return jdata.get("success")
Example #17
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """

    # check if it's a folder
    if not os.path.isdir(folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if settings.TV_DOWNLOAD_DIR and helpers.real_path(
            folder) == helpers.real_path(settings.TV_DOWNLOAD_DIR):
        return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = os.listdir(folder)
        if check_files:
            logger.info(
                "Not deleting folder {0} found the following files: {1}".
                format(folder, check_files))
            return False

        try:
            logger.info("Deleting folder (if it's empty): {0}".format(folder))
            os.rmdir(folder)
        except (OSError, IOError) as e:
            logger.warning("Warning: unable to delete folder: {0}: {1}".format(
                folder, str(e)))
            return False
    else:
        try:
            logger.info("Deleting folder: " + folder)
            shutil.rmtree(folder)
        except (OSError, IOError) as e:
            logger.warning("Warning: unable to delete folder: {0}: {1}".format(
                folder, str(e)))
            return False

    return True
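The two deletion paths above differ in safety: os.rmdir() refuses non-empty directories, while shutil.rmtree() removes the whole tree. A tiny sketch of the distinction:

import os
import shutil
import tempfile

folder = tempfile.mkdtemp()

# rmdir only works on empty directories, which is why the example pairs it
# with a listdir() check; rmtree deletes recursively regardless.
if not os.listdir(folder):
    os.rmdir(folder)
else:
    shutil.rmtree(folder)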
Example #18
    def run(self):
        super().run()

        try:
            logger.info(
                f"Beginning manual search for: [{self.segment.pretty_name()}]")
            self.started = True

            search_result = search.searchProviders(self.show, [self.segment],
                                                   True, self.downCurQuality)

            if search_result:
                # just use the first result for now
                logger.info(
                    f"Downloading {search_result[0].name} from {search_result[0].provider.name}"
                )
                self.success = search.snatchEpisode(search_result[0])

                # give the CPU a break
                time.sleep(common.cpu_presets[settings.CPU_PRESET])

            else:
                ui.notifications.message(
                    'No downloads were found',
                    "Couldn't find a download for <i>{0}</i>".format(
                        self.segment.pretty_name()))

                logger.info(
                    f"Unable to find a download for: [{self.segment.pretty_name()}]"
                )

        except Exception:
            logger.debug(traceback.format_exc())

        # ## Keep a list with the 100 last executed searches
        fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

        if self.success is None:
            self.success = False

        super().finish()
        self.finish()
Example #19
    def run(self):
        super().run()

        if not self.movie.paused:
            try:
                logger.info(f"Beginning backlog search for: [{self.movie.name}]")
                settings.movie_list.search_providers(self.movie)
                if self.movie.results:
                    for result in self.movie.results:
                        # just use the first result for now
                        logger.info(f"Downloading {result.name} from {result.provider}")
                        settings.movie_list.snatch_movie(result)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[settings.CPU_PRESET])
                else:
                    logger.info(_("No needed movie results found during backlog search for: [{name}]".format(name=self.movie.name)))
            except Exception:
                logger.debug(traceback.format_exc())

        super().finish()
        self.finish()
Example #20
    def _error_log_helper(self, exception, severity, local_variables, attempts, called_method):
        if attempts in (0, self.MAX_ATTEMPTS):  # Only log the first try and the final failure
            prefix = ("Database", "Fatal")[severity == logger.ERROR]
            # noinspection PyUnresolvedReferences
            logger.log(
                severity,
                _("{exception_severity} error executing query with {method} in database {db_location}: "
                  ).format(db_location=self.full_path,
                           method=called_method,
                           exception_severity=prefix) + str(exception),
            )

            # Let's print out all of the arguments so we can debug this better
            logger.info(traceback.format_exc())
            logger.info(_("If this happened in cache.db, you can safely stop SickChill, and delete the cache.db file without losing any data"))
            logger.info(_(f"Here are the arguments that were passed to this function (This is what the developers need to know): {local_variables}"))
Example #21
    def run(self):
        super().run()

        if not self.show.paused:
            try:
                logger.info(f"Beginning backlog search for: [{self.show.name}]")
                searchResult = search.searchProviders(self.show, self.segment, False)

                if searchResult:
                    for result in searchResult:
                        # just use the first result for now
                        logger.info(f"Downloading {result.name} from {result.provider.name}")
                        search.snatchEpisode(result)

                        # give the CPU a break
                        time.sleep(common.cpu_presets[settings.CPU_PRESET])
                else:
                    logger.info(f"No needed episodes found during backlog search for: [{self.show.name}]")
            except Exception:
                logger.debug(traceback.format_exc())

        super().finish()
        self.finish()
Example #22
    def _cache_image_from_file(self, image_path, img_type, indexer_id):
        """
        Takes the image provided and copies it to the cache folder

        :param image_path: path to the image we're caching
        :param img_type: BANNER or POSTER or FANART
        :param indexer_id: id of the show this image belongs to
        :return: bool representing success
        """

        # generate the path based on the type & indexer_id
        if img_type == self.POSTER:
            dest_path = self.poster_path(indexer_id)
        elif img_type == self.BANNER:
            dest_path = self.banner_path(indexer_id)
        elif img_type == self.FANART:
            dest_path = self.fanart_path(indexer_id)
        else:
            logger.exception("Invalid cache image type: " + str(img_type))
            return False

        # make sure the cache folder exists before we try copying to it
        if not os.path.isdir(self._cache_dir()):
            logger.info("Image cache dir didn't exist, creating it at " +
                        str(self._cache_dir()))
            os.makedirs(self._cache_dir())

        if not os.path.isdir(self._thumbnails_dir()):
            logger.info("Thumbnails cache dir didn't exist, creating it at " +
                        str(self._thumbnails_dir()))
            os.makedirs(self._thumbnails_dir())

        logger.info("Copying from " + image_path + " to " + dest_path)
        helpers.copyFile(image_path, dest_path)

        return True
Example #23
    def _check_response(self, data=None):
        """
        Checks the response from Download Station, and logs any errors
        params: :data: post data sent in the original request, in case we need to send it with adjusted parameters
        """
        try:
            jdata = self.response.json()
        except (ValueError, AttributeError):
            logger.info(
                "Could not convert response to json, check the host:port: {0!r}"
                .format(self.response))
            return False

        if not jdata.get("success"):
            error_code = jdata.get("error", {}).get("code")
            api_method = (data or {}).get("method", "login")
            log_string = self.error_map.get(api_method, {}).get(error_code)
            if not log_string:
                logger.info(jdata)
            else:
                logger.info("{0}".format(log_string))

        return jdata.get("success")
Example #24
    def start(self):
        """
        Start SickChill
        """
        # do some preliminary stuff
        settings.MY_FULLNAME = os.path.normpath(os.path.abspath(__file__))
        settings.MY_NAME = os.path.basename(settings.MY_FULLNAME)

        settings.DATA_DIR = os.path.dirname(settings.PROG_DIR)
        profile_path = str(Path.home().joinpath("sickchill").absolute())
        if check_installed():
            settings.DATA_DIR = profile_path

        if settings.DATA_DIR != profile_path:
            checks = ["sickbeard.db", "sickchill.db", "config.ini"]
            if not any(os.path.isfile(os.path.join(settings.DATA_DIR, check)) for check in checks):
                settings.DATA_DIR = profile_path

        settings.MY_ARGS = sys.argv[1:]

        # Rename the main thread
        threading.currentThread().name = "MAIN"

        args = SickChillArgumentParser(settings.DATA_DIR).parse_args()

        if args.force_update:
            result = self.force_update()
            sys.exit(int(not result))  # Ok -> 0 , Error -> 1

        settings.NO_RESIZE = args.noresize
        self.console_logging = not (hasattr(sys, "frozen") or args.quiet
                                    or args.daemon)
        self.no_launch = args.nolaunch or args.daemon
        self.forced_port = args.port
        self.run_as_daemon = args.daemon and platform.system() != "Windows"

        # The pid file is only useful in daemon mode, make sure we can write the file properly
        if bool(args.pidfile) and not self.run_as_daemon:
            if self.console_logging:
                sys.stdout.write(
                    "Not running in daemon mode. PID file creation disabled.\n"
                )

        settings.DATA_DIR = os.path.abspath(
            args.datadir) if args.datadir else settings.DATA_DIR
        settings.CONFIG_FILE = os.path.abspath(
            args.config) if args.config else os.path.join(
                settings.DATA_DIR, "config.ini")

        # Make sure that we can create the data dir
        if not os.access(settings.DATA_DIR, os.F_OK):
            try:
                os.makedirs(settings.DATA_DIR, 0o744)
            except os.error:
                raise SystemExit("Unable to create data directory: {0}".format(
                    settings.DATA_DIR))

        # Make sure we can write to the data dir
        if not os.access(settings.DATA_DIR, os.W_OK):
            raise SystemExit("Data directory must be writeable: {0}".format(
                settings.DATA_DIR))

        # Make sure we can write to the config file
        if not os.access(settings.CONFIG_FILE, os.W_OK):
            if os.path.isfile(settings.CONFIG_FILE):
                raise SystemExit("Config file must be writeable: {0}".format(
                    settings.CONFIG_FILE))
            elif not os.access(os.path.dirname(settings.CONFIG_FILE), os.W_OK):
                raise SystemExit(
                    "Config file root dir must be writeable: {0}".format(
                        os.path.dirname(settings.CONFIG_FILE)))

        os.chdir(settings.DATA_DIR)

        # Check if we need to perform a restore first
        restore_dir = os.path.join(settings.DATA_DIR, "restore")
        if os.path.exists(restore_dir):
            success = self.restore_db(restore_dir, settings.DATA_DIR)
            if self.console_logging:
                sys.stdout.write(
                    "Restore: restoring DB and config.ini {0}!\n".format(
                        ("FAILED", "SUCCESSFUL")[success]))

        # Load the config and publish it to the oldbeard package
        if self.console_logging and not os.path.isfile(settings.CONFIG_FILE):
            sys.stdout.write(
                "Unable to find {0}, all settings will be default!\n".format(
                    settings.CONFIG_FILE))

        settings.CFG = ConfigObj(settings.CONFIG_FILE,
                                 encoding="UTF-8",
                                 indent_type="  ")

        # Initialize the config and our threads
        sickchill.start.initialize(consoleLogging=self.console_logging)

        # Get PID
        settings.PID = os.getpid()

        # Build from the DB to start with
        self.load_shows_from_db()

        logger.info("Starting SickChill [{branch}] using '{config}'".format(
            branch=settings.BRANCH, config=settings.CONFIG_FILE))

        self.clear_cache()

        if settings.DEVELOPER:
            settings.movie_list = MovieList()

        web_options = {}
        if self.forced_port:
            logger.info("Forcing web server to port {port}".format(
                port=self.forced_port))
            self.start_port = self.forced_port
            web_options.update({
                "port": int(self.start_port),
            })
        else:
            self.start_port = settings.WEB_PORT

        # start web server
        self.web_server = SRWebServer(web_options)
        self.web_server.start()

        # Fire up all our threads
        sickchill.start.start()

        # Build internal name cache
        name_cache.build_name_cache()

        # Pre-populate network timezones, it isn't thread safe
        network_timezones.update_network_dict()

        # sure, why not?
        if settings.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # Check for metadata indexer updates for shows (sets the next aired ep!)
        # oldbeard.showUpdateScheduler.forceRun()

        # Launch browser
        if settings.LAUNCH_BROWSER and not self.no_launch:
            sickchill.start.launchBrowser(
                "https" if settings.ENABLE_HTTPS else "http", self.start_port,
                settings.WEB_ROOT)

        # main loop
        while True:
            time.sleep(1)
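The data-directory checks near the top of start() are plain os.access probes. A standalone sketch of the same create-then-verify sequence (the path is a placeholder):

import os

data_dir = os.path.abspath("data")  # placeholder path

# Create the data dir if it is missing, then verify it is writable,
# mirroring the startup checks above.
if not os.access(data_dir, os.F_OK):
    os.makedirs(data_dir, 0o744)
if not os.access(data_dir, os.W_OK):
    raise SystemExit("Data directory must be writeable: {0}".format(data_dir))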
Example #25
    def search(self, search_strings, age=0, ep_obj=None):
        results = []
        if not self.login():
            return results

        # Search Params
        search_params = {
            'user': self.username,
            'passkey': self.passkey,
            'search': '.',  # Dummy query for RSS search; the search param must be sent.
            'latest': 'true'
        }

        # Units
        units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']

        for mode in search_strings:
            items = []
            logger.debug(_("Search Mode: {mode}".format(mode=mode)))

            for search_string in search_strings[mode]:

                if mode != 'RSS':
                    logger.debug("Search string: {0}".format(search_string))

                    search_params['latest'] = 'false'
                    search_params['search'] = search_string

                data = self.get_url(self.urls['search'],
                                    params=search_params,
                                    returns='text')
                if not data:
                    logger.debug("No data returned from provider")
                    continue

                result = json.loads(data)
                if 'results' in result:
                    for torrent in result['results']:
                        title = torrent['release_name']
                        download_url = torrent['download_url']
                        seeders = torrent['seeders']
                        leechers = torrent['leechers']
                        if seeders < self.minseed or leechers < self.minleech:
                            logger.info(
                                "Discarding {0} because its {1}/{2} seeders/leechers does not meet the minimum of {3}/{4}"
                                .format(title, seeders, leechers, self.minseed, self.minleech))
                            continue

                        freeleech = torrent['freeleech']
                        if self.freeleech and not freeleech:
                            continue

                        size = torrent['size']
                        size = convert_size(size, units=units) or -1
                        item = {
                            'title': title,
                            'link': download_url,
                            'size': size,
                            'seeders': seeders,
                            'leechers': leechers,
                            'hash': ''
                        }
                        logger.debug(
                            "Found result: {0} with {1} seeders and {2} leechers"
                            .format(title, seeders, leechers))
                        items.append(item)

                if 'error' in result:
                    logger.warning(result['error'])

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: try_int(d.get('seeders', 0)),
                       reverse=True)
            results += items

        return results
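The final sort keys on seeders via try_int so missing or malformed counts sink to the bottom rather than raising. A self-contained sketch (try_int is reimplemented here; the project imports its own helper):

def try_int(value, default=0):
    try:
        return int(value)
    except (TypeError, ValueError):
        return default

items = [
    {"title": "a", "seeders": "5"},
    {"title": "b", "seeders": None},
    {"title": "c", "seeders": "12"},
]
items.sort(key=lambda d: try_int(d.get("seeders", 0)), reverse=True)
print([d["title"] for d in items])  # ['c', 'a', 'b']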
Example #26
    def find_needed_episodes(self,
                             episode,
                             manualSearch=False,
                             downCurQuality=False):
        needed_eps = {}
        cl = []

        cache_db_con = self._get_db()
        if not episode:
            sql_results = cache_db_con.select(
                "SELECT * FROM results WHERE provider = ?", [self.provider_id])
        elif not isinstance(episode, list):
            sql_results = cache_db_con.select(
                "SELECT * FROM results WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ?",
                [
                    self.provider_id, episode.show.indexerid, episode.season,
                    "%|" + str(episode.episode) + "|%"
                ])
        else:
            for ep_obj in episode:
                cl.append([
                    "SELECT * FROM results WHERE provider = ? AND indexerid = ? AND season = ? AND episodes LIKE ? AND quality IN ("
                    + ",".join([str(x) for x in ep_obj.wantedQuality]) + ")",
                    [
                        self.provider_id, ep_obj.show.indexerid, ep_obj.season,
                        "%|" + str(ep_obj.episode) + "|%"
                    ]
                ])

            sql_results = cache_db_con.mass_action(cl, fetchall=True)
            sql_results = list(itertools.chain(*sql_results))

        # for each cache entry
        for cur_result in sql_results:
            # get the show object, or if it's not one of our shows then ignore it
            show_obj = Show.find(settings.showList,
                                 int(cur_result["indexerid"]))
            if not show_obj:
                continue

            # ignored/required words, and non-tv junk
            if not show_name_helpers.filter_bad_releases(cur_result["name"],
                                                         show=show_obj):
                continue

            # skip if provider is anime only and show is not anime
            if self.provider.anime_only and not show_obj.is_anime:
                logger.debug("" + str(show_obj.name) +
                             " is not an anime, skiping")
                continue

            # get season and ep data (ignoring multi-eps for now)
            cur_season = int(cur_result["season"])
            if cur_season == -1:
                continue

            cur_ep = cur_result["episodes"].split("|")[1]
            if not cur_ep:
                continue

            cur_ep = int(cur_ep)

            cur_quality = int(cur_result["quality"])
            cur_release_group = cur_result["release_group"]
            cur_version = cur_result["version"]

            # if the show says we want that episode then add it to the list
            if not show_obj.wantEpisode(cur_season, cur_ep, cur_quality,
                                        manualSearch, downCurQuality):
                logger.debug("Ignoring " + cur_result["name"])
                continue

            ep_obj = show_obj.getEpisode(cur_season, cur_ep)

            # build a result object
            title = cur_result["name"]
            url = cur_result["url"]

            logger.info("Found result " + title + " at " + url)

            result = self.provider.get_result([ep_obj])
            result.show = show_obj
            result.url = url
            result.name = title
            result.quality = cur_quality
            result.release_group = cur_release_group
            result.version = cur_version
            result.content = None

            # add it to the list
            if ep_obj not in needed_eps:
                needed_eps[ep_obj] = [result]
            else:
                needed_eps[ep_obj].append(result)

        # datetime stamp this search so cache gets cleared
        self.set_last_search()

        return needed_eps
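The episodes column queried above stores a pipe-delimited list, so LIKE '%|<ep>|%' matches a single episode inside a multi-episode result. A sketch against in-memory SQLite:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE results (name TEXT, episodes TEXT)")
# Episode lists are stored pipe-delimited ("|1|2|"), so LIKE '%|2|%'
# finds results that contain episode 2.
con.execute("INSERT INTO results VALUES ('multi-ep', '|1|2|')")
con.execute("INSERT INTO results VALUES ('single-ep', '|3|')")
print(con.execute("SELECT name FROM results WHERE episodes LIKE ?", ["%|2|%"]).fetchall())
# [('multi-ep',)]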
Example #27
    def search(self, search_strings, age=0, ep_obj=None):
        results = []
        if not (self.url and self.urls):
            self.find_domain()
            if not (self.url and self.urls):
                return results

        anime = (self.show
                 and self.show.anime) or (ep_obj and ep_obj.show
                                          and ep_obj.show.anime) or False
        search_params = {
            "field": "seeders",
            "sorder": "desc",
            "category": ("tv", "anime")[anime]
        }

        for mode in search_strings:
            items = []
            logger.debug(_("Search Mode: {mode}".format(mode=mode)))
            for search_string in search_strings[mode]:

                # search_params["q"] = (search_string, None)[mode == "RSS"]
                search_params["field"] = ("seeders", "time_add")[mode == "RSS"]

                if mode != "RSS":
                    if anime:
                        continue

                    logger.debug(
                        _("Search String: {search_string}".format(
                            search_string=search_string)))

                    search_url = self.urls["search"].format(q=search_string)
                else:
                    search_url = self.urls["rss"]

                if self.custom_url:
                    if not validators.url(self.custom_url):
                        logger.warning("Invalid custom url: {0}".format(
                            self.custom_url))
                        return results
                    search_url = urljoin(self.custom_url,
                                         search_url.split(self.url)[1])

                data = self.get_url(search_url,
                                    params=OrderedDict(
                                        sorted(list(search_params.items()),
                                               key=lambda x: x[0])),
                                    returns="text")
                if not data:
                    logger.info(
                        "{url} did not return any data, it may be disabled. Trying to get a new domain"
                        .format(url=self.url))
                    self.disabled_mirrors.append(self.url)
                    self.find_domain()
                    if self.url in self.disabled_mirrors:
                        logger.info("Could not find a better mirror to try.")
                        logger.info(
                            "The search did not return data, if the results are on the site maybe try a custom url, or a different one"
                        )
                        return results

                    # This will recurse a few times until all of the mirrors are exhausted if none of them work.
                    return self.search(search_strings, age, ep_obj)

                with BS4Parser(data, "html5lib") as html:
                    labels = [
                        cell.get_text()
                        for cell in html.find(class_="firstr")("th")
                    ]
                    logger.info("Found {} results".format(
                        len(html("tr", **self.rows_selector))))
                    for result in html("tr", **self.rows_selector):
                        try:
                            download_url = urllib.parse.unquote_plus(
                                result.find(
                                    title="Torrent magnet link")["href"].split(
                                        "url=")[1]) + self._custom_trackers
                            parsed_magnet = urllib.parse.parse_qs(download_url)
                            torrent_hash = self.hash_from_magnet(download_url)
                            title = result.find(class_="torrentname").find(
                                class_="cellMainLink").get_text(strip=True)
                            if title.endswith("..."):
                                title = parsed_magnet['dn'][0]

                            if not (title and download_url):
                                if mode != "RSS":
                                    logger.debug(
                                        "Discarding torrent because We could not parse the title and url"
                                    )
                                continue

                            seeders = try_int(
                                result.find(class_="green").get_text(
                                    strip=True))
                            leechers = try_int(
                                result.find(class_="red").get_text(strip=True))

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != "RSS":
                                    logger.debug(
                                        "Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})"
                                        .format(title, seeders, leechers))
                                continue

                            if self.confirmed and not result.find(
                                    class_="ka-green"):
                                if mode != "RSS":
                                    logger.debug(
                                        "Found result " + title +
                                        " but that doesn't seem like a verified result so I'm ignoring it"
                                    )
                                continue

                            torrent_size = result("td")[labels.index(
                                "size")].get_text(strip=True)
                            size = convert_size(torrent_size) or -1

                            item = {
                                'title': title,
                                'link': download_url,
                                'size': size,
                                'seeders': seeders,
                                'leechers': leechers,
                                'hash': torrent_hash
                            }
                            if mode != "RSS":
                                logger.debug(
                                    "Found result: {0} with {1} seeders and {2} leechers"
                                    .format(title, seeders, leechers))

                            items.append(item)

                        except Exception:
                            logger.info(traceback.format_exc())
                            continue

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: try_int(d.get('seeders', 0)),
                       reverse=True)

            results += items

        return results
Example #28
    def calendar(self):
        """ Provides a subscribeable URL for iCal subscriptions
        """

        logger.info(f"Receiving iCal request from {self.request.remote_ip}")

        # Create a iCal string
        ical = 'BEGIN:VCALENDAR\r\n'
        ical += 'VERSION:2.0\r\n'
        ical += 'X-WR-CALNAME:SickChill\r\n'
        ical += 'X-WR-CALDESC:SickChill\r\n'
        ical += 'PRODID://SickChill Upcoming Episodes//\r\n'

        future_weeks = try_int(self.get_argument('future', '52'), 52)
        past_weeks = try_int(self.get_argument('past', '52'), 52)

        # Limit dates
        past_date = (datetime.date.today() +
                     datetime.timedelta(weeks=-past_weeks)).toordinal()
        future_date = (datetime.date.today() +
                       datetime.timedelta(weeks=future_weeks)).toordinal()

        # Get all the shows that are not paused and are currently on air (from kjoconnor Fork)
        main_db_con = db.DBConnection()
        # noinspection PyPep8
        calendar_shows = main_db_con.select(
            "SELECT show_name, indexer_id, network, airs, runtime FROM tv_shows WHERE "
            "( status = 'Continuing' OR status = 'Returning Series' ) AND paused != '1'"
        )
        for show in calendar_shows:
            # Get all episodes of this show airing between today and next month
            episode_list = main_db_con.select(
                "SELECT indexerid, name, season, episode, description, airdate FROM tv_episodes WHERE airdate >= ? AND airdate < ? AND showid = ?",
                (past_date, future_date, int(show["indexer_id"])))

            utc = tz.gettz('GMT')

            for episode in episode_list:
                air_date_time = network_timezones.parse_date_time(
                    episode['airdate'], show["airs"],
                    show['network']).astimezone(utc)
                air_date_time_end = air_date_time + datetime.timedelta(
                    minutes=try_int(show["runtime"], 60))

                # Create event for episode
                ical += 'BEGIN:VEVENT\r\n'
                ical += f'DTSTART:{air_date_time.strftime("%Y%m%d")}T{air_date_time.strftime("%H%M%S")}Z\r\n'
                ical += f'DTEND:{air_date_time_end.strftime("%Y%m%d")}T{air_date_time_end.strftime("%H%M%S")}Z\r\n'
                if settings.CALENDAR_ICONS:
                    ical += 'X-GOOGLE-CALENDAR-CONTENT-ICON:https://sickchill.github.io/images/ico/favicon-16.png\r\n'
                    ical += 'X-GOOGLE-CALENDAR-CONTENT-DISPLAY:CHIP\r\n'
                ical += f'SUMMARY: {show["show_name"]} - {episode["season"]}x{episode["episode"]} - {episode["name"]}\r\n'
                ical += f'UID:SickChill-{datetime.date.today().isoformat()}-{show["show_name"].replace(" ", "-")}-S{episode["season"]}E{episode["episode"]}\r\n'
                ical += f'DESCRIPTION:{show["airs"] or "(Unknown airs)"} on {show["network"] or "Unknown network"}'
                if episode['description']:
                    ical += f' \\n\\n {episode["description"].splitlines()[0]}'
                ical += '\r\nEND:VEVENT\r\n'

        # Ending the iCal
        ical += 'END:VCALENDAR'

        return ical
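The handler assembles the iCal feed by string concatenation with CRLF line endings and Z-suffixed UTC timestamps. A minimal sketch of the VEVENT assembly, with a made-up show and date:

import datetime

def vevent(start, end, summary, uid):
    # iCal lines must end with CRLF; timestamps are UTC, hence the Z suffix.
    fmt = "%Y%m%dT%H%M%SZ"
    return ("BEGIN:VEVENT\r\n"
            "DTSTART:{0}\r\nDTEND:{1}\r\n"
            "SUMMARY:{2}\r\nUID:{3}\r\n"
            "END:VEVENT\r\n").format(start.strftime(fmt), end.strftime(fmt), summary, uid)

start = datetime.datetime(2024, 1, 1, 20, 0)
ical = ("BEGIN:VCALENDAR\r\nVERSION:2.0\r\n"
        + vevent(start, start + datetime.timedelta(minutes=60), "Show - 1x01", "example-S1E1")
        + "END:VCALENDAR")
print(ical)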
Example #29
    def run(self, force=False):  # pylint:disable=too-many-branches
        """
        Runs the daily searcher, queuing selected episodes for search

        :param force: Force search
        """
        if self.amActive:
            return

        self.amActive = True
        logger.info(_("Searching for new released episodes ..."))

        if not network_timezones.network_dict:
            network_timezones.update_network_dict()

        # look one day ahead, or two days when no network timezone data is
        # available, so episodes are not missed because of unknown air times
        if network_timezones.network_dict:
            curDate = (datetime.date.today() +
                       datetime.timedelta(days=1)).toordinal()
        else:
            curDate = (datetime.date.today() +
                       datetime.timedelta(days=2)).toordinal()

        curTime = datetime.datetime.now(network_timezones.sb_timezone)

        main_db_con = db.DBConnection()
        sql_results = main_db_con.select(
            "SELECT showid, airdate, season, episode FROM tv_episodes WHERE status = ? AND (airdate <= ? and airdate > 1)",
            [common.UNAIRED, curDate])

        sql_l = []
        show = None

        for sqlEp in sql_results:
            try:
                if not show or int(sqlEp["showid"]) != show.indexerid:
                    show = Show.find(settings.showList, int(sqlEp["showid"]))

                # for when there is orphaned series in the database but not loaded into our showlist
                if not show or show.paused:
                    continue

            except MultipleShowObjectsException:
                logger.info("ERROR: expected to find a single show matching " +
                            str(sqlEp["showid"]))
                continue

            if show.airs and show.network:
                # ensure the air time is always converted to the local timezone
                air_time = network_timezones.parse_date_time(
                    sqlEp["airdate"], show.airs,
                    show.network).astimezone(network_timezones.sb_timezone)

                # filter out any episodes that haven't started airing yet,
                # but set them to the default status while they are airing
                # so they are snatched faster
                if air_time > curTime:
                    continue

            ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
            with ep.lock:
                if ep.season == 0:
                    logger.info(
                        "New episode " + ep.pretty_name +
                        " airs today, setting status to SKIPPED because it is in the specials season"
                    )
                    ep.status = common.SKIPPED
                else:
                    logger.info(
                        "New episode {0} airs today, setting to default episode status for this show: {1}"
                        .format(
                            ep.pretty_name,
                            common.statusStrings[ep.show.default_ep_status]))
                    ep.status = ep.show.default_ep_status

                sql_l.append(ep.get_sql())

        if sql_l:
            main_db_con = db.DBConnection()
            main_db_con.mass_action(sql_l)
        else:
            logger.info("No new released episodes found ...")

        # queue episode for daily search
        dailysearch_queue_item = sickchill.oldbeard.search_queue.DailySearchQueueItem()
        settings.searchQueueScheduler.action.add_item(dailysearch_queue_item)

        self.amActive = False
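
A self-contained sketch of the airdate window used above: toordinal() converts a date to the day count compared against the airdate column, and the cutoff is padded by an extra day when no network timezone data is available:

import datetime

def airdate_cutoff(have_timezone_data: bool) -> int:
    # one day of lookahead normally, two when air times cannot be localized
    days_ahead = 1 if have_timezone_data else 2
    return (datetime.date.today() + datetime.timedelta(days=days_ahead)).toordinal()

# an episode airing today falls inside the search window
assert datetime.date.today().toordinal() <= airdate_cutoff(True)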
Beispiel #30
0
    def find_search_results(self,
                            show,
                            episodes,
                            search_mode,
                            manual_search=False,
                            download_current_quality=False):
        self._check_auth()
        self.show = show

        results = {}
        items_list = []
        searched_scene_season = None

        for episode in episodes:
            cache_result = self.cache.search_cache(
                episode,
                manual_search=manual_search,
                down_cur_quality=download_current_quality)
            if cache_result:
                if episode.episode not in results:
                    results[episode.episode] = cache_result
                else:
                    results[episode.episode].extend(cache_result)

                continue

            if (len(episodes) > 1 and search_mode == 'sponly'
                    and searched_scene_season == episode.scene_season):
                continue

            search_strings = []
            searched_scene_season = episode.scene_season

            if len(episodes) > 1 and search_mode == 'sponly':
                search_strings = self.get_season_search_strings(episode)
            elif search_mode == 'eponly':
                search_strings = self.get_episode_search_strings(episode)

            for search_string in search_strings:
                items_list += self.search(search_string, ep_obj=episode)

        if len(results) == len(episodes):
            return results

        if items_list:
            items = {}
            unknown_items = []

            for item in items_list:
                quality = self.get_quality(item, anime=show.is_anime)

                if quality == Quality.UNKNOWN:
                    unknown_items.append(item)
                elif quality == Quality.NONE:
                    pass  # skip the item, e.g. an HEVC release when HEVC is not allowed by the settings
                else:
                    if quality not in items:
                        items[quality] = []
                    items[quality].append(item)

            # concatenate the buckets from best quality to worst,
            # with unknown-quality items appended last
            items_list = list(
                chain.from_iterable(v for _, v in sorted(items.items(), reverse=True)))
            items_list += unknown_items

        cl = []  # cache entries to commit in a single mass upsert at the end

        for item in items_list:
            title, url = self._get_title_and_url(item)
            seeders, leechers = self._get_seeders_and_leechers(item)
            size = self._get_size(item)

            try:
                parse_method = 'anime' if show.is_anime else 'normal'
                parse_result = NameParser(parse_method=parse_method).parse(title)
            except (InvalidNameException, InvalidShowException) as error:
                logger.debug("{0}".format(error))
                continue

            show_object = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version
            add_cache_entry = False

            if not (show_object.air_by_date or show_object.sports):
                if search_mode == 'sponly':
                    if parse_result.episode_numbers:
                        logger.debug(
                            'This is supposed to be a season pack search but the result {0} is not a valid season pack, skipping it'
                            .format(title))
                        add_cache_entry = True
                    elif not [
                            ep for ep in episodes
                            if parse_result.season_number ==
                            (ep.season, ep.scene_season)[ep.show.is_scene]
                    ]:
                        logger.debug(
                            'This season result {0} is for a season we are not searching for, skipping it'
                            .format(title))
                        add_cache_entry = True

                else:
                    # episodes that match the parsed (scene) season/episode
                    matched_episodes = [
                        ep for ep in episodes
                        if (ep.season, ep.scene_season)[ep.show.is_scene] ==
                        (parse_result.season_number,
                         parse_result.scene_season)[ep.show.is_scene]
                        and (ep.episode, ep.scene_episode)[ep.show.is_scene]
                        in parse_result.episode_numbers
                    ]
                    # fallback for anime on absolute numbering; "or []" guards
                    # against ab_episode_numbers being None
                    matched_absolute = [
                        ep for ep in episodes if ep.show.is_anime and
                        ep.absolute_number in (parse_result.ab_episode_numbers or [])
                    ]
                    if not all([
                            parse_result.season_number is not None,
                            parse_result.episode_numbers, matched_episodes
                    ]) and not all([
                            parse_result.is_anime,
                            parse_result.ab_episode_numbers is not None,
                            matched_absolute
                    ]):

                        logger.info(
                            'The result {0} doesn\'t seem to match an episode that we are currently trying to snatch, skipping it'
                            .format(title))
                        add_cache_entry = True

                if not add_cache_entry:
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                same_day_special = False

                if not parse_result.is_air_by_date:
                    logger.debug(
                        'This is supposed to be a date search but the result {0} didn\'t parse as one, skipping it'
                        .format(title))
                    add_cache_entry = True
                else:
                    air_date = parse_result.air_date.toordinal()
                    # local name chosen so it does not shadow the db module
                    main_db_con = DBConnection()
                    sql_results = main_db_con.select(
                        'SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?',
                        [show_object.indexerid, air_date])

                    # two rows for one airdate usually means a special aired
                    # the same day as a regular episode; prefer the regular one
                    if len(sql_results) == 2:
                        if int(sql_results[0]['season']) == 0 and int(
                                sql_results[1]['season']) != 0:
                            actual_season = int(sql_results[1]['season'])
                            actual_episodes = [int(sql_results[1]['episode'])]
                            same_day_special = True
                        elif int(sql_results[1]['season']) == 0 and int(
                                sql_results[0]['season']) != 0:
                            actual_season = int(sql_results[0]['season'])
                            actual_episodes = [int(sql_results[0]['episode'])]
                            same_day_special = True
                    elif len(sql_results) != 1:
                        logger.warning(
                            'Tried to look up the date for the episode {0} but the database didn\'t give proper results, skipping it'
                            .format(title))
                        add_cache_entry = True

                if not add_cache_entry and not same_day_special:
                    actual_season = int(sql_results[0]['season'])
                    actual_episodes = [int(sql_results[0]['episode'])]

            if add_cache_entry:
                logger.debug(
                    'Adding item from search to cache: {0}'.format(title))

                ci = self.cache._add_cache_entry(title,
                                                 url,
                                                 size,
                                                 seeders,
                                                 leechers,
                                                 parse_result=parse_result)

                if ci is not None:
                    cl.append(ci)

                continue

            episode_wanted = True

            for episode_number in actual_episodes:
                if not show_object.wantEpisode(actual_season, episode_number,
                                               quality, manual_search,
                                               download_current_quality):
                    episode_wanted = False
                    break

            if not episode_wanted:
                logger.debug(_('Ignoring result ') + f'{title}.')
                continue

            logger.debug(
                _('Found result {title} at {url}').format(title=title,
                                                          url=url))

            episode_object = []
            for current_episode in actual_episodes:
                episode_object.append(
                    show_object.getEpisode(actual_season, current_episode))

            result = self.get_result(episode_object)
            result.show = show_object
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)

            if len(episode_object) == 1:
                episode_number = episode_object[0].episode
                logger.debug('Single episode result.')
            elif len(episode_object) > 1:
                episode_number = MULTI_EP_RESULT
                logger.debug(
                    'Separating multi-episode result to check for later - result contains episodes: {0}'
                    .format(parse_result.episode_numbers))
            elif len(episode_object) == 0:
                episode_number = SEASON_RESULT
                logger.debug(
                    'Separating full season result to check for later')

            if episode_number not in results:
                results[episode_number] = [result]
            else:
                results[episode_number].append(result)

        if cl:

            # Access to a protected member of a client class
            cache_db = self.cache._get_db()
            cache_db.mass_upsert('results', cl)

        return results
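
A self-contained sketch (plain ints standing in for SickChill's Quality constants) of the ordering applied to items_list above: known-quality items are bucketed, concatenated from best quality to worst, and unknown-quality items are appended last:

from itertools import chain

UNKNOWN = -1  # stand-in for Quality.UNKNOWN

def order_by_quality(scored_items):
    """scored_items: iterable of (quality, item) pairs."""
    buckets, unknown_items = {}, []
    for quality, item in scored_items:
        if quality == UNKNOWN:
            unknown_items.append(item)
        else:
            buckets.setdefault(quality, []).append(item)
    # higher quality values come first, mirroring sorted(..., reverse=True)
    ordered = list(chain.from_iterable(
        items for _, items in sorted(buckets.items(), reverse=True)))
    return ordered + unknown_items

print(order_by_quality([(1, "sdtv"), (UNKNOWN, "unknown"), (8, "hd1080p")]))
# -> ['hd1080p', 'sdtv', 'unknown']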