Example #1
    def qualityFromFileMeta(filename):
        """
        Get quality from file metadata

        :param filename: Filename to analyse
        :return: Quality prefix
        """

        from hachoir_core.stream import StringInputStream
        from hachoir_parser import guessParser
        from hachoir_metadata import extractMetadata
        from hachoir_core.log import log
        log.use_print = False

        if ek(os.path.isfile, filename):
            base_filename = ek(os.path.basename, filename)
            bluray = re.search(r"blue?-?ray|hddvd|b[rd](rip|mux)", base_filename, re.I) is not None
            webdl = re.search(r"web.?dl|web(rip|mux|hd)", base_filename, re.I) is not None

            try:
                with ek(io.open, filename, "rb") as file:
                    file_metadata = extractMetadata(guessParser(StringInputStream(file.read())))
                    if file_metadata:
                        for metadata in chain([file_metadata], file_metadata.iterGroups()):
                            height = metadata.get('height', None)
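                            # tuple indexing: prefer the web-dl quality when webdl is True, otherwise choose blu-ray vs. TV based on bluray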
                            if height and height > 1000:
                                return ((Quality.FULLHDTV, Quality.FULLHDBLURAY)[bluray], Quality.FULLHDWEBDL)[webdl]
                            elif height and height > 680 and height < 800:
                                return ((Quality.HDTV, Quality.HDBLURAY)[bluray], Quality.HDWEBDL)[webdl]
                            elif height and height < 680:
                                return (Quality.SDTV, Quality.SDDVD)[re.search(r'dvd|b[rd]rip|blue?-?ray', base_filename, re.I) is not None]
            except Exception as e:
                sickbeard.logger.log(ex(e))

        return Quality.UNKNOWN
Example #2
def subtitlesLanguages(video_path):
    """Return a list detected subtitles for the given video file"""
    resultList = []

    # Search for embedded subtitles
    embedded_languages = subliminal.scan_video(video_path, subtitles=False, embedded_subtitles=not sickbeard.EMBEDDED_SUBTITLES_ALL)

    # Search subtitles in the absolute path
    if sickbeard.SUBTITLES_DIR and ek(os.path.exists, sickbeard.SUBTITLES_DIR):
        video_path = ek(os.path.join, sickbeard.SUBTITLES_DIR, ek(os.path.basename, video_path))
    # Search subtitles in the relative path
    elif sickbeard.SUBTITLES_DIR:
        video_path = ek(os.path.join, ek(os.path.dirname, video_path), sickbeard.SUBTITLES_DIR, ek(os.path.basename, video_path))

    languages = subliminal.video.scan_subtitle_languages(video_path)

    for language in languages.union(embedded_languages.subtitle_languages):
        if hasattr(language, 'opensubtitles') and language.opensubtitles:
            resultList.append(language.opensubtitles)
        elif hasattr(language, 'alpha3') and language.alpha3:
            resultList.append(language.alpha3)
        elif hasattr(language, 'alpha2') and language.alpha2:
            resultList.append(language.alpha2)

    defaultLang = wantedLanguages()

    if ('pob' in defaultLang or 'pb' in defaultLang) and ('pt' not in defaultLang and 'por' not in defaultLang):
        resultList = [x if x not in ['por', 'pt'] else u'pob' for x in resultList]

    return sorted(resultList)
Example #3
    def run(self):
        if self.enable_https:
            protocol = "https"
            self.server = HTTPServer(self.app, ssl_options={"certfile": self.https_cert, "keyfile": self.https_key})
        else:
            protocol = "http"
            self.server = HTTPServer(self.app)

        logger.log(u"Starting SickRage on " + protocol + "://" + str(self.options['host']) + ":" + str(
            self.options['port']) + "/")

        try:
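            # listen() raises if the port is already in use; in that case point the user at the running instance and exit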
            self.server.listen(self.options['port'], self.options['host'])
        except Exception:
            if sickbeard.LAUNCH_BROWSER and not self.daemon:
                sickbeard.launchBrowser('https' if sickbeard.ENABLE_HTTPS else 'http', self.options['port'], sickbeard.WEB_ROOT)
                logger.log(u"Launching browser and exiting")
            logger.log(u"Could not start webserver on port %s, already in use!" % self.options['port'])
            ek(os._exit, 1)

        try:
            self.io_loop.start()
            self.io_loop.close(True)
        except (IOError, ValueError):
            # Ignore errors like "ValueError: I/O operation on closed kqueue fd". These might be thrown during a reload.
            pass
Example #4
    def _makeURL(self, result):
        urls = []
        filename = u''
        if result.url.startswith('magnet'):
            try:
                torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper()

                try:
                    torrent_name = re.findall('dn=([^&]+)', result.url)[0]
                except Exception:
                    torrent_name = 'NO_DOWNLOAD_NAME'

                if len(torrent_hash) == 32:
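                    # a 32-character hash is base32-encoded; convert it to the standard 40-character hex form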
                    torrent_hash = b16encode(b32decode(torrent_hash)).upper()

                if not torrent_hash:
                    logger.log(u"Unable to extract torrent hash from magnet: " + ex(result.url), logger.ERROR)
                    return urls, filename

                urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.btCacheURLS]
            except Exception:
                logger.log(u"Unable to extract torrent hash or name from magnet: " + ex(result.url), logger.ERROR)
                return urls, filename
        else:
            urls = [result.url]

        if self.providerType == GenericProvider.TORRENT:
            filename = ek(os.path.join, sickbeard.TORRENT_DIR, sanitize_filename(result.name) + '.' + self.providerType)

        elif self.providerType == GenericProvider.NZB:
            filename = ek(os.path.join, sickbeard.NZB_DIR, sanitize_filename(result.name) + '.' + self.providerType)

        return urls, filename
Example #5
    def change_size(image_url, factor=3):
        match = re.search(r"^(.*)V1._(.{2})(.*?)_(.{2})(.*?),(.*?),(.*?),(.*?)_.jpg$", image_url)
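        # IMDb image URLs embed crop/size values after "V1._"; the numeric groups captured here get scaled by factor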

        if match:
            matches = match.groups()
            ek(os.path.basename, image_url)
            matches = list(matches)
            matches[2] = int(matches[2]) * factor
            matches[4] = int(matches[4]) * factor
            matches[5] = int(matches[5]) * factor
            matches[6] = int(matches[6]) * factor
            matches[7] = int(matches[7]) * factor

            return "%sV1._%s%s_%s%s,%s,%s,%s_.jpg" % (
                matches[0],
                matches[1],
                matches[2],
                matches[3],
                matches[4],
                matches[5],
                matches[6],
                matches[7],
            )
        else:
            return image_url
Example #6
    def run(self, force=False):
        """
        TODO: Rename class to PostProcessor (classname contains a typo)
        Runs the postprocessor
        :param force: Forces postprocessing run (reserved for future use)
        :return: Returns when done without a return state/code
        """
        self.amActive = True

        if not ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
            logger.log(u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " doesn't exist",
                       logger.ERROR)
            self.amActive = False
            return

        if not ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR):
            logger.log(
                u"Automatic post-processing attempted but dir " + sickbeard.TV_DOWNLOAD_DIR + " is relative (and probably not what you really want to process)",
                logger.ERROR)
            self.amActive = False
            return

        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR)

        self.amActive = False
Example #7
def change_LOG_DIR(log_dir, web_log):
    """
    Change logging directory for application and webserver

    :param log_dir: Path to new logging directory
    :param web_log: Enable/disable web logging
    :return: True on success, False on failure
    """
    log_dir_changed = False
    abs_log_dir = ek(os.path.normpath, ek(os.path.join, sickbeard.DATA_DIR, log_dir))
    web_log_value = checkbox_to_value(web_log)

    if ek(os.path.normpath, sickbeard.LOG_DIR) != abs_log_dir:
        if helpers.makeDir(abs_log_dir):
            sickbeard.ACTUAL_LOG_DIR = ek(os.path.normpath, log_dir)
            sickbeard.LOG_DIR = abs_log_dir

            logger.initLogging()
            logger.log(u"Initialized new log file in " + sickbeard.LOG_DIR)
            log_dir_changed = True

        else:
            return False

    if sickbeard.WEB_LOG != web_log_value or log_dir_changed is True:
        sickbeard.WEB_LOG = web_log_value

    return True
Example #8
    def qualityFromFileMeta(filename):
        """
        Get quality from file metadata

        :param filename: Filename to analyse
        :return: Quality prefix
        """

        from hachoir_core.stream import StringInputStream
        from hachoir_parser import guessParser
        from hachoir_metadata import extractMetadata
        from hachoir_core import config as hachoir_config
        hachoir_config.quiet = True

        if ek(os.path.isfile, filename):
            base_filename = ek(os.path.basename, filename)
            bluray = re.search(r"blue?-?ray|hddvd|b[rd](rip|mux)", base_filename, re.I) is not None
            webdl = re.search(r"web.?dl|web(rip|mux|hd)", base_filename, re.I) is not None

            for byte in sickbeard.helpers.readFileBuffered(filename):
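                # try to parse metadata from each buffered chunk; chunks that fail to parse are skipped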
                try:
                    file_metadata = extractMetadata(guessParser(StringInputStream(byte)))
                    for metadata in chain([file_metadata], file_metadata.iterGroups()):
                        height = metadata.get('height', 0)
                        if height > 1000:
                            return ((Quality.FULLHDTV, Quality.FULLHDBLURAY)[bluray], Quality.FULLHDWEBDL)[webdl]
                        elif height > 680 and height < 800:
                            return ((Quality.HDTV, Quality.HDBLURAY)[bluray], Quality.HDWEBDL)[webdl]
                        elif height < 680:
                            return (Quality.SDTV, Quality.SDDVD)[re.search(r'dvd|b[rd]rip|blue?-?ray', base_filename, re.I) is not None]
                except Exception:
                    continue

        return Quality.UNKNOWN
Example #9
def get_path_dir_files(dirName, nzbName, type):
    """
    Get files in a path

    :param dirName: Directory to start in
    :param nzbName: NZB file, if present
    :param type: auto/manual
    :return: a tuple of (path,dirs,files)
    """
    path = ""
    dirs = []
    files = []

    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or type == "manual":  # Scheduled Post Processing Active
        # Get at first all the subdir in the dirName
        for path, dirs, files in ek(os.walk, dirName):
            break
    else:
        path, dirs = ek(os.path.split, dirName)  # Script Post Processing
        if nzbName is not None and not nzbName.endswith('.nzb') and os.path.isfile(
                os.path.join(dirName, nzbName)):  # For single torrent file without Dir
            dirs = []
            files = [os.path.join(dirName, nzbName)]
        else:
            dirs = [dirs]
            files = []

    return path, dirs, files
Example #10
def change_unpack_dir(unpack_dir):
    """
    Change UNPACK directory (used by postprocessor)

    :param unpack_dir: New unpack directory
    :return: True on success, False on failure
    """
    if unpack_dir == '':
        sickbeard.UNPACK_DIR = ''
        return True

    if ek(os.path.normpath, sickbeard.UNPACK_DIR) != ek(os.path.normpath, unpack_dir):
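        # ROOT_DIRS starts with the index of the default dir, hence the [1:] slice below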
        if bool(sickbeard.ROOT_DIRS) and \
                any(map(lambda rd: helpers.is_subdirectory(unpack_dir, rd), sickbeard.ROOT_DIRS.split('|')[1:])):
            # don't change if it's in any of the TV root directories
            logger.log("Unable to change unpack directory to a sub-directory of a TV root dir")
            return False

        if helpers.makeDir(unpack_dir):
            sickbeard.UNPACK_DIR = ek(os.path.normpath, unpack_dir)
            logger.log("Changed unpack directory to " + unpack_dir)
        else:
            logger.log("Unable to create unpack directory " + ek(os.path.normpath, unpack_dir) + ", dir not changed.")
            return False

    return True
Example #11
def subtitlesLanguages(video_path):
    """Return a list detected subtitles for the given video file"""
    resultList = []
    embedded_subtitle_languages = set()

    # Search for embedded subtitles
    if not sickbeard.EMBEDDED_SUBTITLES_ALL:
        if video_path.endswith('mkv'):
            try:
                with open(video_path.encode(sickbeard.SYS_ENCODING), 'rb') as f:
                    mkv = MKV(f)
                if mkv.subtitle_tracks:
                    for st in mkv.subtitle_tracks:
                        if st.language:
                            try:
                                embedded_subtitle_languages.add(Language.fromalpha3b(st.language))
                            except BabelfishError:
                                logger.log('Embedded subtitle track is not a valid language', logger.DEBUG)
                                embedded_subtitle_languages.add(Language('und'))
                        elif st.name:
                            try:
                                embedded_subtitle_languages.add(Language.fromname(st.name))
                            except BabelfishError:
                                logger.log('Embedded subtitle track is not a valid language', logger.DEBUG)
                                embedded_subtitle_languages.add(Language('und'))
                        else:
                            embedded_subtitle_languages.add(Language('und'))
                else:
                    logger.log('MKV has no subtitle track', logger.DEBUG)
            except MalformedMKVError:
                logger.log('MKV seems to be malformed, ignoring embedded subtitles', logger.WARNING)

    # Search subtitles in the absolute path
    if sickbeard.SUBTITLES_DIR and ek(os.path.exists, sickbeard.SUBTITLES_DIR):
        video_path = ek(os.path.join, sickbeard.SUBTITLES_DIR, ek(os.path.basename, video_path))
    # Search subtitles in the relative path
    elif sickbeard.SUBTITLES_DIR:
        video_path = ek(os.path.join, ek(os.path.dirname, video_path), sickbeard.SUBTITLES_DIR, ek(os.path.basename, video_path))

    external_subtitle_languages = subliminal.video.scan_subtitle_languages(video_path)
    subtitle_languages = external_subtitle_languages.union(embedded_subtitle_languages)
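    # if the only detected language is undetermined and exactly one language is wanted, assume it is that wanted language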

    if (len(subtitle_languages) == 1 and len(wantedLanguages()) == 1) and Language('und') in subtitle_languages:
        subtitle_languages.remove(Language('und'))
        subtitle_languages.add(fromietf(wantedLanguages()[0]))

    for language in subtitle_languages:
        if hasattr(language, 'opensubtitles') and language.opensubtitles:
            resultList.append(language.opensubtitles)
        elif hasattr(language, 'alpha3') and language.alpha3:
            resultList.append(language.alpha3)
        elif hasattr(language, 'alpha2') and language.alpha2:
            resultList.append(language.alpha2)

    defaultLang = wantedLanguages()

    if ('pob' in defaultLang or 'pb' in defaultLang) and ('pt' not in defaultLang and 'por' not in defaultLang):
        resultList = [x if x not in ['por', 'pt'] else u'pob' for x in resultList]

    return sorted(resultList)
Example #12
    def _write_image(self, image_data, image_path, obj=None):
        """
        Saves the data in image_data to the location image_path. Returns True/False
        to represent success or failure.

        image_data: binary image data to write to file
        image_path: file location to save the image to
        """

        # don't bother overwriting it
        if ek(os.path.isfile, image_path):
            logger.log(u"Image already exists, not downloading", logger.DEBUG)
            return False

        image_dir = ek(os.path.dirname, image_path)

        if not image_data:
            logger.log(u"Unable to retrieve image to save in %s, skipping" % (ss(image_path)), logger.DEBUG)
            return False

        try:
            if not ek(os.path.isdir, image_dir):
                logger.log(u"Metadata dir didn't exist, creating it at " + image_dir, logger.DEBUG)
                ek(os.makedirs, image_dir)
                helpers.chmodAsParent(image_dir)

            outFile = open(image_path, 'wb')
            outFile.write(image_data)
            outFile.close()
            helpers.chmodAsParent(image_path)
        except IOError as e:
            logger.log(
                u"Unable to write image to " + image_path + " - are you sure the show folder is writable? " + ex(e),
                logger.ERROR)
            return False

        return True
Example #13
    def log_error_and_exit(self, error_msg, *args, **kwargs):
        self.log(error_msg, ERROR, *args, **kwargs)

        if not self.consoleLogging:
            ek(sys.exit, error_msg)
        else:
            sys.exit(1)
Example #14
def get_path_dir_files(dirName, nzbName, proc_type):
    """
    Get files in a path

    :param dirName: Directory to start in
    :param nzbName: NZB file, if present
    :param proc_type: auto/manual
    :return: a tuple of (path,dirs,files)
    """
    path = u""
    dirs = []
    files = []

    if dirName == sickbeard.TV_DOWNLOAD_DIR and not nzbName or proc_type == "manual":  # Scheduled Post Processing Active
        # Get at first all the subdir in the dirName
        for path, dirs, files in ek(os.walk, dirName):
            break
    else:
        # Post process downloaded content for one NZB/Torrent

        path, dirs = ek(os.path.split, dirName)  # Script Post Processing
        torrent_type = get_torrent_type(dirName, nzbName)

        if torrent_type == TorrentType.SINGLE_FILE:
            # Single file torrent
            dirs = []
            files = [ek(os.path.join, dirName, nzbName)]
        else:
            # NZB or torrent directory
            dirs = [dirs]
            files = []

    return path, dirs, files
Example #15
def run_subs_extra_scripts(epObj, foundSubs):

    for curScriptName in sickbeard.SUBTITLES_EXTRA_SCRIPTS:
        script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", curScriptName) if piece.strip()]
        script_cmd[0] = ek(os.path.abspath, script_cmd[0])
        logger.log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)

        for video, subs in foundSubs.iteritems():
            subpaths = []
            for sub in subs:
                subpath = subliminal.subtitle.get_subtitle_path(video.name, sub.language)
                if os.path.isabs(sickbeard.SUBTITLES_DIR):
                    subpath = ek(os.path.join, sickbeard.SUBTITLES_DIR, ek(os.path.basename, subpath))
                elif sickbeard.SUBTITLES_DIR:
                    subpath = ek(os.path.join, ek(os.path.dirname, subpath), sickbeard.SUBTITLES_DIR, ek(os.path.basename, subpath))

                inner_cmd = script_cmd + [video.name, subpath, sub.language.opensubtitles, epObj['show.name'],
                                         str(epObj['season']), str(epObj['episode']), epObj['name'], str(epObj['show.indexerid'])]

                # use subprocess to run the command and capture output
                logger.log(u"Executing command: %s" % inner_cmd)
                try:
                    p = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
                    out, err = p.communicate()  # @UnusedVariable
                    logger.log(u"Script result: %s" % out, logger.DEBUG)

                except Exception as e:
                    logger.log(u"Unable to run subs_extra_script: " + ex(e))
Example #16
def getFileList(path, includeFiles, imagesOnly):
    # prune out directories to protect the user from doing stupid things (already lower case the dir to reduce calls)
    hide_list = ['boot', 'bootmgr', 'cache', 'config.msi', 'msocache', 'recovery', '$recycle.bin',
                 'recycler', 'system volume information', 'temporary internet files']  # windows specific
    hide_list += ['.fseventd', '.spotlight', '.trashes', '.vol', 'cachedmessages', 'caches', 'trash']  # osx specific
    hide_list += ['.git']

    file_list = []
    for filename in ek(os.listdir, path):
        if filename.lower() in hide_list:
            continue

        full_filename = ek(os.path.join, path, filename)
        is_file = ek(os.path.isfile, full_filename)

        if not includeFiles and is_file:
            continue

        is_image = filename.endswith(('jpg', 'jpeg', 'png', 'tiff', 'gif'))

        if is_file and imagesOnly and not is_image:
            continue

        file_list.append({
            'name': filename,
            'path': full_filename,
            'isFile': is_file,
            'isImage': is_image
        })

    return file_list
Example #17
def getFileList(path, includeFiles):
    # prune out directories to protect the user from doing stupid things (already lower case the dir to reduce calls)
    hide_list = ['boot', 'bootmgr', 'cache', 'config.msi', 'msocache', 'recovery', '$recycle.bin',
                 'recycler', 'system volume information', 'temporary internet files']  # windows specific
    hide_list += ['.fseventd', '.spotlight', '.trashes', '.vol', 'cachedmessages', 'caches', 'trash']  # osx specific
    hide_list += ['.git']

    file_list = []
    for filename in ek(os.listdir, path):
        if filename.lower() in hide_list:
            continue

        full_filename = ek(os.path.join, path, filename)
        is_dir = ek(os.path.isdir, full_filename)

        if not includeFiles and not is_dir:
            continue

        entry = {
            'name': filename,
            'path': full_filename
        }
        if not is_dir:
            entry['isFile'] = True
        file_list.append(entry)

    return file_list
Example #18
    def run(self, force=False):
        """
        Runs the postprocessor

        :param force: Forces postprocessing run
        :return: Returns when done without a return state/code
        """
        self.amActive = True

        if not ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR):
            logger.log(u"Automatic post-processing attempted but directory doesn't exist: {0}".format(
                       sickbeard.TV_DOWNLOAD_DIR), logger.WARNING)
            self.amActive = False
            return

        if not (force or ek(os.path.isabs, sickbeard.TV_DOWNLOAD_DIR)):
            logger.log(u"Automatic post-processing attempted but directory is relative "
                       u"(and probably not what you really want to process): %s" %
                       sickbeard.TV_DOWNLOAD_DIR, logger.WARNING)
            self.amActive = False
            return

        processTV.processDir(sickbeard.TV_DOWNLOAD_DIR, force=force)

        self.amActive = False
Example #19
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """

    # check if it's a folder
    if not ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = ek(os.listdir, folder)
        if check_files:
            logger.log(u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO)
            return False

        try:
            logger.log(u"Deleting folder (if it's empty): " + folder)
            os.rmdir(folder)
        except (OSError, IOError) as e:
            logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
            return False
Example #20
def getFileList(path, includeFiles):
    # prune out directories to protect the user from doing stupid things (already lower case the dir to reduce calls)
    hideList = ["boot", "bootmgr", "cache", "config.msi", "msocache", "recovery", "$recycle.bin",
                "recycler", "system volume information", "temporary internet files"]  # windows specific
    hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"]  # osx specific
    hideList += [".git"]

    fileList = []
    for filename in ek(os.listdir, path):
        if filename.lower() in hideList:
            continue

        fullFilename = ek(os.path.join, path, filename)
        isDir = ek(os.path.isdir, fullFilename)

        if not includeFiles and not isDir:
            continue

        entry = {
            'name': filename,
            'path': fullFilename
        }
        if not isDir:
            entry['isFile'] = True
        fileList.append(entry)

    return fileList
Example #21
    def fetch_popular_shows(self):
        """Get popular show information from IMDB"""

        popular_shows = []

        data = helpers.getURL(self.url, session=self.session, params=self.params, headers={'Referer': 'http://akas.imdb.com/'}, returns='text')
        if not data:
            return None

        soup = BeautifulSoup(data, 'html5lib')
        results = soup.find("table", {"class": "results"})
        rows = results("tr")

        for row in rows:
            show = {}
            image_td = row.find("td", {"class": "image"})

            if image_td:
                image = image_td.find("img")
                show['image_url_large'] = self.change_size(image['src'], 3)
                show['image_path'] = ek(posixpath.join, 'images', 'imdb_popular', ek(os.path.basename, show['image_url_large']))

                self.cache_image(show['image_url_large'])

            td = row.find("td", {"class": "title"})

            if td:
                show['name'] = td.find("a").contents[0]
                show['imdb_url'] = "http://akas.imdb.com" + td.find("a")["href"]
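                # the URL ends with the title id plus a trailing slash (e.g. ".../tt1234567/"); slice out the 9-character tt id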
                show['imdb_tt'] = show['imdb_url'][-10:][0:9]
                show['year'] = td.find("span", {"class": "year_type"}).contents[0].split(" ")[0][1:]

                rating_all = td.find("div", {"class": "user_rating"})
                if rating_all:
                    rating_string = rating_all.find("div", {"class": "rating rating-list"})
                    if rating_string:
                        rating_string = rating_string['title']

                        match = re.search(r".* (.*)\/10.*\((.*)\).*", rating_string)
                        if match:
                            matches = match.groups()
                            show['rating'] = matches[0]
                            show['votes'] = matches[1]
                        else:
                            show['rating'] = None
                            show['votes'] = None
                else:
                    show['rating'] = None
                    show['votes'] = None

                outline = td.find("span", {"class": "outline"})
                if outline:
                    show['outline'] = outline.contents[0]
                else:
                    show['outline'] = ''

                popular_shows.append(show)

        return popular_shows
Example #22
    def remove_pid_file(PIDFILE):
        try:
            if ek(os.path.exists, PIDFILE):
                ek(os.remove, PIDFILE)
        except (IOError, OSError):
            return False

        return True
Example #23
def log_data(min_level, log_filter, log_search, max_lines):
    regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
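    # capture groups: year, month, day, hour, minute, second, log level, logger name, message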
    if log_filter not in LOG_FILTERS:
        log_filter = '<NONE>'

    final_data = []

    log_files = []
    if ek(os.path.isfile, Wrapper.instance.log_file):
        log_files.append(Wrapper.instance.log_file)

        for i in range(1, int(sickbeard.LOG_NR)):
            name = Wrapper.instance.log_file + "." + str(i)
            if not ek(os.path.isfile, name):
                break
            log_files.append(name)
    else:
        return final_data

    data = []
    for _log_file in log_files:
        if len(data) < max_lines:
            with io.open(_log_file, 'r', encoding='utf-8') as f:
                data += [line.strip() + '\n' for line in reversed(f.readlines()) if line.strip()]
        else:
            break

    found_lines = 0
    for x in data:
        match = re.match(regex, x)

        if match:
            level = match.group(7)
            log_name = match.group(8)

            if not sickbeard.DEBUG and level == 'DEBUG':
                continue

            if not sickbeard.DBDEBUG and level == 'DB':
                continue

            if level not in LOGGING_LEVELS:
                final_data.append('AA ' + x)
                found_lines += 1
            elif log_search and log_search.lower() in x.lower():
                final_data.append(x)
                found_lines += 1
            elif not log_search and LOGGING_LEVELS[level] >= int(min_level) and (log_filter == '<NONE>' or log_name.startswith(log_filter)):
                final_data.append(x)
                found_lines += 1
        else:
            final_data.append('AA ' + x)
            found_lines += 1

        if found_lines >= max_lines:
            break

    return final_data
Example #24
    def retrieveShowMetadata(self, folder):
        """
        Used only when mass adding Existing Shows, using previously generated Show metadata to reduce the need to query TVDB.
        """

        empty_return = (None, None, None)

        metadata_path = ek(os.path.join, folder, self._show_metadata_filename)

        if not ek(os.path.isdir, folder) or not ek(os.path.isfile, metadata_path):
            logger.log(u"Can't load the metadata file from " + repr(metadata_path) + ", it doesn't exist", logger.DEBUG)
            return empty_return

        logger.log(u"Loading show info from metadata file in " + folder, logger.DEBUG)

        try:
            with ek(open, metadata_path, 'r') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            if showXML.findtext('title') is None \
                    or (showXML.findtext('tvdbid') is None
                        and showXML.findtext('id') is None):
                logger.log(u"Invalid info in tvshow.nfo (missing name or id):" \
                           + str(showXML.findtext('title')) + " " \
                           + str(showXML.findtext('tvdbid')) + " " \
                           + str(showXML.findtext('id')))
                return empty_return

            name = showXML.findtext('title')

            if showXML.findtext('tvdbid') is not None:
                indexer_id = int(showXML.findtext('tvdbid'))
            elif showXML.findtext('id') is not None:
                indexer_id = int(showXML.findtext('id'))
            else:
                logger.log(u"Empty <id> or <tvdbid> field in NFO, unable to find a ID", logger.WARNING)
                return empty_return

            if indexer_id is None:
                logger.log(u"Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file", logger.WARNING)
                return empty_return

            indexer = None
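            # try to infer which indexer the NFO came from via its episode guide URL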
            if showXML.find('episodeguide/url') is not None:
                epg_url = showXML.findtext('episodeguide/url').lower()
                if str(indexer_id) in epg_url:
                    if 'thetvdb.com' in epg_url:
                        indexer = 1
                    elif 'tvrage' in epg_url:
                        logger.log(u"Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file because it has TVRage info", logger.WARNING)
                        return empty_return


        except Exception as e:
            logger.log(
                u"There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
                logger.WARNING)
            return empty_return
Example #25
def tearDown_test_db():
    for current_db in [ TESTDBNAME, TESTCACHEDBNAME, TESTFAILEDDBNAME ]:
        file_name = ek(os.path.join, TESTDIR, current_db)
        if ek(os.path.exists, file_name):
            try:
                ek(os.remove, file_name)
            except Exception as e:
                print sickbeard.ex(e)
                continue
Example #26
    def fill_cache(self, show_obj):
        """
        Caches all images for the given show. Copies them from the show dir if possible, or
        downloads them from indexer if they aren't in the show dir.

        :param show_obj: TVShow object to cache images for
        """

        logger.log(u"Checking if we need any cache images for show " + str(show_obj.indexerid), logger.DEBUG)

        # check if the images are already cached or not
        need_images = {self.POSTER: not self.has_poster(show_obj.indexerid),
                       self.BANNER: not self.has_banner(show_obj.indexerid),
                       self.POSTER_THUMB: not self.has_poster_thumbnail(show_obj.indexerid),
                       self.BANNER_THUMB: not self.has_banner_thumbnail(show_obj.indexerid),
                       self.FANART: not self.has_fanart(show_obj.indexerid)}

        if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not \
        need_images[self.BANNER_THUMB] and not need_images[self.FANART]:
            logger.log(u"No new cache images needed, not retrieving new ones", logger.DEBUG)
            return

        # check the show dir for poster or banner images and use them
        if need_images[self.POSTER] or need_images[self.BANNER] or need_images[self.FANART]:
            try:
                for cur_provider in sickbeard.metadata_provider_dict.values():
                    logger.log(u"Checking if we can use the show image from the " + cur_provider.name + " metadata",
                               logger.DEBUG)
                    if ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
                        cur_file_name = ek(os.path.abspath, cur_provider.get_poster_path(show_obj))
                        cur_file_type = self.which_type(cur_file_name)

                        if cur_file_type is None:
                            logger.log(u"Unable to retrieve image type, not using the image from " + str(cur_file_name),
                                       logger.WARNING)
                            continue

                        logger.log(u"Checking if image " + cur_file_name + " (type " + str(
                            cur_file_type) + " needs metadata: " + str(need_images[cur_file_type]), logger.DEBUG)

                        if cur_file_type in need_images and need_images[cur_file_type]:
                            logger.log(
                                u"Found an image in the show dir that doesn't exist in the cache, caching it: " + cur_file_name + ", type " + str(
                                    cur_file_type), logger.DEBUG)
                            self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.indexerid)
                            need_images[cur_file_type] = False
            except ShowDirectoryNotFoundException:
                logger.log(u"Unable to search for images in show dir because it doesn't exist", logger.WARNING)

        # download from indexer for missing ones
        for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB,self.FANART]:
            logger.log(u"Seeing if we still need an image of type " + str(cur_image_type) + ": " + str(
                need_images[cur_image_type]), logger.DEBUG)
            if cur_image_type in need_images and need_images[cur_image_type]:
                self._cache_image_from_indexer(show_obj, cur_image_type)

        logger.log(u"Done cache check")
Example #27
    def set_picture(self, picture):
        """
        Set the cover picture of this anime

        :param picture: the image filename
        """
        self._picture = picture
        self.cache_image("http://img7.anidb.net/pics/anime/{0}".format(picture))
        self._image_path = ek(posixpath.join, "images", "anidb", ek(os.path.basename, self._picture))
Example #28
 def image_name(self):
     """
     Checks if we have an image for this provider already.
     Returns found image or the default newznab image
     """
     if ek(os.path.isfile,
           ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME, 'images', 'providers',
              self.get_id() + '.png')):
         return self.get_id() + '.png'
     return 'newznab.png'
Example #29
    def dumpHTML(data):
        dumpName = ek(os.path.join, sickbeard.CACHE_DIR, 'custom_torrent.html')

        try:
            fileOut = ek(io.open, dumpName, 'wb')
            fileOut.write(data)
            fileOut.close()
            helpers.chmodAsParent(dumpName)
        except IOError as e:
            logger.log(u"Unable to save the file: %s " % repr(e), logger.ERROR)
            return False
Example #30
def setUp_test_episode_file():
    if not ek(os.path.exists, FILEDIR):
        ek(os.makedirs, FILEDIR)

    try:
        with open(FILEPATH, 'wb') as f:
            f.write("foo bar")
            f.flush()
    except Exception:
        print "Unable to set up test episode"
        raise
Example #31
def subtitlesLanguages(video_path):
    """Return a list detected subtitles for the given video file"""
    resultList = []
    should_save_subtitles = None

    if not sickbeard.EMBEDDED_SUBTITLES_ALL and video_path.endswith('.mkv'):
        embedded_subtitle_languages = getEmbeddedLanguages(
            video_path.encode(sickbeard.SYS_ENCODING))

    # Search subtitles with the absolute path
    if os.path.isabs(sickbeard.SUBTITLES_DIR):
        video_path = ek(os.path.join, sickbeard.SUBTITLES_DIR,
                        ek(os.path.basename, video_path))
    # Search subtitles with the relative path
    elif sickbeard.SUBTITLES_DIR:
        check_subtitles_path = ek(os.path.join, ek(os.path.dirname,
                                                   video_path),
                                  sickbeard.SUBTITLES_DIR)
        if not os.path.exists(check_subtitles_path):
            getSubtitlesPath(video_path)
        video_path = ek(os.path.join, ek(os.path.dirname, video_path),
                        sickbeard.SUBTITLES_DIR,
                        ek(os.path.basename, video_path))
    else:
        video_path = ek(os.path.join, ek(os.path.dirname, video_path),
                        ek(os.path.basename, video_path))

    if not sickbeard.EMBEDDED_SUBTITLES_ALL and video_path.endswith('.mkv'):
        external_subtitle_languages = scan_subtitle_languages(video_path)
        subtitle_languages = external_subtitle_languages.union(
            embedded_subtitle_languages)
        if not sickbeard.SUBTITLES_MULTI:
            currentWantedLanguages = wantedLanguages()
            if len(currentWantedLanguages) == 1 and Language(
                    'und') in external_subtitle_languages:
                if embedded_subtitle_languages not in currentWantedLanguages and Language(
                        'und') in embedded_subtitle_languages:
                    subtitle_languages.add(fromietf(currentWantedLanguages[0]))
                    should_save_subtitles = True
                elif embedded_subtitle_languages not in currentWantedLanguages and Language(
                        'und') not in embedded_subtitle_languages:
                    subtitle_languages.remove(Language('und'))
                    subtitle_languages.add(fromietf(currentWantedLanguages[0]))
                    should_save_subtitles = True
    else:
        subtitle_languages = scan_subtitle_languages(video_path)
        if not sickbeard.SUBTITLES_MULTI:
            if len(wantedLanguages()) == 1 and Language(
                    'und') in subtitle_languages:
                subtitle_languages.remove(Language('und'))
                subtitle_languages.add(fromietf(wantedLanguages()[0]))
                should_save_subtitles = True

    for language in subtitle_languages:
        if hasattr(language, 'opensubtitles') and language.opensubtitles:
            resultList.append(language.opensubtitles)
        elif hasattr(language, 'alpha3b') and language.alpha3b:
            resultList.append(language.alpha3b)
        elif hasattr(language, 'alpha3t') and language.alpha3t:
            resultList.append(language.alpha3t)
        elif hasattr(language, 'alpha2') and language.alpha2:
            resultList.append(language.alpha2)

    return (sorted(resultList), should_save_subtitles)
Example #32
def foldersAtPath(path, includeParent=False, includeFiles=False):
    """ Returns a list of dictionaries with the folders contained at the given path
        Give the empty string as the path to list the contents of the root path
        (under Unix this means "/", on Windows this will be a list of drive letters)

        :param includeParent: boolean, include parent dir in list as well
        :param includeFiles: boolean, include files or only directories
        :return: list of folders/files
    """

    # walk up the tree until we find a valid path
    while path and not ek(os.path.isdir, path):
        if path == ek(os.path.dirname, path):
            path = ''
            break
        else:
            path = ek(os.path.dirname, path)

    if path == "":
        if os.name == 'nt':
            entries = [{'current_path': 'Root'}]
            for letter in getWinDrives():
                letterPath = letter + ':\\'
                entries.append({'name': letterPath, 'path': letterPath})
            return entries
        else:
            path = '/'

    # fix up the path and find the parent
    path = ek(os.path.abspath, ek(os.path.normpath, path))
    parentPath = ek(os.path.dirname, path)

    # if we're at the root then the next step is the meta-node showing our drive letters
    if path == parentPath and os.name == 'nt':
        parentPath = ""

    try:
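        # if the path itself can't be listed, fall back to listing its parent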
        fileList = [{'name': filename, 'path': ek(os.path.join, path, filename)} for filename in ek(os.listdir, path)]
    except OSError as e:
        logging.warning("Unable to open " + path + ": " + repr(e) + " / " + str(e))
        fileList = [{'name': filename, 'path': ek(os.path.join, parentPath, filename)} for filename in
                    ek(os.listdir, parentPath)]

    if not includeFiles:
        fileList = [x for x in fileList if ek(os.path.isdir, x[b'path'])]

    # prune out directories to protect the user from doing stupid things (already lower case the dir to reduce calls)
    hideList = ["boot", "bootmgr", "cache", "msocache", "recovery", "$recycle.bin", "recycler",
                "system volume information", "temporary internet files"]  # windows specific
    hideList += [".fseventd", ".spotlight", ".trashes", ".vol", "cachedmessages", "caches", "trash"]  # osx specific

    fileList = [x for x in fileList if x[b'name'].lower() not in hideList]

    fileList = sorted(fileList,
                      lambda x, y: cmp(ek(os.path.basename, x[b'name']).lower(),
                                       ek(os.path.basename, y[b'name']).lower()))

    entries = [{'current_path': path}]
    if includeParent and parentPath != path:
        entries.append({'name': "..", 'path': parentPath})
    entries.extend(fileList)

    return entries
Example #33
    def qualityFromFileMeta(filename):  # pylint: disable=too-many-branches
        """
        Get quality from file metadata

        :param filename: Filename to analyse
        :return: Quality prefix
        """

        log.use_print = False

        try:
            parser = createParser(filename)
        except Exception:  # pylint: disable=broad-except
            parser = None

        if not parser:
            return Quality.UNKNOWN

        try:
            metadata = extractMetadata(parser)
        except Exception:  # pylint: disable=broad-except
            metadata = None

        try:
            parser.stream._input.close()  # pylint: disable=protected-access
        except Exception:  # pylint: disable=broad-except
            pass

        if not metadata:
            return Quality.UNKNOWN

        height = 0
        if metadata.has('height'):
            height = int(metadata.get('height') or 0)
        else:
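            # height may only be exposed on a metadata sub-group (e.g. the video stream), so fall back to iterGroups()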
            test = getattr(metadata, "iterGroups", None)
            if callable(test):
                for metagroup in metadata.iterGroups():
                    if metagroup.has('height'):
                        height = int(metagroup.get('height') or 0)

        if not height:
            return Quality.UNKNOWN

        base_filename = ek(path.basename, filename)
        bluray = re.search(r"blue?-?ray|hddvd|b[rd](rip|mux)", base_filename,
                           re.I) is not None
        webdl = re.search(r"web.?dl|web(rip|mux|hd)", base_filename,
                          re.I) is not None

        ret = Quality.UNKNOWN
        if 3240 < height:
            ret = ((Quality.UHD_8K_TV, Quality.UHD_8K_BLURAY)[bluray],
                   Quality.UHD_8K_WEBDL)[webdl]
        elif 1620 < height <= 3240:
            ret = ((Quality.UHD_4K_TV, Quality.UHD_4K_BLURAY)[bluray],
                   Quality.UHD_4K_WEBDL)[webdl]
        elif 800 < height <= 1620:
            ret = ((Quality.FULLHDTV, Quality.FULLHDBLURAY)[bluray],
                   Quality.FULLHDWEBDL)[webdl]
        elif 680 < height <= 800:
            ret = ((Quality.HDTV, Quality.HDBLURAY)[bluray],
                   Quality.HDWEBDL)[webdl]
        elif height <= 680:
            ret = (Quality.SDTV, Quality.SDDVD)[re.search(
                r'dvd|b[rd]rip|blue?-?ray', base_filename, re.I) is not None]

        return ret
Example #34
    def __init__(self, options=None, io_loop=None):
        threading.Thread.__init__(self)
        self.daemon = True
        self.alive = True
        self.name = "TORNADO"
        self.io_loop = io_loop or IOLoop.current()

        self.options = options or {}
        self.options.setdefault('port', 8081)
        self.options.setdefault('host', '0.0.0.0')
        self.options.setdefault('log_dir', None)
        self.options.setdefault('username', '')
        self.options.setdefault('password', '')
        self.options.setdefault('web_root', '/')
        assert isinstance(self.options[b'port'], int)
        assert 'data_root' in self.options

        # video root
        if sickbeard.ROOT_DIRS:
            root_dirs = sickbeard.ROOT_DIRS.split('|')
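            # ROOT_DIRS is pipe-separated, with the index of the default root dir as its first element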
            self.video_root = root_dirs[int(root_dirs[0]) + 1]
        else:
            self.video_root = None

        # web root
        if self.options[b'web_root']:
            sickbeard.WEB_ROOT = self.options[b'web_root'] = (
                '/' + self.options[b'web_root'].lstrip('/').strip('/'))

        # api root
        if not sickbeard.API_KEY:
            sickbeard.API_KEY = generateApiKey()
        self.options[b'api_root'] = r'%s/api/%s' % (sickbeard.WEB_ROOT,
                                                    sickbeard.API_KEY)

        # tornado setup
        self.enable_https = self.options[b'enable_https']
        self.https_cert = self.options[b'https_cert']
        self.https_key = self.options[b'https_key']

        if self.enable_https:
            # If either the HTTPS certificate or key do not exist, make some self-signed ones.
            if not (self.https_cert
                    and ek(os.path.exists, self.https_cert)) or not (
                        self.https_key and ek(os.path.exists, self.https_key)):
                if not create_https_certificates(self.https_cert,
                                                 self.https_key):
                    logging.info(
                        "Unable to create CERT/KEY files, disabling HTTPS")
                    sickbeard.ENABLE_HTTPS = False
                    self.enable_https = False

            if not (ek(os.path.exists, self.https_cert)
                    and ek(os.path.exists, self.https_key)):
                logging.warning(
                    "Disabled HTTPS because of missing CERT and KEY files")
                sickbeard.ENABLE_HTTPS = False
                self.enable_https = False

        # Load the app
        self.app = Application(
            [],
            debug=sickbeard.DEBUG,
            autoreload=False,
            gzip=sickbeard.WEB_USE_GZIP,
            xheaders=sickbeard.HANDLE_REVERSE_PROXY,
            cookie_secret=sickbeard.WEB_COOKIE_SECRET,
            login_url='%s/login/' % self.options[b'web_root'],
        )

        # Main Handlers
        self.app.add_handlers(
            '.*$',
            [
                # webapi handler
                (r'%s(/?.*)' % self.options[b'api_root'], ApiHandler),

                # webapi key retrieval
                (r'%s/getkey(/?.*)' % self.options[b'web_root'], KeyHandler),

                # webapi builder redirect
                (r'%s/api/builder' % self.options[b'web_root'],
                 RedirectHandler, {
                     "url": self.options[b'web_root'] + '/apibuilder/'
                 }),

                # webui login/logout handlers
                (r'%s/login(/?)' % self.options[b'web_root'], LoginHandler),
                (r'%s/logout(/?)' % self.options[b'web_root'], LogoutHandler),

                # webui handlers
            ] + route.get_routes(self.options[b'web_root']))

        # Web calendar handler (Needed because option Unprotected calendar)
        self.app.add_handlers('.*$', [
            (r'%s/calendar' % self.options[b'web_root'], CalendarHandler),
        ])

        # Static File Handlers
        self.app.add_handlers(
            ".*$",
            [
                # favicon
                (r'%s/(favicon\.ico)' % self.options[b'web_root'],
                 StaticFileHandler, {
                     "path":
                     ek(os.path.join, self.options[b'data_root'],
                        'images/ico/favicon.ico')
                 }),

                # images
                (r'%s/images/(.*)' % self.options[b'web_root'],
                 StaticFileHandler, {
                     "path": ek(os.path.join, self.options[b'data_root'],
                                'images')
                 }),

                # cached images
                (r'%s/cache/images/(.*)' % self.options[b'web_root'],
                 StaticFileHandler, {
                     "path": ek(os.path.join, sickbeard.CACHE_DIR, 'images')
                 }),

                # css
                (r'%s/css/(.*)' % self.options[b'web_root'], StaticFileHandler,
                 {
                     "path": ek(os.path.join, self.options[b'data_root'],
                                'css')
                 }),

                # javascript
                (r'%s/js/(.*)' % self.options[b'web_root'], StaticFileHandler,
                 {
                     "path": ek(os.path.join, self.options[b'data_root'], 'js')
                 }),

                # videos
            ] + [(r'%s/videos/(.*)' % self.options[b'web_root'],
                  StaticFileHandler, {
                      "path": self.video_root
                  })])
Example #35
 def _has_episode_metadata(self, ep_obj):
     result = ek(os.path.isfile, self.get_episode_file_path(ep_obj))
     logger.log(
         u"Checking if " + self.get_episode_file_path(ep_obj) +
         " exists: " + str(result), logger.DEBUG)
     return result
Example #36
 def _has_season_all_banner(self, show_obj):
     result = ek(os.path.isfile, self.get_season_all_banner_path(show_obj))
     logger.log(
         u"Checking if " + self.get_season_all_banner_path(show_obj) +
         " exists: " + str(result), logger.DEBUG)
     return result
Example #37
 def get_episode_file_path(self, ep_obj):
     return ek(helpers.replaceExtension, ep_obj.location,
               self._ep_nfo_extension)
Example #38
    def fill_cache(self, show_obj):
        """
        Caches all images for the given show. Copies them from the show dir if possible, or
        downloads them from indexer if they aren't in the show dir.

        :param show_obj: TVShow object to cache images for
        """

        logger.log(
            u"Checking if we need any cache images for show " +
            str(show_obj.indexerid), logger.DEBUG)

        # check if the images are already cached or not
        need_images = {
            self.POSTER: not self.has_poster(show_obj.indexerid),
            self.BANNER: not self.has_banner(show_obj.indexerid),
            self.POSTER_THUMB:
            not self.has_poster_thumbnail(show_obj.indexerid),
            self.BANNER_THUMB:
            not self.has_banner_thumbnail(show_obj.indexerid),
            self.FANART: not self.has_fanart(show_obj.indexerid)
        }

        if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not \
        need_images[self.BANNER_THUMB] and not need_images[self.FANART]:
            logger.log(u"No new cache images needed, not retrieving new ones",
                       logger.DEBUG)
            return

        # check the show dir for poster or banner images and use them
        if need_images[self.POSTER] or need_images[self.BANNER] or need_images[
                self.FANART]:
            try:
                for cur_provider in sickbeard.metadata_provider_dict.values():
                    logger.log(
                        u"Checking if we can use the show image from the " +
                        cur_provider.name + " metadata", logger.DEBUG)
                    if ek(os.path.isfile,
                          cur_provider.get_poster_path(show_obj)):
                        cur_file_name = os.path.abspath(
                            cur_provider.get_poster_path(show_obj))
                        cur_file_type = self.which_type(cur_file_name)

                        if cur_file_type is None:
                            logger.log(
                                u"Unable to retrieve image type, not using the image from "
                                + str(cur_file_name), logger.WARNING)
                            continue

                        logger.log(
                            u"Checking if image " + cur_file_name + " (type " +
                            str(cur_file_type) + " needs metadata: " +
                            str(need_images[cur_file_type]), logger.DEBUG)

                        if cur_file_type in need_images and need_images[
                                cur_file_type]:
                            logger.log(
                                u"Found an image in the show dir that doesn't exist in the cache, caching it: "
                                + cur_file_name + ", type " +
                                str(cur_file_type), logger.DEBUG)
                            self._cache_image_from_file(
                                cur_file_name, cur_file_type,
                                show_obj.indexerid)
                            need_images[cur_file_type] = False
            except ShowDirectoryNotFoundException:
                logger.log(
                    u"Unable to search for images in show dir because it doesn't exist",
                    logger.WARNING)

        # download from indexer for missing ones
        for cur_image_type in [
                self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB,
                self.FANART
        ]:
            logger.log(
                u"Seeing if we still need an image of type " +
                str(cur_image_type) + ": " + str(need_images[cur_image_type]),
                logger.DEBUG)
            if cur_image_type in need_images and need_images[cur_image_type]:
                self._cache_image_from_indexer(show_obj, cur_image_type)

        logger.log(u"Done cache check")
Example #39
    def run(self):

        ShowQueueItem.run(self)

        logger.log(u"Starting to add show {0}".format("by ShowDir: {0}".format(self.showDir) if self.showDir else "by Indexer Id: {0}".format(self.indexer_id)))
        # make sure the Indexer IDs are valid
        try:

            lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy()
            if self.lang:
                lINDEXER_API_PARMS['language'] = self.lang

            logger.log(u"" + str(sickbeard.indexerApi(self.indexer).name) + ": " + repr(lINDEXER_API_PARMS))

            t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS)
            s = t[self.indexer_id]

            # Let's try to create the show dir if it's not provided. This way we force the show dir to be built using
            # the indexer-provided series name
            if not self.showDir and self.root_dir:
                show_name = get_showname_from_indexer(self.indexer, self.indexer_id, self.lang)
                if show_name:
                    self.showDir = ek(os.path.join, self.root_dir, sanitize_filename(show_name))
                    dir_exists = makeDir(self.showDir)
                    if not dir_exists:
                        logger.log(u"Unable to create the folder {0}, can't add the show".format(self.showDir))
                        return

                    chmodAsParent(self.showDir)
                else:
                    logger.log(u"Unable to get a show {0}, can't add the show".format(self.showDir))
                    return

            # this usually only happens if they have an NFO in their show dir which gave us an Indexer ID that has no proper English version of the show
            if getattr(s, 'seriesname', None) is None:
                logger.log(u"Show in {} has no name on {}, probably searched with the wrong language.".format
                           (self.showDir, sickbeard.indexerApi(self.indexer).name), logger.ERROR)

                ui.notifications.error("Unable to add show",
                                       "Show in " + self.showDir + " has no name on " + str(sickbeard.indexerApi(
                                           self.indexer).name) + ", probably the wrong language. Delete .nfo and add manually in the correct language.")
                self._finishEarly()
                return
            # if the show has no episodes/seasons
            if not s:
                logger.log(u"Show " + str(s['seriesname']) + " is on " + str(
                    sickbeard.indexerApi(self.indexer).name) + " but contains no season/episode data.")
                ui.notifications.error("Unable to add show",
                                       "Show " + str(s['seriesname']) + " is on " + str(sickbeard.indexerApi(
                                           self.indexer).name) + " but contains no season/episode data.")
                self._finishEarly()
                return
        except Exception as e:
            logger.log(u"%s Error while loading information from indexer %s. Error: %r" % (self.indexer_id, sickbeard.indexerApi(self.indexer).name, ex(e)), logger.ERROR)
            # logger.log(u"Show name with ID %s doesn't exist on %s anymore. If you are using trakt, it will be removed from your TRAKT watchlist. If you are adding manually, try removing the nfo and adding again" %
            #            (self.indexer_id, sickbeard.indexerApi(self.indexer).name), logger.WARNING)

            ui.notifications.error(
                "Unable to add show",
                "Unable to look up the show in %s on %s using ID %s, not using the NFO. Delete .nfo and try adding manually again." %
                (self.showDir, sickbeard.indexerApi(self.indexer).name, self.indexer_id)
            )

            if sickbeard.USE_TRAKT:

                trakt_id = sickbeard.indexerApi(self.indexer).config['trakt_id']
                trakt_api = TraktAPI(sickbeard.SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)

                title = self.showDir.split("/")[-1]
                data = {
                    'shows': [
                        {
                            'title': title,
                            'ids': {}
                        }
                    ]
                }
                if trakt_id == 'tvdb_id':
                    data['shows'][0]['ids']['tvdb'] = self.indexer_id
                else:
                    data['shows'][0]['ids']['tvrage'] = self.indexer_id

                trakt_api.traktRequest("sync/watchlist/remove", data, method='POST')

            self._finishEarly()
            return

        try:
            newShow = TVShow(self.indexer, self.indexer_id, self.lang)
            newShow.loadFromIndexer()

            self.show = newShow

            # set up initial values
            self.show.location = self.showDir
            self.show.subtitles = self.subtitles if self.subtitles is not None else sickbeard.SUBTITLES_DEFAULT
            self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT
            self.show.flatten_folders = self.flatten_folders if self.flatten_folders is not None else sickbeard.FLATTEN_FOLDERS_DEFAULT
            self.show.anime = self.anime if self.anime is not None else sickbeard.ANIME_DEFAULT
            self.show.scene = self.scene if self.scene is not None else sickbeard.SCENE_DEFAULT
            self.show.paused = self.paused if self.paused is not None else False
            self.show.frenchsearch = 0

            # set up default new/missing episode status
            logger.log(u"Setting all episodes to the specified default status: " + str(self.show.default_ep_status))
            self.show.default_ep_status = self.default_status

            if self.show.anime:
                self.show.release_groups = BlackAndWhiteList(self.show.indexerid)
                if self.blacklist:
                    self.show.release_groups.set_black_keywords(self.blacklist)
                if self.whitelist:
                    self.show.release_groups.set_white_keywords(self.whitelist)

            # # be smartish about this
            # if self.show.genre and "talk show" in self.show.genre.lower():
            #     self.show.air_by_date = 1
            # if self.show.genre and "documentary" in self.show.genre.lower():
            #     self.show.air_by_date = 0
            # if self.show.classification and "sports" in self.show.classification.lower():
            #     self.show.sports = 1

        except sickbeard.indexer_exception as e:
            logger.log(
                u"Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
                logger.ERROR)
            if self.show:
                ui.notifications.error(
                    "Unable to add " + str(self.show.name) + " due to an error with " + sickbeard.indexerApi(
                        self.indexer).name + "")
            else:
                ui.notifications.error(
                    "Unable to add show due to an error with " + sickbeard.indexerApi(self.indexer).name + "")
            self._finishEarly()
            return

        except MultipleShowObjectsException:
            logger.log(u"The show in " + self.showDir + " is already in your show list, skipping", logger.WARNING)
            ui.notifications.error('Show skipped', "The show in " + self.showDir + " is already in your show list")
            self._finishEarly()
            return

        except Exception as e:
            logger.log(u"Error trying to add show: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            self._finishEarly()
            raise

        logger.log(u"Retrieving show info from IMDb", logger.DEBUG)
        try:
            self.show.loadIMDbInfo()
        except imdb_exceptions.IMDbError as e:
            logger.log(u" Something wrong on IMDb api: " + ex(e), logger.WARNING)
        except Exception as e:
            logger.log(u"Error loading IMDb info: " + ex(e), logger.ERROR)

        try:
            self.show.saveToDB()
        except Exception as e:
            logger.log(u"Error saving the show to the database: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            self._finishEarly()
            raise

        # add it to the show list
        sickbeard.showList.append(self.show)

        try:
            self.show.loadEpisodesFromIndexer()
        except Exception as e:
            logger.log(
                u"Error with " + sickbeard.indexerApi(self.show.indexer).name + ", not creating episode list: " + ex(e),
                logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

        # update internal name cache
        name_cache.buildNameCache(self.show)

        try:
            self.show.loadEpisodesFromDir()
        except Exception as e:
            logger.log(u"Error searching dir for episodes: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

        # if they set default ep status to WANTED then run the backlog to search for episodes
        # FIXME: This needs to be a backlog queue item!!!
        if self.show.default_ep_status == WANTED:
            logger.log(u"Launching backlog for this show since its episodes are WANTED")
            sickbeard.backlogSearchScheduler.action.searchBacklog([self.show])

        self.show.writeMetadata()
        self.show.updateMetadata()
        self.show.populateCache()

        self.show.flushEpisodes()

        if sickbeard.USE_TRAKT:
            # if there are specific episodes that need to be added by trakt
            sickbeard.traktCheckerScheduler.action.manageNewShow(self.show)

            # add show to trakt.tv library
            if sickbeard.TRAKT_SYNC:
                sickbeard.traktCheckerScheduler.action.addShowToTraktLibrary(self.show)

            if sickbeard.TRAKT_SYNC_WATCHLIST:
                logger.log(u"update watchlist")
                notifiers.trakt_notifier.update_watchlist(show_obj=self.show)

        # Load XEM data to DB for show
        sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer, force=True)

        # check if show has XEM mapping so we can determine if searches should go by scene numbering or indexer numbering.
        if not self.scene and sickbeard.scene_numbering.get_xem_numbering_for_show(self.show.indexerid,
                                                                                   self.show.indexer):
            self.show.scene = 1

        # After initial add, set to default_status_after.
        self.show.default_ep_status = self.default_status_after

        self.finish()
Example #40
0
    def start(self):  # pylint: disable=too-many-branches,too-many-statements
        """
        Start SickRage
        """
        # do some preliminary stuff
        sickbeard.MY_FULLNAME = ek(os.path.normpath,
                                   ek(os.path.abspath, __file__))
        sickbeard.MY_NAME = ek(os.path.basename, sickbeard.MY_FULLNAME)
        sickbeard.PROG_DIR = ek(os.path.dirname, sickbeard.MY_FULLNAME)
        sickbeard.LOCALE_DIR = ek(os.path.join, sickbeard.PROG_DIR, 'locale')
        sickbeard.DATA_DIR = sickbeard.PROG_DIR
        sickbeard.MY_ARGS = sys.argv[1:]

        try:
            locale.setlocale(locale.LC_ALL, '')
            sickbeard.SYS_ENCODING = locale.getpreferredencoding()
        except (locale.Error, IOError):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # pylint: disable=no-member
        if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING.lower() in ('ansi_x3.4-1968', 'us-ascii', 'ascii', 'charmap') or \
                (sys.platform.startswith('win') and sys.getwindowsversion()[0] >= 6 and str(getattr(sys.stdout, 'device', sys.stdout).encoding).lower() in ('cp65001', 'charmap')):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # TODO: Continue working on making this unnecessary, this hack creates all sorts of hellish problems
        if not hasattr(sys, 'setdefaultencoding'):
            reload_module(sys)

        try:
            # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(sickbeard.SYS_ENCODING)  # pylint: disable=no-member
        except (AttributeError, LookupError):
            sys.exit(
                'Sorry, you MUST add the SickRage folder to the PYTHONPATH environment variable\n'
                'or find another way to force Python to use {} for string encoding.'
                .format(sickbeard.SYS_ENCODING))

        # Rename the main thread
        threading.currentThread().name = 'MAIN'

        args = SickRageArgumentParser(sickbeard.PROG_DIR).parse_args()

        if args.force_update:
            result = self.force_update()
            sys.exit(int(not result))  # Ok -> 0 , Error -> 1

        # Need console logging for SickBeard.py and SickBeard-console.exe
        sickbeard.NO_RESIZE = args.noresize
        self.console_logging = ((not hasattr(sys, 'frozen')) or (
            sickbeard.MY_NAME.lower().find('-console') > 0)) and not args.quiet
        self.no_launch = args.nolaunch
        self.forced_port = args.port
        if args.daemon:
            self.run_as_daemon = platform.system() != 'Windows'
            self.console_logging = False
            self.no_launch = True

        self.create_pid = bool(args.pidfile)
        self.pid_file = args.pidfile
        if self.pid_file and ek(os.path.exists, self.pid_file):
            # If the pid file already exists, SickRage may still be running, so exit
            raise SystemExit('PID file: {0} already exists. Exiting.'.format(
                self.pid_file))

        sickbeard.DATA_DIR = ek(
            os.path.abspath,
            args.datadir) if args.datadir else sickbeard.DATA_DIR
        sickbeard.CONFIG_FILE = ek(
            os.path.abspath, args.config) if args.config else ek(
                os.path.join, sickbeard.DATA_DIR, 'config.ini')

        # The pid file is only useful in daemon mode, make sure we can write the file properly
        if self.create_pid:
            if self.run_as_daemon:
                pid_dir = ek(os.path.dirname, self.pid_file)
                if not ek(os.access, pid_dir, os.F_OK):
                    sys.exit('PID dir: {0} doesn\'t exist. Exiting.'.format(
                        pid_dir))
                if not ek(os.access, pid_dir, os.W_OK):
                    raise SystemExit(
                        'PID dir: {0} must be writable (write permissions). Exiting.'
                        .format(pid_dir))
            else:
                if self.console_logging:
                    sys.stdout.write(
                        'Not running in daemon mode. PID file creation disabled.\n'
                    )
                self.create_pid = False

        # Make sure that we can create the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.F_OK):
            try:
                ek(os.makedirs, sickbeard.DATA_DIR, 0o744)
            except os.error:
                raise SystemExit('Unable to create data directory: {0}'.format(
                    sickbeard.DATA_DIR))

        # Make sure we can write to the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.W_OK):
            raise SystemExit('Data directory must be writeable: {0}'.format(
                sickbeard.DATA_DIR))

        # Make sure we can write to the config file
        if not ek(os.access, sickbeard.CONFIG_FILE, os.W_OK):
            if ek(os.path.isfile, sickbeard.CONFIG_FILE):
                raise SystemExit('Config file must be writeable: {0}'.format(
                    sickbeard.CONFIG_FILE))
            elif not ek(os.access, ek(os.path.dirname, sickbeard.CONFIG_FILE),
                        os.W_OK):
                raise SystemExit(
                    'Config file root dir must be writeable: {0}'.format(
                        ek(os.path.dirname, sickbeard.CONFIG_FILE)))

        ek(os.chdir, sickbeard.DATA_DIR)

        # Check if we need to perform a restore first
        restore_dir = ek(os.path.join, sickbeard.DATA_DIR, 'restore')
        if ek(os.path.exists, restore_dir):
            success = self.restore_db(restore_dir, sickbeard.DATA_DIR)
            if self.console_logging:
                sys.stdout.write(
                    'Restore: restoring DB and config.ini {0}!\n'.format(
                        ('FAILED', 'SUCCESSFUL')[success]))

        # Load the config and publish it to the sickbeard package
        if self.console_logging and not ek(os.path.isfile,
                                           sickbeard.CONFIG_FILE):
            sys.stdout.write(
                'Unable to find {0}, all settings will be default!\n'.format(
                    sickbeard.CONFIG_FILE))

        sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE, encoding='UTF-8')

        # Initialize the config and our threads
        sickbeard.initialize(consoleLogging=self.console_logging)

        if self.run_as_daemon:
            self.daemonize()

        # Get PID
        sickbeard.PID = os.getpid()

        # Build from the DB to start with
        self.load_shows_from_db()

        logger.log('Starting SickRage [{branch}] using \'{config}\''.format(
            branch=sickbeard.BRANCH, config=sickbeard.CONFIG_FILE))

        self.clear_cache()

        if self.forced_port:
            logger.log('Forcing web server to port {port}'.format(
                port=self.forced_port))
            self.start_port = self.forced_port
        else:
            self.start_port = sickbeard.WEB_PORT

        if sickbeard.WEB_LOG:
            self.log_dir = sickbeard.LOG_DIR
        else:
            self.log_dir = None

        # sickbeard.WEB_HOST is available as a configuration value in various
        # places but is not configurable. It is supported here for historic reasons.
        if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
            self.web_host = sickbeard.WEB_HOST
        else:
            self.web_host = '' if sickbeard.WEB_IPV6 else '0.0.0.0'

        # web server options
        self.web_options = {
            'port': int(self.start_port),
            'host': self.web_host,
            'data_root': ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME),
            'web_root': sickbeard.WEB_ROOT,
            'log_dir': self.log_dir,
            'username': sickbeard.WEB_USERNAME,
            'password': sickbeard.WEB_PASSWORD,
            'enable_https': sickbeard.ENABLE_HTTPS,
            'handle_reverse_proxy': sickbeard.HANDLE_REVERSE_PROXY,
            'https_cert': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_CERT),
            'https_key': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_KEY),
        }

        # start web server
        self.web_server = SRWebServer(self.web_options)
        self.web_server.start()

        # Fire up all our threads
        sickbeard.start()

        # Build internal name cache
        name_cache.buildNameCache()

        # Pre-populate network timezones, it isn't thread safe
        network_timezones.update_network_dict()

        # sure, why not?
        if sickbeard.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # Check for metadata indexer updates for shows (sets the next aired ep!)
        # sickbeard.showUpdateScheduler.forceRun()

        # Launch browser
        if sickbeard.LAUNCH_BROWSER and not (self.no_launch
                                             or self.run_as_daemon):
            sickbeard.launchBrowser(
                'https' if sickbeard.ENABLE_HTTPS else 'http', self.start_port,
                sickbeard.WEB_ROOT)

        # main loop
        while True:
            time.sleep(1)
Example #41
0
def unRAR(path, rarFiles, force, result):  # pylint: disable=too-many-branches,too-many-statements
    """
    Extracts RAR files

    :param path: Path to look for files in
    :param rarFiles: Names of RAR files
    :param force: process currently processing items
    :param result: Previous results
    :return: List of unpacked file names
    """

    unpacked_files = []

    if sickbeard.UNPACK and rarFiles:

        result.output += logHelper(u"Packed Releases detected: %s" % rarFiles, logger.DEBUG)

        for archive in rarFiles:

            result.output += logHelper(u"Unpacking archive: %s" % archive, logger.DEBUG)

            failure = None
            try:
                rar_handle = RarFile(ek(os.path.join, path, archive))

                # Skip extraction if any file in archive has previously been extracted
                skip_file = False
                for file_in_archive in [ek(os.path.basename, x.filename) for x in rar_handle.infolist() if not x.isdir]:
                    if already_postprocessed(path, file_in_archive, force, result):
                        result.output += logHelper(u"Archive file already post-processed, extraction skipped: %s" %
                                                   file_in_archive, logger.DEBUG)
                        skip_file = True
                        break

                if skip_file:
                    continue

                rar_handle.extract(path=path, withSubpath=False, overwrite=False)
                for x in rar_handle.infolist():
                    if not x.isdir:
                        basename = ek(os.path.basename, x.filename)
                        if basename not in unpacked_files:
                            unpacked_files.append(basename)
                del rar_handle

            except ArchiveHeaderBroken:
                failure = (u'Archive Header Broken', u'Unpacking failed because the Archive Header is Broken')
            except IncorrectRARPassword:
                failure = (u'Incorrect RAR Password', u'Unpacking failed because of an Incorrect Rar Password')
            except FileOpenError:
                failure = (u'File Open Error, check the parent folder and destination file permissions.',
                           u'Unpacking failed with a File Open Error (file permissions?)')
            except InvalidRARArchiveUsage:
                failure = (u'Invalid Rar Archive Usage', u'Unpacking Failed with Invalid Rar Archive Usage')
            except InvalidRARArchive:
                failure = (u'Invalid Rar Archive', u'Unpacking Failed with an Invalid Rar Archive Error')
            except Exception as e:
                failure = (ex(e), u'Unpacking failed for an unknown reason')

            if failure is not None:
                result.output += logHelper(u'Failed Unrar archive {}: {}'.format(archive, failure[0]), logger.ERROR)
                result.missedfiles.append(u'{} : Unpacking failed: {}'.format(archive, failure[1]))
                result.result = False
                continue

        result.output += logHelper(u"UnRar content: %s" % unpacked_files, logger.DEBUG)

    return unpacked_files
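
A hypothetical invocation of unRAR, sketched only to show the calling convention: the result holder below stands in for the ProcessResult object that processDir normally passes, and the path and archive names are made up.

# Minimal stand-in for the result object the caller passes in; only the
# attributes unRAR actually touches are defined.
class FakeResult(object):
    def __init__(self):
        self.output = u''
        self.missedfiles = []
        self.result = True

result = FakeResult()
# Hypothetical call; it only does work when sickbeard.UNPACK is enabled.
unpacked = unRAR('/downloads/Show.S01E01', ['show.s01e01.rar'], False, result)
print(unpacked)  # list of base filenames extracted from the archives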
Example #42
0
 def get_season_all_banner_path(self, show_obj):
     return ek(os.path.join, show_obj.location, self.season_all_banner_name)
Example #43
0
 def get_poster_path(self, show_obj):
     return ek(os.path.join, show_obj.location, self.poster_name)
Example #44
0
 def _cache_dir(self):
     """
     Builds up the full path to the image cache directory
     """
     return ek(os.path.abspath,
               ek(os.path.join, sickbeard.CACHE_DIR, 'images'))
Example #45
0
 def get_fanart_path(self, show_obj):
     return ek(os.path.join, show_obj.location, self.fanart_name)
Example #46
0
 def _thumbnails_dir(self):
     """
     Builds up the full path to the thumbnails image cache directory
     """
     return ek(os.path.abspath,
               ek(os.path.join, self._cache_dir(), 'thumbnails'))
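
A self-contained sketch of how the two directory helpers above compose; the CACHE_DIR value is illustrative.

import os.path

CACHE_DIR = '/opt/sickrage/cache'  # illustrative value for sickbeard.CACHE_DIR
cache_dir = os.path.abspath(os.path.join(CACHE_DIR, 'images'))
thumbnails_dir = os.path.abspath(os.path.join(cache_dir, 'thumbnails'))
print(cache_dir)       # /opt/sickrage/cache/images
print(thumbnails_dir)  # /opt/sickrage/cache/images/thumbnails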
Example #47
0
 def get_show_file_path(self, show_obj):
     return ek(os.path.join, show_obj.location,
               self._show_metadata_filename)
Example #48
0
    def init_logging(self,
                     console_logging=False,
                     file_logging=False,
                     debug_logging=False,
                     database_logging=False):
        """
        Initialize logging

        :param console_logging: True if logging to console
        :param file_logging: True if logging to file
        :param debug_logging: True if debug logging is enabled
        :param database_logging: True if logging database access
        """
        self.log_file = self.log_file or ek(os.path.join, sickbeard.LOG_DIR,
                                            'sickrage.log')

        global log_file
        log_file = self.log_file

        self.debug_logging = debug_logging
        self.console_logging = console_logging
        self.file_logging = file_logging
        self.database_logging = database_logging

        logging.addLevelName(DB, 'DB')  # add a new logging level DB
        logging.getLogger().addHandler(NullHandler())  # nullify root logger

        # set custom root logger
        for logger in self.loggers:
            if logger is not self.logger:
                logger.root = self.logger
                logger.parent = self.logger

        log_level = DB if self.database_logging else DEBUG if self.debug_logging else INFO

        # set minimum logging level allowed for loggers
        for logger in self.loggers:
            logger.setLevel(log_level)

        logging.getLogger("tornado.general").setLevel('ERROR')

        # console log handler
        if self.console_logging:
            console = logging.StreamHandler()
            console.setFormatter(
                CensoredFormatter('%(asctime)s %(levelname)s::%(message)s',
                                  '%H:%M:%S'))
            console.setLevel(log_level)

            for logger in self.loggers:
                logger.addHandler(console)

        # rotating log file handler
        if self.file_logging:
            rfh = logging.handlers.RotatingFileHandler(
                self.log_file,
                maxBytes=int(sickbeard.LOG_SIZE * 1048576),
                backupCount=sickbeard.LOG_NR,
                encoding='utf-8')
            rfh.setFormatter(
                CensoredFormatter('%(asctime)s %(levelname)-8s %(message)s',
                                  dateTimeFormat))
            rfh.setLevel(log_level)

            for logger in self.loggers:
                logger.addHandler(rfh)
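
A self-contained sketch of the handler wiring init_logging performs, using only the standard logging module; the formats are copied from above, while the log size and backup count are illustrative values.

import logging
import logging.handlers

log = logging.getLogger('sickrage.example')
log.setLevel(logging.INFO)

# console handler with the short time-only format used above
console = logging.StreamHandler()
console.setFormatter(logging.Formatter('%(asctime)s %(levelname)s::%(message)s', '%H:%M:%S'))
log.addHandler(console)

# rotating file handler; 1 MB per file and 5 backups are illustrative values
rfh = logging.handlers.RotatingFileHandler(
    'sickrage.log', maxBytes=1 * 1048576, backupCount=5, encoding='utf-8')
rfh.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s'))
log.addHandler(rfh)

log.info('logging initialised')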
Example #49
0
 def _has_fanart(self, show_obj):
     result = ek(os.path.isfile, self.get_fanart_path(show_obj))
     logger.log(
         u"Checking if " + self.get_fanart_path(show_obj) + " exists: " +
         str(result), logger.DEBUG)
     return result
Example #50
0
    def submit_errors(self):  # pylint: disable=too-many-branches,too-many-locals

        submitter_result = ''
        issue_id = None

        gh_credentials = (sickbeard.GIT_AUTH_TYPE == 0 and sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD) \
            or (sickbeard.GIT_AUTH_TYPE == 1 and sickbeard.GIT_TOKEN)

        if not all((gh_credentials, sickbeard.DEBUG, sickbeard.gh,
                    classes.ErrorViewer.errors)):
            submitter_result = 'Please set your GitHub token or username and password in the config and enable debug. Unable to submit issue ticket to GitHub!'
            return submitter_result, issue_id

        try:
            from sickbeard.versionChecker import CheckVersion
            checkversion = CheckVersion()
            checkversion.check_for_new_version()
            commits_behind = checkversion.updater.get_num_commits_behind()
        except Exception:  # pylint: disable=broad-except
            submitter_result = 'Could not check if your SickRage is updated, unable to submit issue ticket to GitHub!'
            return submitter_result, issue_id

        if commits_behind is None or commits_behind > 0:
            submitter_result = 'Please update SickRage, unable to submit issue ticket to GitHub with an outdated version!'
            return submitter_result, issue_id

        if self.submitter_running:
            submitter_result = 'Issue submitter is running, please wait for it to complete'
            return submitter_result, issue_id

        self.submitter_running = True

        try:
            # read log file
            __log_data = []

            if ek(os.path.isfile, self.log_file):
                with io.open(self.log_file, encoding='utf-8') as log_f:
                    __log_data = log_f.readlines()

            for i in range(1, int(sickbeard.LOG_NR)):
                f_name = '{0}.{1:d}'.format(self.log_file, i)
                if ek(os.path.isfile, f_name) and (len(__log_data) <= 500):
                    with io.open(f_name, encoding='utf-8') as log_f:
                        __log_data += log_f.readlines()

            __log_data = list(reversed(__log_data))

            # parse and submit errors to issue tracker
            for cur_error in sorted(classes.ErrorViewer.errors,
                                    key=lambda error: error.time,
                                    reverse=True)[:500]:
                try:
                    title_error = ss(str(cur_error.title))
                    if not title_error or title_error == 'None':
                        title_error = re.match(
                            r'^[A-Z0-9\-\[\] :]+::\s*(.*)(?: \[[\w]{7}\])$',
                            ss(cur_error.message)).group(1)

                    if len(title_error) > 1000:
                        title_error = title_error[0:1000]

                except Exception as err_msg:  # pylint: disable=broad-except
                    self.log(
                        'Unable to get error title : {0}'.format(ex(err_msg)),
                        ERROR)
                    title_error = 'UNKNOWN'

                gist = None
                regex = r'^({0})\s+([A-Z]+)\s+([0-9A-Z\-]+)\s*(.*)(?: \[[\w]{{7}}\])$'.format(
                    cur_error.time)
                for i, data in enumerate(__log_data):
                    match = re.match(regex, data)
                    if match:
                        level = match.group(2)
                        if LOGGING_LEVELS[level] == ERROR:
                            paste_data = ''.join(__log_data[i:i + 50])
                            if paste_data:
                                gist = sickbeard.gh.get_user().create_gist(
                                    False, {
                                        'sickrage.log':
                                        InputFileContent(paste_data)
                                    })
                            break
                    else:
                        gist = 'No ERROR found'

                try:
                    locale_name = locale.getdefaultlocale()[1]
                except Exception:  # pylint: disable=broad-except
                    locale_name = 'unknown'

                if gist and gist != 'No ERROR found':
                    log_link = 'Link to Log: {0}'.format(gist.html_url)
                else:
                    log_link = 'No Log available with ERRORS:'

                msg = [
                    '### INFO',
                    'Python Version: **{0}**'.format(sys.version[:120].replace(
                        '\n', '')),
                    'Operating System: **{0}**'.format(platform.platform()),
                    'Locale: {0}'.format(locale_name),
                    'Branch: **{0}**'.format(sickbeard.BRANCH),
                    'Commit: SickRage/SickRage@{0}'.format(
                        sickbeard.CUR_COMMIT_HASH),
                    log_link,
                    '### ERROR',
                    '```',
                    cur_error.message,
                    '```',
                    '---',
                    '_STAFF NOTIFIED_: @SickRage/owners @SickRage/moderators',
                ]

                message = '\n'.join(msg)
                title_error = '[APP SUBMITTED]: {0}'.format(title_error)

                repo = sickbeard.gh.get_organization(
                    sickbeard.GIT_ORG).get_repo(sickbeard.GIT_REPO)
                reports = repo.get_issues(state='all')

                def is_ascii_error(title):
                    # [APP SUBMITTED]: 'ascii' codec can't encode characters in position 00-00: ordinal not in range(128)
                    # [APP SUBMITTED]: 'charmap' codec can't decode byte 0x00 in position 00: character maps to <undefined>
                    return re.search(
                        r'.* codec can\'t .*code .* in position .*:',
                        title) is not None

                def is_malformed_error(title):
                    # [APP SUBMITTED]: not well-formed (invalid token): line 0, column 0
                    return re.search(
                        r'.* not well-formed \(invalid token\): line .* column .*',
                        title) is not None

                ascii_error = is_ascii_error(title_error)
                malformed_error = is_malformed_error(title_error)

                issue_found = False
                for report in reports:
                    if title_error.rsplit(' :: ')[-1] in report.title or \
                        (malformed_error and is_malformed_error(report.title)) or \
                            (ascii_error and is_ascii_error(report.title)):

                        issue_id = report.number
                        if not report.raw_data['locked']:
                            if report.create_comment(message):
                                submitter_result = 'Commented on existing issue #{0} successfully!'.format(
                                    issue_id)
                            else:
                                submitter_result = 'Failed to comment on found issue #{0}!'.format(
                                    issue_id)
                        else:
                            submitter_result = 'Issue #{0} is locked, check GitHub to find info about the error.'.format(
                                issue_id)

                        issue_found = True
                        break

                if not issue_found:
                    issue = repo.create_issue(title_error, message)
                    if issue:
                        issue_id = issue.number
                        submitter_result = 'Your issue ticket #{0} was submitted successfully!'.format(
                            issue_id)
                    else:
                        submitter_result = 'Failed to create a new issue!'

                if issue_id and cur_error in classes.ErrorViewer.errors:
                    # clear error from error list
                    classes.ErrorViewer.errors.remove(cur_error)
        except GhEx.RateLimitExceededException:
            submitter_result = 'Your Github user has exceeded its API rate limit, please try again later'
            issue_id = None
        except GhEx.TwoFactorException:
            submitter_result = (
                'Your Github account requires Two-Factor Authentication, '
                'please change your auth method in the config')
            issue_id = None
        except Exception:  # pylint: disable=broad-except
            self.log(traceback.format_exc(), ERROR)
            submitter_result = 'Exception generated in issue submitter, please check the log'
            issue_id = None
        finally:
            self.submitter_running = False

        return submitter_result, issue_id
Example #51
0
    def retrieveShowMetadata(self, folder):
        """
        Used only when mass adding Existing Shows, using previously generated Show metadata to reduce the need to query TVDB.
        """

        empty_return = (None, None, None)

        metadata_path = ek(os.path.join, folder, self._show_metadata_filename)

        if not ek(os.path.isdir, folder) or not ek(os.path.isfile,
                                                   metadata_path):
            logger.log(
                u"Can't load the metadata file from " + repr(metadata_path) +
                ", it doesn't exist", logger.DEBUG)
            return empty_return

        logger.log(u"Loading show info from metadata file in " + folder,
                   logger.DEBUG)

        try:
            with open(metadata_path, 'r') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            if showXML.findtext('title') is None or (
                    showXML.findtext('tvdbid') is None
                    and showXML.findtext('id') is None):
                logger.log(
                    u"Invalid info in tvshow.nfo (missing name or id): %s %s %s"
                    % (showXML.findtext('title'), showXML.findtext('tvdbid'),
                       showXML.findtext('id')))
                return empty_return

            name = showXML.findtext('title')

            if showXML.findtext('tvdbid') is not None:
                indexer_id = int(showXML.findtext('tvdbid'))
            elif showXML.findtext('id') is not None:
                indexer_id = int(showXML.findtext('id'))
            else:
                logger.log(
                    u"Empty <id> or <tvdbid> field in NFO, unable to find a ID",
                    logger.WARNING)
                return empty_return

            if indexer_id is None:
                logger.log(
                    u"Invalid Indexer ID (" + str(indexer_id) +
                    "), not using metadata file", logger.WARNING)
                return empty_return

            indexer = None
            if showXML.find('episodeguide/url') is not None:
                epg_url = showXML.findtext('episodeguide/url').lower()
                if str(indexer_id) in epg_url:
                    if 'thetvdb.com' in epg_url:
                        indexer = 1
                    elif 'tvrage' in epg_url:
                        logger.log(
                            u"Invalid Indexer ID (" + str(indexer_id) +
                            "), not using metadata file because it has TVRage info",
                            logger.WARNING)
                        return empty_return

        except Exception as e:
            logger.log(
                u"There was an error parsing your existing metadata file: '" +
                metadata_path + "' error: " + ex(e), logger.WARNING)
            return empty_return

        return indexer_id, name, indexer
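
A self-contained sketch of the tvshow.nfo shape retrieveShowMetadata expects; the title, id and URL below are made up, and APIKEY is a placeholder.

import xml.etree.ElementTree as etree

sample_nfo = """<tvshow>
    <title>Example Show</title>
    <tvdbid>123456</tvdbid>
    <episodeguide>
        <url>http://thetvdb.com/api/APIKEY/series/123456/all/en.zip</url>
    </episodeguide>
</tvshow>"""

showXML = etree.ElementTree(etree.fromstring(sample_nfo))
name = showXML.findtext('title')              # 'Example Show'
indexer_id = int(showXML.findtext('tvdbid'))  # 123456
indexer = 1 if 'thetvdb.com' in showXML.findtext('episodeguide/url').lower() else None
print(name, indexer_id, indexer)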
Example #52
0
def log_data(min_level, log_filter, log_search, max_lines):
    regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
    if log_filter not in LOG_FILTERS:
        log_filter = '<NONE>'

    final_data = []

    log_files = []
    if ek(os.path.isfile, Wrapper.instance.log_file):
        log_files.append(Wrapper.instance.log_file)

        for i in range(1, int(sickbeard.LOG_NR)):
            name = Wrapper.instance.log_file + "." + str(i)
            if not ek(os.path.isfile, name):
                break
            log_files.append(name)
    else:
        return final_data

    data = []
    for _log_file in log_files:
        if len(data) < max_lines:
            with io.open(_log_file, 'r', encoding='utf-8') as f:
                data += [
                    line.strip() + '\n' for line in reversed(f.readlines())
                    if line.strip()
                ]
        else:
            break

    found_lines = 0
    for x in data:
        match = re.match(regex, x)

        if match:
            level = match.group(7)
            log_name = match.group(8)

            if not sickbeard.DEBUG and level == 'DEBUG':
                continue

            if not sickbeard.DBDEBUG and level == 'DB':
                continue

            if level not in LOGGING_LEVELS:
                final_data.append('AA ' + x)
                found_lines += 1
            elif log_search and log_search.lower() in x.lower():
                final_data.append(x)
                found_lines += 1
            elif not log_search and LOGGING_LEVELS[level] >= int(
                    min_level) and (log_filter == '<NONE>'
                                    or log_name.startswith(log_filter)):
                final_data.append(x)
                found_lines += 1
        else:
            final_data.append('AA ' + x)
            found_lines += 1

        if found_lines >= max_lines:
            break

    return final_data
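
A self-contained sketch of the log-line pattern log_data matches; the sample line is made up but follows the "<date> <time> <LEVEL> <THREAD> :: <message>" layout the regex expects.

import re

regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
sample = "2016-01-02 10:11:12 INFO SEARCHQUEUE-BACKLOG-12345 :: Some message"

match = re.match(regex, sample)
if match:
    level, log_name, message = match.group(7), match.group(8), match.group(9)
    print(level, log_name, message)  # INFO SEARCHQUEUE-BACKLOG-12345 Some message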
Example #53
0
    def test_combos(self):

        for (name, result, which_regexes) in combination_test_cases:
            # Normalise the paths. Converts UNIX-style paths into Windows-style
            # paths when test is run on Windows.
            self._test_combo(ek(os.path.normpath, name), result, which_regexes)
Example #54
0
from sickrage.helper import video_screen_size
from sickrage.helper.encoding import ek
from sickrage.recompiled import tags
from sickrage.tagger.episode import EpisodeTags

gettext.install('messages', unicode=1, codeset='UTF-8', names=["ngettext"])

# If some provider has an issue with functionality of SR, other than user agents, it's best to come talk to us rather than block.
# It is no different than us going to a provider if we have questions or issues. Be a team player here.
# This is disabled, was only added for testing, and has no config.ini or web ui setting. To enable, set SPOOF_USER_AGENT = True
SPOOF_USER_AGENT = False
INSTANCE_ID = str(uuid.uuid1())
USER_AGENT = ('Sick-Rage.CE.1/(' + platform.system() + '; ' +
              platform.release() + '; ' + INSTANCE_ID + ')')
UA_SETTINGS.DB = ek(
    path.abspath,
    ek(path.join, ek(path.dirname, __file__), '../lib/fake_useragent/ua.json'))
UA_POOL = UserAgent()
if SPOOF_USER_AGENT:
    USER_AGENT = UA_POOL.random

cpu_presets = {'HIGH': 5, 'NORMAL': 2, 'LOW': 1}

# Other constants
MULTI_EP_RESULT = -1
SEASON_RESULT = -2

# Notification Types
NOTIFY_SNATCH = 1
NOTIFY_DOWNLOAD = 2
NOTIFY_SUBTITLE_DOWNLOAD = 3
Example #55
0
    def start(self):
        # do some preliminary stuff
        sickbeard.MY_FULLNAME = ek(os.path.normpath, ek(os.path.abspath, __file__))
        sickbeard.MY_NAME = ek(os.path.basename, sickbeard.MY_FULLNAME)
        sickbeard.PROG_DIR = ek(os.path.dirname, sickbeard.MY_FULLNAME)
        sickbeard.DATA_DIR = sickbeard.PROG_DIR
        sickbeard.MY_ARGS = sys.argv[1:]

        try:
            locale.setlocale(locale.LC_ALL, "")
            sickbeard.SYS_ENCODING = locale.getpreferredencoding()
        except (locale.Error, IOError):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # pylint: disable=no-member
        if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING.lower() in ('ansi_x3.4-1968', 'us-ascii', 'ascii', 'charmap') or \
            (sys.platform.startswith('win') and sys.getwindowsversion()[0] >= 6 and str(getattr(sys.stdout, 'device', sys.stdout).encoding).lower() in ('cp65001', 'charmap')):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # TODO: Continue working on making this unnecessary, this hack creates all sorts of hellish problems
        if not hasattr(sys, "setdefaultencoding"):
            reload(sys)

        try:
            # pylint: disable=no-member
            # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(sickbeard.SYS_ENCODING)
        except Exception:
            sys.exit("Sorry, you MUST add the SickRage folder to the PYTHONPATH environment variable\n" +
                     "or find another way to force Python to use " + sickbeard.SYS_ENCODING + " for string encoding.")

        # Need console logging for SickBeard.py and SickBeard-console.exe
        self.consoleLogging = (not hasattr(sys, "frozen")) or (sickbeard.MY_NAME.lower().find('-console') > 0)

        # Rename the main thread
        threading.currentThread().name = u"MAIN"

        try:
            opts, _ = getopt.getopt(
                sys.argv[1:], "hqdp::",
                ['help', 'quiet', 'nolaunch', 'daemon', 'pidfile=', 'port=', 'datadir=', 'config=', 'noresize']
            )
        except getopt.GetoptError:
            sys.exit(self.help_message())

        for o, a in opts:
            # Prints help message
            if o in ('-h', '--help'):
                sys.exit(self.help_message())

            # For now we'll just silence the logging
            if o in ('-q', '--quiet'):
                self.consoleLogging = False

            # Suppress launching web browser
            # Needed for OSes without default browser assigned
            # Prevent duplicate browser window when restarting in the app
            if o in ('--nolaunch',):
                self.noLaunch = True

            # Override default/configured port
            if o in ('-p', '--port'):
                try:
                    self.forcedPort = int(a)
                except ValueError:
                    sys.exit("Port: " + str(a) + " is not a number. Exiting.")

            # Run as a double forked daemon
            if o in ('-d', '--daemon'):
                self.runAsDaemon = True
                # When running as daemon disable consoleLogging and don't start browser
                self.consoleLogging = False
                self.noLaunch = True

                if sys.platform == 'win32' or sys.platform == 'darwin':
                    self.runAsDaemon = False

            # Write a pidfile if requested
            if o in ('--pidfile',):
                self.CREATEPID = True
                self.PIDFILE = str(a)

                # If the pidfile already exists, sickbeard may still be running, so exit
                if ek(os.path.exists, self.PIDFILE):
                    sys.exit("PID file: " + self.PIDFILE + " already exists. Exiting.")

            # Specify folder to load the config file from
            if o in ('--config',):
                sickbeard.CONFIG_FILE = ek(os.path.abspath, a)

            # Specify folder to use as the data dir
            if o in ('--datadir',):
                sickbeard.DATA_DIR = ek(os.path.abspath, a)

            # Prevent resizing of the banner/posters even if PIL is installed
            if o in ('--noresize',):
                sickbeard.NO_RESIZE = True

        # The pidfile is only useful in daemon mode, make sure we can write the file properly
        if self.CREATEPID:
            if self.runAsDaemon:
                pid_dir = ek(os.path.dirname, self.PIDFILE)
                if not ek(os.access, pid_dir, os.F_OK):
                    sys.exit("PID dir: " + pid_dir + " doesn't exist. Exiting.")
                if not ek(os.access, pid_dir, os.W_OK):
                    sys.exit("PID dir: " + pid_dir + " must be writable (write permissions). Exiting.")

            else:
                if self.consoleLogging:
                    sys.stdout.write(u"Not running in daemon mode. PID file creation disabled.\n")

                self.CREATEPID = False

        # If they don't specify a config file then put it in the data dir
        if not sickbeard.CONFIG_FILE:
            sickbeard.CONFIG_FILE = ek(os.path.join, sickbeard.DATA_DIR, "config.ini")

        # Make sure that we can create the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.F_OK):
            try:
                ek(os.makedirs, sickbeard.DATA_DIR, 0744)
            except os.error:
                raise SystemExit("Unable to create datadir '" + sickbeard.DATA_DIR + "'")

        # Make sure we can write to the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.W_OK):
            raise SystemExit("Datadir must be writeable '" + sickbeard.DATA_DIR + "'")

        # Make sure we can write to the config file
        if not ek(os.access, sickbeard.CONFIG_FILE, os.W_OK):
            if ek(os.path.isfile, sickbeard.CONFIG_FILE):
                raise SystemExit("Config file '" + sickbeard.CONFIG_FILE + "' must be writeable.")
            elif not ek(os.access, ek(os.path.dirname, sickbeard.CONFIG_FILE), os.W_OK):
                raise SystemExit(
                    "Config file root dir '" + ek(os.path.dirname, sickbeard.CONFIG_FILE) + "' must be writeable.")

        ek(os.chdir, sickbeard.DATA_DIR)

        # Check if we need to perform a restore first
        restoreDir = ek(os.path.join, sickbeard.DATA_DIR, 'restore')
        if ek(os.path.exists, restoreDir):
            success = self.restoreDB(restoreDir, sickbeard.DATA_DIR)
            if self.consoleLogging:
                sys.stdout.write(u"Restore: restoring DB and config.ini %s!\n" % ("FAILED", "SUCCESSFUL")[success])

        # Load the config and publish it to the sickbeard package
        if self.consoleLogging and not ek(os.path.isfile, sickbeard.CONFIG_FILE):
            sys.stdout.write(u"Unable to find '" + sickbeard.CONFIG_FILE + "' , all settings will be default!" + "\n")

        sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE)

        # Initialize the config and our threads
        sickbeard.initialize(consoleLogging=self.consoleLogging)

        if self.runAsDaemon:
            self.daemonize()

        # Get PID
        sickbeard.PID = os.getpid()

        # Build from the DB to start with
        self.loadShowsFromDB()

        if self.forcedPort:
            logger.log(u"Forcing web server to port " + str(self.forcedPort))
            self.startPort = self.forcedPort
        else:
            self.startPort = sickbeard.WEB_PORT

        if sickbeard.WEB_LOG:
            self.log_dir = sickbeard.LOG_DIR
        else:
            self.log_dir = None

        # sickbeard.WEB_HOST is available as a configuration value in various
        # places but is not configurable. It is supported here for historic reasons.
        if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
            self.webhost = sickbeard.WEB_HOST
        else:
            if sickbeard.WEB_IPV6:
                self.webhost = '::'
            else:
                self.webhost = '0.0.0.0'

        # web server options
        self.web_options = {
            'port': int(self.startPort),
            'host': self.webhost,
            'data_root': ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME),
            'web_root': sickbeard.WEB_ROOT,
            'log_dir': self.log_dir,
            'username': sickbeard.WEB_USERNAME,
            'password': sickbeard.WEB_PASSWORD,
            'enable_https': sickbeard.ENABLE_HTTPS,
            'handle_reverse_proxy': sickbeard.HANDLE_REVERSE_PROXY,
            'https_cert': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_CERT),
            'https_key': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_KEY),
        }

        # start web server
        self.webserver = SRWebServer(self.web_options)
        self.webserver.start()

        if self.consoleLogging:
            print "Starting up SickRage " + sickbeard.BRANCH + " from " + sickbeard.CONFIG_FILE

        # Clean up after update
        if sickbeard.GIT_NEWVER:
            toclean = ek(os.path.join, sickbeard.CACHE_DIR, 'mako')
            for root, dirs, files in ek(os.walk, toclean, topdown=False):
                for name in files:
                    ek(os.remove, ek(os.path.join, root, name))
                for name in dirs:
                    ek(os.rmdir, ek(os.path.join, root, name))
            sickbeard.GIT_NEWVER = False

        # Fire up all our threads
        sickbeard.start()

        # Build internal name cache
        name_cache.buildNameCache()

        # Prepopulate network timezones, it isn't thread safe
        network_timezones.update_network_dict()

        # sure, why not?
        if sickbeard.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # # Check for metadata indexer updates for shows (Disabled until we use api)
        # sickbeard.showUpdateScheduler.forceRun()

        # Launch browser
        if sickbeard.LAUNCH_BROWSER and not (self.noLaunch or self.runAsDaemon):
            sickbeard.launchBrowser('https' if sickbeard.ENABLE_HTTPS else 'http', self.startPort, sickbeard.WEB_ROOT)

        # main loop
        while True:
            time.sleep(1)
Example #56
0
    def scene_quality(name, anime=False):  # pylint: disable=too-many-branches, too-many-statements
        """
        Return The quality from the scene episode File

        :param name: Episode filename to analyse
        :param anime: Boolean to indicate if the show we're resolving is Anime
        :return: Quality
        """

        if not name:
            return Quality.UNKNOWN

        name = ek(path.basename, name)

        result = None
        ep = EpisodeTags(name)

        if anime:
            sd_options = tags.anime_sd.search(name)
            hd_options = tags.anime_hd.search(name)
            full_hd = tags.anime_fullhd.search(name)
            ep.rex[b'bluray'] = tags.anime_bluray

            # BluRay
            if ep.bluray and (full_hd or hd_options):
                result = Quality.FULLHDBLURAY if full_hd else Quality.HDBLURAY
            # HD TV
            elif not ep.bluray and (full_hd or hd_options):
                result = Quality.FULLHDTV if full_hd else Quality.HDTV
            # SD DVD
            elif ep.dvd:
                result = Quality.SDDVD
            # SD TV
            elif sd_options:
                result = Quality.SDTV

            return Quality.UNKNOWN if result is None else result

        # Is it UHD?
        if ep.vres in [2160, 4320] and ep.scan == 'p':
            # BluRay
            full_res = (ep.vres == 4320)
            if ep.avc and ep.bluray:
                result = Quality.UHD_4K_BLURAY if not full_res else Quality.UHD_8K_BLURAY
            # WEB-DL
            elif (ep.avc and ep.itunes) or ep.web:
                result = Quality.UHD_4K_WEBDL if not full_res else Quality.UHD_8K_WEBDL
            # HDTV
            elif ep.avc and ep.tv == 'hd':
                result = Quality.UHD_4K_TV if not full_res else Quality.UHD_8K_TV

        # Is it HD?
        elif ep.vres in [1080, 720]:
            if ep.scan == 'p':
                # BluRay
                full_res = (ep.vres == 1080)
                if ep.avc and (ep.bluray or ep.hddvd):
                    result = Quality.FULLHDBLURAY if full_res else Quality.HDBLURAY
                # WEB-DL
                elif (ep.avc and ep.itunes) or ep.web:
                    result = Quality.FULLHDWEBDL if full_res else Quality.HDWEBDL
                # HDTV
                elif ep.avc and ep.tv == 'hd':
                    result = Quality.FULLHDTV if full_res else Quality.HDTV  #1080 HDTV h264
                # MPEG2 encoded
                elif all([ep.vres == 1080, ep.tv == 'hd', ep.mpeg]):
                    result = Quality.RAWHDTV
                elif all([ep.vres == 720, ep.tv == 'hd', ep.mpeg]):
                    result = Quality.RAWHDTV
            elif (ep.res == '1080i') and ep.tv == 'hd' and (
                    ep.mpeg or (ep.raw and ep.avc_non_free)):
                result = Quality.RAWHDTV
        elif ep.hrws:
            result = Quality.HDTV

        # Is it SD?
        elif ep.xvid or ep.avc:
            # SD DVD
            if ep.dvd or ep.bluray:
                result = Quality.SDDVD
            # SDTV
            elif ep.res == '480p' or any([ep.tv, ep.sat, ep.web]):
                result = Quality.SDTV
        elif ep.dvd:
            # SD DVD
            result = Quality.SDDVD
        elif ep.tv:
            # SD TV/HD TV
            result = Quality.SDTV

        return Quality.UNKNOWN if result is None else result
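
Illustrative usage only, assuming the Quality class above is importable from sickbeard.common; the filenames are made up and the expected results are what the branches above should yield for them.

from sickbeard.common import Quality  # assumed import path

print(Quality.scene_quality('Show.Name.S01E01.1080p.BluRay.x264-GRP.mkv'))  # expected: Quality.FULLHDBLURAY
print(Quality.scene_quality('Show.Name.S01E02.720p.HDTV.x264-GRP.mkv'))     # expected: Quality.HDTV
print(Quality.scene_quality('Show.Name.S01E03.DVDRip.XviD-GRP.avi'))        # expected: Quality.SDDVD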
Example #57
0
def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, delete_on=False, failed=False, proc_type="auto"):
    """
    Scans through the files in dirName and processes whatever media files it finds

    :param dirName: The folder name to look in
    :param nzbName: The NZB name which resulted in this folder being downloaded
    :param process_method: Processing method to use (e.g. copy, move, hardlink, symlink); defaults to the configured method
    :param force: True to postprocess already postprocessed files
    :param is_priority: Whether to treat the download as a priority (passed through to process_media)
    :param delete_on: Whether to delete files and folders after they are processed
    :param failed: Boolean for whether or not the download failed
    :param proc_type: Type of postprocessing auto or manual
    """

    result = ProcessResult()

    # if they passed us a real dir then assume it's the one we want
    if ek(os.path.isdir, dirName):
        dirName = ek(os.path.realpath, dirName)
        result.output += logHelper(u"Processing folder %s" % dirName, logger.DEBUG)

    # if the client and SickRage are not on the same machine translate the directory into a network directory
    elif all([sickbeard.TV_DOWNLOAD_DIR,
              ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR),
              ek(os.path.normpath, dirName) == ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR)]):
        dirName = ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek(os.path.abspath, dirName).split(os.path.sep)[-1])
        result.output += logHelper(u"Trying to use folder: %s " % dirName, logger.DEBUG)

    # if we didn't find a real dir then quit
    if not ek(os.path.isdir, dirName):
        result.output += logHelper(u"Unable to figure out what folder to process. "
                                   u"If your downloader and SickRage aren't on the same PC "
                                   u"make sure you fill out your TV download dir in the config.",
                                   logger.DEBUG)
        return result.output

    path, dirs, files = get_path_dir_files(dirName, nzbName, proc_type)

    files = [x for x in files if not is_torrent_or_nzb_file(x)]
    SyncFiles = [x for x in files if is_sync_file(x)]
    nzbNameOriginal = nzbName

    # Don't post process if files are still being synced and option is activated
    postpone = SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES

    # Let the user know when 'postpone postprocessing if no subtitle available' is enabled; helps debug post-processing issues
    if sickbeard.POSTPONE_IF_NO_SUBS:
        result.output += logHelper(u"Feature 'postpone postprocessing if no subtitle available' is enabled", logger.INFO)

    if not postpone:
        result.output += logHelper(u"PostProcessing Path: %s" % path, logger.INFO)
        result.output += logHelper(u"PostProcessing Dirs: %s" % str(dirs), logger.DEBUG)

        videoFiles = [x for x in files if helpers.isMediaFile(x)]
        rarFiles = [x for x in files if helpers.isRarFile(x)]
        rarContent = []
        if rarFiles and not (sickbeard.POSTPONE_IF_NO_SUBS and videoFiles):
            # Unpack only if video file was not already extracted by 'postpone if no subs' feature
            rarContent = unRAR(path, rarFiles, force, result)
            files += rarContent
            videoFiles += [x for x in rarContent if helpers.isMediaFile(x)]
        videoInRar = [x for x in rarContent if helpers.isMediaFile(x)] if rarContent else []

        result.output += logHelper(u"PostProcessing Files: %s" % files, logger.DEBUG)
        result.output += logHelper(u"PostProcessing VideoFiles: %s" % videoFiles, logger.DEBUG)
        result.output += logHelper(u"PostProcessing RarContent: %s" % rarContent, logger.DEBUG)
        result.output += logHelper(u"PostProcessing VideoInRar: %s" % videoInRar, logger.DEBUG)

        # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten).
        nzbName = None if len(videoFiles) >= 2 else nzbName

        process_method = process_method if process_method else sickbeard.PROCESS_METHOD
        result.result = True

        # Don't Link media when the media is extracted from a rar in the same path
        if process_method in (u'hardlink', u'symlink') and videoInRar:
            process_media(path, videoInRar, nzbName, u'move', force, is_priority, result)
            delete_files(path, rarContent, result)
            for video in set(videoFiles) - set(videoInRar):
                process_media(path, [video], nzbName, process_method, force, is_priority, result)
        elif sickbeard.DELRARCONTENTS and videoInRar:
            process_media(path, videoInRar, nzbName, process_method, force, is_priority, result)
            delete_files(path, rarContent, result, True)
            for video in set(videoFiles) - set(videoInRar):
                process_media(path, [video], nzbName, process_method, force, is_priority, result)
        else:
            for video in videoFiles:
                process_media(path, [video], nzbName, process_method, force, is_priority, result)

    else:
        result.output += logHelper(u"Found temporary sync files: %s in path: %s" % (SyncFiles, path))
        result.output += logHelper(u"Skipping post processing for folder: %s" % path)
        result.missedfiles.append(u"%s : Syncfiles found" % path)

    # Process video files in all TV subdirectories
    for curDir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed, result)]:
        result.result = True

        for processPath, _, fileList in ek(os.walk, ek(os.path.join, path, curDir), topdown=False):

            if not validateDir(path, processPath, nzbNameOriginal, failed, result):
                continue

            SyncFiles = [x for x in fileList if is_sync_file(x)]

            # Don't post process if files are still being synced and option is activated
            postpone = SyncFiles and sickbeard.POSTPONE_IF_SYNC_FILES

            if not postpone:
                videoFiles = [x for x in fileList if helpers.isMediaFile(x)]
                rarFiles = [x for x in fileList if helpers.isRarFile(x)]
                rarContent = []
                if rarFiles and not (sickbeard.POSTPONE_IF_NO_SUBS and videoFiles):
                    # Unpack only if video file was not already extracted by 'postpone if no subs' feature
                    rarContent = unRAR(processPath, rarFiles, force, result)
                    fileList = set(fileList + rarContent)
                    videoFiles += [x for x in rarContent if helpers.isMediaFile(x)]

                videoInRar = [x for x in rarContent if helpers.isMediaFile(x)] if rarContent else []
                notwantedFiles = [x for x in fileList if x not in videoFiles]
                if notwantedFiles:
                    result.output += logHelper(u"Found unwanted files: %s" % notwantedFiles, logger.DEBUG)

                # Don't Link media when the media is extracted from a rar in the same path
                if process_method in (u'hardlink', u'symlink') and videoInRar:
                    process_media(processPath, videoInRar, nzbName, u'move', force, is_priority, result)
                    process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
                                  is_priority, result)
                    delete_files(processPath, rarContent, result)
                elif sickbeard.DELRARCONTENTS and videoInRar:
                    process_media(processPath, videoInRar, nzbName, process_method, force, is_priority, result)
                    process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force,
                                  is_priority, result)
                    delete_files(processPath, rarContent, result, True)
                else:
                    process_media(processPath, videoFiles, nzbName, process_method, force, is_priority, result)

                    # Delete all files that are no longer needed, but skip deletion on a manual run unless delete_on is set
                    if not (process_method == u"move" and result.result) or (proc_type == u"manual" and not delete_on):
                        continue

                    delete_folder(ek(os.path.join, processPath, u'@eaDir'))
                    delete_files(processPath, notwantedFiles, result)

                    if all([not sickbeard.NO_DELETE or proc_type == u"manual",
                            process_method == u"move",
                            ek(os.path.normpath, processPath) != ek(os.path.normpath, sickbeard.TV_DOWNLOAD_DIR)]):

                        if delete_folder(processPath, check_empty=True):
                            result.output += logHelper(u"Deleted folder: %s" % processPath, logger.DEBUG)

            else:
                result.output += logHelper(u"Found temporary sync files: %s in path: %s" % (SyncFiles, processPath))
                result.output += logHelper(u"Skipping post processing for folder: %s" % processPath)
                result.missedfiles.append(u"%s : Syncfiles found" % processPath)

    if result.aggresult:
        result.output += logHelper(u"Successfully processed")
        if result.missedfiles:
            result.output += logHelper(u"I did encounter some unprocessable items: ")
            for missedfile in result.missedfiles:
                result.output += logHelper(u"[%s]" % missedfile)
    else:
        result.output += logHelper(u"Problem(s) during processing, failed the following files/folders:  ", logger.WARNING)
        for missedfile in result.missedfiles:
            result.output += logHelper(u"[%s]" % missedfile, logger.WARNING)

    return result.output
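
For reference, a hedged sketch of how this entry point might be driven for a manual run; the folder, NZB name and method below are illustrative assumptions only, and an initialised sickbeard environment is required.

# Illustrative only; assumes sickbeard is configured and the folder exists.
log_output = processDir(
    '/downloads/complete/Some.Show.S01E01.720p.HDTV.x264-GRP',  # hypothetical folder
    nzbName='Some.Show.S01E01.720p.HDTV.x264-GRP.nzb',          # hypothetical NZB name
    process_method='move',
    proc_type='manual',
)
print(log_output)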
Example #58
    def run(self, force=False):

        if not sickbeard.USE_SUBTITLES:
            return

        if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
            logger.log(
                u'Not enough services selected. At least 1 service is required to search subtitles in the background',
                logger.WARNING)
            return

        self.amActive = True

        logger.log(u'Checking for subtitles', logger.INFO)

        # get episodes on which we want subtitles
        # criteria is:
        #  - show subtitles = 1
        #  - episode subtitles != config wanted languages or 'und' (depends on config multi)
        #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
        #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d

        today = datetime.date.today().toordinal()

        # you have 5 minutes to understand that one. Good luck
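        # (e.airdate is stored as a date ordinal, so the (? - e.airdate) term in the
        # query below yields the episode age in whole days)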
        myDB = db.DBConnection()

        sqlResults = myDB.select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, '
            'e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, '
            'e.location, (? - e.airdate) AS airdate_daydiff '
            'FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) '
            'WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) '
            'AND (e.subtitles_searchcount <= 2 OR (e.subtitles_searchcount <= 7 AND airdate_daydiff <= 7)) '
            'AND e.location != ""', [today, wantedLanguages(True)])

        if len(sqlResults) == 0:
            logger.log('No subtitles to download', logger.INFO)
            return

        rules = self._getRules()
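        # _getRules() is not shown here; judging from the schedule described above it
        # presumably returns per-searchcount delays in hours, roughly along the lines of
        #     {'old': [0, 24], 'new': [0, 4, 8, 16, 24, 24, 24]}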
        now = datetime.datetime.now()
        for epToSub in sqlResults:

            if not ek(os.path.isfile, epToSub['location']):
                logger.log(
                    'Episode file does not exist, cannot download subtitles for episode %dx%d of show %s'
                    % (epToSub['season'], epToSub['episode'],
                       epToSub['show_name']), logger.DEBUG)
                continue

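            # Presumably a throwaway warm-up call: the first invocation of
            # datetime.strptime is not thread-safe in CPython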
            throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')

            # Old shows rule
            if ((epToSub['airdate_daydiff'] > 7 and epToSub['searchcount'] < 2
                 and now - datetime.datetime.strptime(epToSub['lastsearch'],
                                                      dateTimeFormat) >
                 datetime.timedelta(hours=rules['old'][epToSub['searchcount']])
                 ) or
                    # Recent shows rule
                (epToSub['airdate_daydiff'] <= 7 and epToSub['searchcount'] < 7
                 and now - datetime.datetime.strptime(epToSub['lastsearch'],
                                                      dateTimeFormat) >
                 datetime.timedelta(hours=rules['new'][epToSub['searchcount']])
                 )):

                logger.log(
                    'Downloading subtitles for episode %dx%d of show %s' %
                    (epToSub['season'], epToSub['episode'],
                     epToSub['show_name']), logger.DEBUG)

                showObj = sickbeard.helpers.findCertainShow(
                    sickbeard.showList, int(epToSub['showid']))
                if not showObj:
                    logger.log(u'Show not found', logger.DEBUG)
                    continue

                epObj = showObj.getEpisode(int(epToSub["season"]),
                                           int(epToSub["episode"]))
                if isinstance(epObj, str):
                    logger.log(u'Episode not found', logger.DEBUG)
                    continue

                existing_subtitles = epObj.subtitles

                try:
                    epObj.downloadSubtitles()
                except Exception as e:
                    logger.log(u'Unable to find subtitles', logger.DEBUG)
                    logger.log(str(e), logger.DEBUG)
                    continue

                newSubtitles = frozenset(
                    epObj.subtitles).difference(existing_subtitles)
                if newSubtitles:
                    logger.log(u'Downloaded subtitles for S%02dE%02d in %s' %
                               (epToSub["season"], epToSub["episode"],
                                ', '.join(newSubtitles)))

        self.amActive = False
Example #59
def process_media(processPath, videoFiles, nzbName, process_method, force,
                  is_priority, result):  # pylint: disable=too-many-arguments
    """
    Postprocess media files

    :param processPath: Path to postprocess in
    :param videoFiles: Filenames to look for and postprocess
    :param nzbName: Name of the related NZB file
    :param process_method: Processing method to use (e.g. copy, move, hardlink, symlink)
    :param force: True to postprocess already postprocessed files
    :param is_priority: Boolean, is this a priority download
    :param result: Shared ProcessResult that accumulates output, status and missed files
    """

    processor = None
    for cur_video_file in videoFiles:
        cur_video_file_path = ek(os.path.join, processPath, cur_video_file)

        if already_postprocessed(processPath, cur_video_file, force, result):
            result.output += logHelper(
                u"Skipping already processed file: %s" % cur_video_file,
                logger.DEBUG)
            continue

        try:
            processor = postProcessor.PostProcessor(cur_video_file_path,
                                                    nzbName, process_method,
                                                    is_priority)

            # This feature prevents PP for files that do not have a subtitle associated with the video file
            if sickbeard.POSTPONE_IF_NO_SUBS:
                associatedFiles = processor.list_associated_files(
                    cur_video_file_path, subtitles_only=True)
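                # Note: only the last three characters are compared, so this matches
                # three-letter subtitle extensions such as 'srt' or 'sub'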
                if not [
                        associatedFile for associatedFile in associatedFiles
                        if associatedFile[-3:] in subtitle_extensions
                ]:
                    result.output += logHelper(
                        u"No subtitles associated. Postponing the post-process of this file: %s"
                        % cur_video_file, logger.DEBUG)
                    continue
                else:
                    result.output += logHelper(
                        u"Found subtitles associated. Continuing the post-process of this file: %s"
                        % cur_video_file)

            result.result = processor.process()
            process_fail_message = u""
        except EpisodePostProcessingFailedException as e:
            result.result = False
            process_fail_message = ex(e)

        if processor:
            result.output += processor.log

        if result.result:
            result.output += logHelper(u"Processing succeeded for %s" %
                                       cur_video_file_path)
        else:
            result.output += logHelper(
                u"Processing failed for %s: %s" %
                (cur_video_file_path, process_fail_message), logger.WARNING)
            result.missedfiles.append(
                u"%s : Processing failed: %s" %
                (cur_video_file_path, process_fail_message))
            result.aggresult = False
Example #60
def validateDir(path, dirName, nzbNameOriginal, failed, result):  # pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
    """
    Check if directory is valid for processing

    :param path: Path to use
    :param dirName: Directory to check
    :param nzbNameOriginal: Original NZB name
    :param failed: Boolean, whether this download was previously marked as failed
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    dirName = ss(dirName)

    IGNORED_FOLDERS = [u'.AppleDouble', u'.@__thumb', u'@eaDir']
    folder_name = ek(os.path.basename, dirName)
    if folder_name in IGNORED_FOLDERS:
        return False

    result.output += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    if folder_name.startswith(u'_FAILED_'):
        result.output += logHelper(u"The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif folder_name.startswith(u'_UNDERSIZED_'):
        result.output += logHelper(u"The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
        failed = True
    elif folder_name.upper().startswith(u'_UNPACK'):
        result.output += logHelper(u"The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
        result.missedfiles.append(u"%s : Being unpacked" % dirName)
        return False

    if failed:
        process_failed(ek(os.path.join, path, dirName), nzbNameOriginal, result)
        result.missedfiles.append(u"%s : Failed download" % dirName)
        return False

    if helpers.is_hidden_folder(ek(os.path.join, path, dirName)):
        result.output += logHelper(u"Ignoring hidden folder: %s" % dirName, logger.DEBUG)
        result.missedfiles.append(u"%s : Hidden folder" % dirName)
        return False

    # make sure the dir isn't inside a show dir
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT location FROM tv_shows")

    for sqlShow in sql_results:
        if dirName.lower().startswith(ek(os.path.realpath, sqlShow["location"]).lower() + os.sep) or \
                dirName.lower() == ek(os.path.realpath, sqlShow["location"]).lower():

            result.output += logHelper(
                u"Cannot process an episode that's already been moved to its show dir, skipping " + dirName,
                logger.WARNING)
            return False

    # Get the videofile list for the next checks
    allFiles = []
    allDirs = []
    for _, processdir, fileList in ek(os.walk, ek(os.path.join, path, dirName), topdown=False):
        allDirs += processdir
        allFiles += fileList

    videoFiles = [x for x in allFiles if helpers.isMediaFile(x)]
    allDirs.append(dirName)

    # check if the dir has at least one parsable TV video file
    for video in videoFiles:
        try:
            NameParser().parse(video, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException) as error:
            result.output += logHelper(u"{}".format(error), logger.DEBUG)

    for proc_dir in allDirs:
        try:
            NameParser().parse(proc_dir, cache_result=False)
            return True
        except (InvalidNameException, InvalidShowException) as error:
            result.output += logHelper(u"{}".format(error), logger.DEBUG)

    if sickbeard.UNPACK:
        # Search for packed release
        packedFiles = [x for x in allFiles if helpers.isRarFile(x)]

        for packed in packedFiles:
            try:
                NameParser().parse(packed, cache_result=False)
                return True
            except (InvalidNameException, InvalidShowException) as error:
                result.output += logHelper(u"{}".format(error), logger.DEBUG)

    result.output += logHelper(u"%s : No processable items found in folder" % dirName, logger.DEBUG)
    return False