Example #1
def change_unpack_dir(unpack_dir):
    """
    Change UNPACK directory (used by postprocessor)

    :param unpack_dir: New unpack directory
    :return: True on success, False on failure
    """
    if unpack_dir == '':
        sickbeard.UNPACK_DIR = ''
        return True

    if ek(os.path.normpath, sickbeard.UNPACK_DIR) != ek(os.path.normpath, unpack_dir):
        if bool(sickbeard.ROOT_DIRS) and \
                any(map(lambda rd: helpers.is_subdirectory(unpack_dir, rd), sickbeard.ROOT_DIRS.split('|')[1:])):
            # don't change if it's in any of the TV root directories
            logger.log("Unable to change unpack directory to a sub-directory of a TV root dir")
            return False

        if helpers.makeDir(unpack_dir):
            sickbeard.UNPACK_DIR = ek(os.path.normpath, unpack_dir)
            logger.log("Changed unpack directory to " + unpack_dir)
        else:
            logger.log("Unable to create unpack directory " + ek(os.path.normpath, unpack_dir) + ", dir not changed.")
            return False

    return True
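
The ROOT_DIRS check above relies on how SickChill stores its root directories: judging from this snippet and from Examples #11 and #49, ROOT_DIRS appears to be a pipe-delimited string whose first element is the index of the default root directory and whose remaining elements are the root paths. A minimal sketch with a made-up value:

# Hypothetical ROOT_DIRS value; the leading "1" selects /tv/kids as the default root.
ROOT_DIRS = '1|/tv/main|/tv/kids'

parts = ROOT_DIRS.split('|')
root_paths = parts[1:]                   # ['/tv/main', '/tv/kids'] - what is_subdirectory is checked against
default_root = parts[int(parts[0]) + 1]  # '/tv/kids'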
Example #2
File: imdb.py Project: murbaniak/SickRage
    def fetch_popular_shows(self):
        """Get popular show information from IMDB"""

        popular_shows = []

        data = helpers.getURL(self.url, session=self.session, params=self.params, headers={'Referer': 'http://akas.imdb.com/'}, returns='text')
        if not data:
            return None

        soup = BeautifulSoup(data, 'html5lib')
        results = soup.find("table", {"class": "results"})
        rows = results("tr")

        for row in rows:
            show = {}
            image_td = row.find("td", {"class": "image"})

            if image_td:
                image = image_td.find("img")
                show['image_url_large'] = self.change_size(image['src'], 3)
                show['image_path'] = ek(posixpath.join, 'images', 'imdb_popular', ek(os.path.basename, show['image_url_large']))

                self.cache_image(show['image_url_large'])

            td = row.find("td", {"class": "title"})

            if td:
                show['name'] = td.find("a").contents[0]
                show['imdb_url'] = "http://akas.imdb.com" + td.find("a")["href"]
                show['imdb_tt'] = show['imdb_url'][-10:][0:9]
                show['year'] = td.find("span", {"class": "year_type"}).contents[0].split(" ")[0][1:]

                rating_all = td.find("div", {"class": "user_rating"})
                if rating_all:
                    rating_string = rating_all.find("div", {"class": "rating rating-list"})
                    if rating_string:
                        rating_string = rating_string['title']

                        match = re.search(r".* (.*)\/10.*\((.*)\).*", rating_string)
                        if match:
                            matches = match.groups()
                            show['rating'] = matches[0]
                            show['votes'] = matches[1]
                        else:
                            show['rating'] = None
                            show['votes'] = None
                else:
                    show['rating'] = None
                    show['votes'] = None

                outline = td.find("span", {"class": "outline"})
                if outline:
                    show['outline'] = outline.contents[0]
                else:
                    show['outline'] = ''

                popular_shows.append(show)

        return popular_shows
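
The imdb_tt slicing above assumes the show URL ends with a nine-character title id followed by a trailing slash (the classic seven-digit "tt" ids); a quick illustration with an example id:

imdb_url = "http://akas.imdb.com/title/tt0944947/"
print(imdb_url[-10:])       # 'tt0944947/' - the last ten characters
print(imdb_url[-10:][0:9])  # 'tt0944947'  - drop the trailing slash

Note that an eight-digit id would not survive this slicing, so treat it as tied to the old URL format.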
Example #3
def log_data(min_level, log_filter, log_search, max_lines):
    regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
    if log_filter not in LOG_FILTERS:
        log_filter = '<NONE>'

    final_data = []

    log_files = []
    if ek(os.path.isfile, Wrapper.instance.log_file):
        log_files.append(Wrapper.instance.log_file)

        for i in range(1, int(sickbeard.LOG_NR)):
            name = Wrapper.instance.log_file + "." + str(i)
            if not ek(os.path.isfile, name):
                break
            log_files.append(name)
    else:
        return final_data

    data = []
    for _log_file in log_files:
        if len(data) < max_lines:
            with io.open(_log_file, 'r', encoding='utf-8') as f:
                data += [line.strip() + '\n' for line in reversed(f.readlines()) if line.strip()]
        else:
            break

    found_lines = 0
    for x in data:
        match = re.match(regex, x)

        if match:
            level = match.group(7)
            log_name = match.group(8)

            if not sickbeard.DEBUG and level == 'DEBUG':
                continue

            if not sickbeard.DBDEBUG and level == 'DB':
                continue

            if level not in LOGGING_LEVELS:
                final_data.append('AA ' + x)
                found_lines += 1
            elif log_search and log_search.lower() in x.lower():
                final_data.append(x)
                found_lines += 1
            elif not log_search and LOGGING_LEVELS[level] >= int(min_level) and (log_filter == '<NONE>' or log_name.startswith(log_filter)):
                final_data.append(x)
                found_lines += 1
        else:
            final_data.append('AA ' + x)
            found_lines += 1

        if found_lines >= max_lines:
            break

    return final_data
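
For reference, here is the parsing regex from log_data applied to an illustrative log line (the thread name and message are made up, but follow the "date time LEVEL NAME :: message" shape the regex expects):

import re

regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
line = "2019-03-04 12:34:56 INFO SEARCHQUEUE-DAILY-SEARCH :: Beginning daily search for new episodes"

match = re.match(regex, line)
level = match.group(7)     # 'INFO'
log_name = match.group(8)  # 'SEARCHQUEUE-DAILY-SEARCH'
message = match.group(9)   # 'Beginning daily search for new episodes'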
Example #4
    def fill_cache(self, show_obj):
        """
        Caches all images for the given show. Copies them from the show dir if possible, or
        downloads them from indexer if they aren't in the show dir.

        :param show_obj: TVShow object to cache images for
        """

        logger.log("Checking if we need any cache images for show " + str(show_obj.indexerid), logger.DEBUG)

        # check if the images are already cached or not
        need_images = {self.POSTER: not self.has_poster(show_obj.indexerid),
                       self.BANNER: not self.has_banner(show_obj.indexerid),
                       self.POSTER_THUMB: not self.has_poster_thumbnail(show_obj.indexerid),
                       self.BANNER_THUMB: not self.has_banner_thumbnail(show_obj.indexerid),
                       self.FANART: not self.has_fanart(show_obj.indexerid)}

        if not need_images[self.POSTER] and not need_images[self.BANNER] and not need_images[self.POSTER_THUMB] and not need_images[self.BANNER_THUMB] and not need_images[self.FANART]:
            logger.log("No new cache images needed, not retrieving new ones", logger.DEBUG)
            return

        # check the show dir for poster or banner images and use them
        if need_images[self.POSTER] or need_images[self.BANNER] or need_images[self.FANART]:
            try:
                for cur_provider in sickbeard.metadata_provider_dict.values():
                    logger.log("Checking if we can use the show image from the " + cur_provider.name + " metadata",
                               logger.DEBUG)
                    if ek(os.path.isfile, cur_provider.get_poster_path(show_obj)):
                        cur_file_name = ek(os.path.abspath, cur_provider.get_poster_path(show_obj))
                        cur_file_type = self.which_type(cur_file_name)

                        if cur_file_type is None:
                            logger.log("Unable to retrieve image type, not using the image from " + str(cur_file_name),
                                       logger.WARNING)
                            continue

                        logger.log("Checking if image " + cur_file_name + " (type " + str(
                            cur_file_type) + ") needs metadata: " + str(need_images[cur_file_type]), logger.DEBUG)

                        if cur_file_type in need_images and need_images[cur_file_type]:
                            logger.log(
                                "Found an image in the show dir that doesn't exist in the cache, caching it: " + cur_file_name + ", type " + str(
                                    cur_file_type), logger.DEBUG)
                            self._cache_image_from_file(cur_file_name, cur_file_type, show_obj.indexerid)
                            need_images[cur_file_type] = False
            except ShowDirectoryNotFoundException:
                logger.log("Unable to search for images in show dir because it doesn't exist", logger.WARNING)

        # download from indexer for missing ones
        for cur_image_type in [self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB, self.FANART]:
            logger.log("Seeing if we still need an image of type " + str(cur_image_type) + ": " + str(
                need_images[cur_image_type]), logger.DEBUG)
            if cur_image_type in need_images and need_images[cur_image_type]:
                self._cache_image_from_indexer(show_obj, cur_image_type)

        logger.log("Done cache check")
Example #5
 def makeObject(self, cmd_arg, cur_path):
     if sickbeard.USE_SYNOINDEX:
         synoindex_cmd = ['/usr/syno/bin/synoindex', cmd_arg, ek(os.path.abspath, cur_path)]
         logger.log("Executing command " + str(synoindex_cmd), logger.DEBUG)
         logger.log("Absolute path to command: " + ek(os.path.abspath, synoindex_cmd[0]), logger.DEBUG)
         try:
             p = subprocess.Popen(synoindex_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                  cwd=sickbeard.PROG_DIR)
             out, err = p.communicate()  # @UnusedVariable
             logger.log("Script result: " + str(out), logger.DEBUG)
         except OSError as e:
             logger.log("Unable to run synoindex: " + ex(e), logger.ERROR)
Example #6
    def retrieveShowMetadata(self, folder):
        """
        Used only when mass adding Existing Shows, using previously generated Show metadata to reduce the need to query TVDB.
        """

        empty_return = (None, None, None)

        assert isinstance(folder, six.text_type)

        metadata_path = ek(os.path.join, folder, self._show_metadata_filename)

        if not ek(os.path.isdir, folder) or not ek(os.path.isfile, metadata_path):
            logger.log("Can't load the metadata file from " + metadata_path + ", it doesn't exist", logger.DEBUG)
            return empty_return

        logger.log("Loading show info from metadata file in " + metadata_path, logger.DEBUG)

        try:
            with io.open(metadata_path, 'rb') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            if showXML.findtext('title') is None or (showXML.findtext('tvdbid') is None and showXML.findtext('id') is None):
                logger.log("Invalid info in tvshow.nfo (missing name or id): {0} {1} {2}".format(showXML.findtext('title'), showXML.findtext('tvdbid'), showXML.findtext('id')))
                return empty_return

            name = showXML.findtext('title')

            indexer_id_text = showXML.findtext('tvdbid') or showXML.findtext('id')
            if indexer_id_text:
                indexer_id = try_int(indexer_id_text, None)
                if indexer_id is None or indexer_id < 1:
                    logger.log("Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file", logger.DEBUG)
                    return empty_return
            else:
                logger.log("Empty <id> or <tvdbid> field in NFO, unable to find an ID, not using metadata file", logger.DEBUG)
                return empty_return

            indexer = 1
            epg_url_text = showXML.findtext('episodeguide/url')
            if epg_url_text:
                epg_url = epg_url_text.lower()
                if str(indexer_id) in epg_url and 'tvrage' in epg_url:
                    logger.log("Invalid Indexer ID (" + str(indexer_id) + "), not using metadata file because it has TVRage info", logger.WARNING)
                    return empty_return

        except Exception as e:
            logger.log(
                "There was an error parsing your existing metadata file: '" + metadata_path + "' error: " + ex(e),
                logger.WARNING)
            return empty_return

        return indexer_id, name, indexer
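
As a rough illustration of what retrieveShowMetadata accepts, a tvshow.nfo along these lines (values made up) passes the checks above and yields (12345, 'Some Show', 1):

import xml.etree.ElementTree as etree

nfo = b"""<?xml version="1.0" encoding="UTF-8"?>
<tvshow>
    <title>Some Show</title>
    <tvdbid>12345</tvdbid>
</tvshow>"""

root = etree.fromstring(nfo)
print(root.findtext('title'))   # 'Some Show'
print(root.findtext('tvdbid'))  # '12345' - parsed with try_int and validated as > 0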
Example #7
    def remove_pid_file(pid_file):
        """
        Remove pid file

        :param pid_file: pid file to remove
        :return: True on success or when the file does not exist, False on failure
        """
        try:
            if ek(os.path.exists, pid_file):
                ek(os.remove, pid_file)
        except EnvironmentError:
            return False

        return True
Example #8
 def has_banner_thumbnail(self, indexer_id):
     """
     Returns true if a cached banner thumbnail exists for the given Indexer ID
     """
     banner_thumb_path = self.banner_thumb_path(indexer_id)
     logger.log("Checking if file " + str(banner_thumb_path) + " exists", logger.DEBUG)
     return ek(os.path.isfile, banner_thumb_path)
Example #9
 def _check_exists(location):
     if location:
         assert isinstance(location, six.text_type)
         result = ek(os.path.isfile, location)
         logger.log("Checking if " + location + " exists: " + str(result), logger.DEBUG)
         return result
     return False
Example #10
 def has_poster(self, indexer_id):
     """
     Returns true if a cached poster exists for the given Indexer ID
     """
     poster_path = self.poster_path(indexer_id)
     logger.log("Checking if file " + str(poster_path) + " exists", logger.DEBUG)
     return ek(os.path.isfile, poster_path)
Example #11
    def addDefaultShow(indexer, indexer_id, name, status):
        """
        Adds a new show with the default settings
        """
        if not Show.find(sickbeard.showList, int(indexer_id)):
            logger.log("Adding show " + str(indexer_id))
            root_dirs = sickbeard.ROOT_DIRS.split('|')

            try:
                location = root_dirs[int(root_dirs[0]) + 1]
            except Exception:
                location = None

            if location:
                showPath = ek(os.path.join, location, sanitize_filename(name))
                dir_exists = helpers.makeDir(showPath)

                if not dir_exists:
                    logger.log("Unable to create the folder {0}, can't add the show".format(showPath), logger.WARNING)
                    return
                else:
                    helpers.chmodAsParent(showPath)

                sickbeard.showQueueScheduler.action.add_show(int(indexer), int(indexer_id), showPath,
                                                             default_status=status,
                                                             quality=int(sickbeard.QUALITY_DEFAULT),
                                                             season_folders=int(sickbeard.SEASON_FOLDERS_DEFAULT),
                                                             paused=sickbeard.TRAKT_START_PAUSED,
                                                             default_status_after=status)
            else:
                logger.log("There was an error creating the show, no root directory setting found", logger.WARNING)
                return
Example #12
    def get_media_path(self):
        show = self.get_show()

        if show:
            return ek(join, self.get_media_root(), 'images', 'network', show.network_logo_name + '.png')

        return ''
Example #13
    def get_episode_thumb_path(ep_obj):
        """
        Returns a full show dir/metadata/episode.jpg path for MediaBrowser
        episode thumbs.

        ep_obj: a TVEpisode object to get the path from
        """

        if ek(os.path.isfile, ep_obj.location):
            tbn_file_name = replace_extension(ek(os.path.basename, ep_obj.location), 'jpg')
            metadata_dir_name = ek(os.path.join, ek(os.path.dirname, ep_obj.location), 'metadata')
            tbn_file_path = ek(os.path.join, metadata_dir_name, tbn_file_name)
        else:
            return None

        return tbn_file_path
Example #14
 def has_fanart(self, indexer_id):
     """
     Returns true if a cached fanart exists for the given Indexer ID
     """
     fanart_path = self.fanart_path(indexer_id)
     logger.log("Checking if file " + str(fanart_path) + " exists", logger.DEBUG)
     return ek(os.path.isfile, fanart_path)
Example #15
    def qualityFromFileMeta(filename):  # pylint: disable=too-many-branches
        """
        Get quality from file metadata

        :param filename: Filename to analyse
        :return: Quality prefix
        """

        height = video_screen_size(filename)[1]
        if not height:
            return Quality.UNKNOWN

        base_filename = ek(path.basename, filename)
        bluray = re.search(r"blue?-?ray|hddvd|b[rd](rip|mux)", base_filename, re.I) is not None
        webdl = re.search(r"web.?dl|web(rip|mux|hd)", base_filename, re.I) is not None

        ret = Quality.UNKNOWN
        if 3240 < height:
            ret = ((Quality.UHD_8K_TV, Quality.UHD_8K_BLURAY)[bluray], Quality.UHD_8K_WEBDL)[webdl]
        elif 1620 < height <= 3240:
            ret = ((Quality.UHD_4K_TV, Quality.UHD_4K_BLURAY)[bluray], Quality.UHD_4K_WEBDL)[webdl]
        elif 800 < height <= 1620:
            ret = ((Quality.FULLHDTV, Quality.FULLHDBLURAY)[bluray], Quality.FULLHDWEBDL)[webdl]
        elif 680 < height <= 800:
            ret = ((Quality.HDTV, Quality.HDBLURAY)[bluray], Quality.HDWEBDL)[webdl]
        elif height <= 680:
            ret = (Quality.SDTV, Quality.SDDVD)[re.search(r'dvd|b[rd]rip|blue?-?ray', base_filename, re.I) is not None]

        return ret
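
The nested tuple indexing above is a compact if/else: a bool indexes a two-element tuple, so the webdl flag takes precedence over the bluray flag. An equivalent, more explicit version of the 1080p branch:

# Equivalent to: ret = ((Quality.FULLHDTV, Quality.FULLHDBLURAY)[bluray], Quality.FULLHDWEBDL)[webdl]
if webdl:
    ret = Quality.FULLHDWEBDL
elif bluray:
    ret = Quality.FULLHDBLURAY
else:
    ret = Quality.FULLHDTV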
Example #16
    def fetch_popular_shows(self):
        """Get popular show information from IMDB"""

        popular_shows = []

        data = helpers.getURL(self.url, session=self.session, params=self.params, headers={'Referer': 'http://akas.imdb.com/'}, returns='text')
        if not data:
            return None

        soup = BeautifulSoup(data, 'html5lib')
        results = soup.find_all("div", {"class": "lister-item"})

        for row in results:
            show = {}
            image_div = row.find("div", {"class": "lister-item-image"})
            if image_div:
                image = image_div.find("img")
                show['image_url_large'] = self.change_size(image['loadlate'], 3)
                show['imdb_tt'] = image['data-tconst']
                show['image_path'] = ek(posixpath.join, 'images', 'imdb_popular', ek(os.path.basename, show['image_url_large']))
                self.cache_image(show['image_url_large'])

            content = row.find("div", {"class": "lister-item-content"})
            if content:
                header = row.find("h3", {"class": "lister-item-header"})
                if header:
                    a_tag = header.find("a")
                    if a_tag:
                        show['name'] = a_tag.get_text(strip=True)
                        show['imdb_url'] = "http://www.imdb.com" + a_tag["href"]
                        show['year'] = header.find("span", {"class": "lister-item-year"}).contents[0].split(" ")[0][1:].strip("-")

                imdb_rating = row.find("div", {"class": "ratings-imdb-rating"})
                show['rating'] = imdb_rating['data-value'] if imdb_rating else None

                votes = row.find("span", {"name": "nv"})
                show['votes'] = votes['data-value'] if votes else None

                outline = content.find_all("p", {"class": "text-muted"})
                if outline and len(outline) >= 2:
                    show['outline'] = outline[1].contents[0].strip("\"")
                else:
                    show['outline'] = ''

                popular_shows.append(show)

        return popular_shows
Example #17
    def get_episode_file_path(self, ep_obj):
        """
        Returns a full show dir/metadata/episode.xml path for MediaBrowser
        episode metadata files

        ep_obj: a TVEpisode object to get the path for
        """

        if ek(os.path.isfile, ep_obj.location):
            xml_file_name = replace_extension(ek(os.path.basename, ep_obj.location), self._ep_nfo_extension)
            metadata_dir_name = ek(os.path.join, ek(os.path.dirname, ep_obj.location), 'metadata')
            xml_file_path = ek(os.path.join, metadata_dir_name, xml_file_name)
        else:
            logger.log("Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG)
            return ''

        return xml_file_path
Example #18
    def api_params(self):
        if self.indexerID:
            if sickbeard.CACHE_DIR:
                indexerConfig[self.indexerID]['api_params']['cache'] = ek(os.path.join, sickbeard.CACHE_DIR, 'indexers', self.name)
            if sickbeard.PROXY_SETTING and sickbeard.PROXY_INDEXERS:
                indexerConfig[self.indexerID]['api_params']['proxy'] = sickbeard.PROXY_SETTING

            return indexerConfig[self.indexerID]['api_params']
Example #19
    def change_size(image_url, factor=3):
        match = re.search(r"^(.*)V1_(.{2})(.*?)_(.{2})(.*?),(.*?),(.*?),(.\d?)_(.*?)_.jpg$", image_url)

        if match:
            matches = match.groups()
            ek(os.path.basename, image_url)
            matches = list(matches)
            matches[2] = int(matches[2]) * factor
            matches[4] = int(matches[4]) * factor
            matches[5] = int(matches[5]) * factor
            matches[6] = int(matches[6]) * factor
            matches[7] = int(matches[7]) * factor

            return "{0}V1._{1}{2}_{3}{4},{5},{6},{7}_.jpg".format(matches[0], matches[1], matches[2], matches[3], matches[4],
                                                      matches[5], matches[6], matches[7])
        else:
            return image_url
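
With the code as shown (and assuming change_size is exposed as a staticmethod, since it takes no self), a URL that matches the pattern has its pixel dimensions multiplied by factor. The URL below is made up but matches the regex:

url = "https://m.media-amazon.com/images/M/something._V1_UX67_CR0,0,67,98_AL_.jpg"
print(change_size(url, factor=3))
# -> 'https://m.media-amazon.com/images/M/something._V1._UX201_CR0,0,201,294_.jpg'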
Example #20
def getFileList(path, includeFiles, fileTypes):
    # prune out directories to protect the user from doing stupid things (the hide list entries are already lower case to reduce calls)
    hide_list = ['boot', 'bootmgr', 'cache', 'config.msi', 'msocache', 'recovery', '$recycle.bin',
                 'recycler', 'system volume information', 'temporary internet files']  # windows specific
    hide_list += ['.fseventd', '.spotlight', '.trashes', '.vol', 'cachedmessages', 'caches', 'trash']  # osx specific
    hide_list += ['.git']

    file_list = []
    dir_list = []
    for filename in ek(os.listdir, path):
        if filename.lower() in hide_list:
            continue

        full_filename = ek(os.path.join, path, filename)
        is_file = ek(os.path.isfile, full_filename)

        if not includeFiles and is_file:
            continue

        is_image = False
        allowed_type = True
        if is_file and fileTypes:
            if 'images' in fileTypes:
                is_image = filename.endswith(('jpg', 'jpeg', 'png', 'tiff', 'gif'))
            allowed_type = filename.endswith(tuple(fileTypes)) or is_image

            if not allowed_type:
                continue

        item_to_add = {
            'name': filename,
            'path': full_filename,
            'isFile': is_file,
            'isImage': is_image,
            'isAllowed': allowed_type
        }

        if is_file:
            file_list.append(item_to_add)
        else:
            dir_list.append(item_to_add)

    # Sort folders first, alphabetically, case insensitive
    dir_list.sort(key=lambda mbr: itemgetter('name')(mbr).lower())
    file_list.sort(key=lambda mbr: itemgetter('name')(mbr).lower())
    return dir_list + file_list
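
The sort key above is a roundabout way of saying "sort by lower-cased name"; an equivalent key, assuming the same dict layout:

dir_list.sort(key=lambda mbr: mbr['name'].lower())
file_list.sort(key=lambda mbr: mbr['name'].lower())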
Example #21
def delete_files(process_path, unwanted_files, result, force=False):
    """
    Remove files from filesystem

    :param process_path: path to process
    :param unwanted_files: files we do not want
    :param result: Processor results
    :param force: Boolean, force deletion, defaults to false
    """
    if not result.result and force:
        result.output += log_helper("Forcing deletion of files, even though last result was not success", logger.DEBUG)
    elif not result.result:
        return

    # Delete all files that are not needed
    for cur_file in unwanted_files:
        cur_file_path = ek(os.path.join, process_path, cur_file)
        if not ek(os.path.isfile, cur_file_path):
            continue  # Prevent an error when an unwanted file is actually an associated file

        result.output += log_helper("Deleting file: {0}".format(cur_file), logger.DEBUG)

        # check first the read-only attribute
        file_attribute = ek(os.stat, cur_file_path)[0]
        if not file_attribute & stat.S_IWRITE:
            # File is read-only, so make it writeable
            result.output += log_helper("Changing ReadOnly Flag for file: {0}".format(cur_file), logger.DEBUG)
            try:
                ek(os.chmod, cur_file_path, stat.S_IWRITE)
            except OSError as e:
                result.output += log_helper("Cannot change permissions of {0}: {1}".format(cur_file_path, ex(e)), logger.DEBUG)
        try:
            ek(os.remove, cur_file_path)
        except OSError as e:
            result.output += log_helper("Unable to delete file {0}: {1}".format(cur_file, e.strerror), logger.DEBUG)
Example #22
    def banner_thumb_path(self, indexer_id):
        """
        Builds up the path to a banner thumb cache for a given Indexer ID

        :param indexer_id: ID of the show to use in the file name
        :return: a full path to the cached banner thumb file for the given Indexer ID
        """
        bannerthumb_file_name = str(indexer_id) + '.banner.jpg'
        return ek(os.path.join, self._thumbnails_dir(), bannerthumb_file_name)
Example #23
    def poster_path(self, indexer_id):
        """
        Builds up the path to a poster cache for a given Indexer ID

        :param indexer_id: ID of the show to use in the file name
        :return: a full path to the cached poster file for the given Indexer ID
        """
        poster_file_name = str(indexer_id) + '.poster.jpg'
        return ek(os.path.join, self._cache_dir(), poster_file_name)
Example #24
    def fanart_path(self, indexer_id):
        """
        Builds up the path to a fanart cache for a given Indexer ID

        :param indexer_id: ID of the show to use in the file name
        :return: a full path to the cached fanart file for the given Indexer ID
        """
        fanart_file_name = str(indexer_id) + '.fanart.jpg'
        return ek(os.path.join, self._cache_dir(), fanart_file_name)
Example #25
 def _notify(self, title, message, force=False):
     if self.notify_initialized and (sickbeard.USE_LIBNOTIFY or force):
         icon = ek(os.path.join, sickbeard.PROG_DIR, 'gui', 'slick', 'images', 'ico', 'favicon-120.png')
         # noinspection PyBroadException
         try:
             n = Notify.Notification.new(title, message, icon)
             return n.show()
         except Exception:
             return False
Example #26
    def force_update():
        """
        Forces SickChill to update to the latest version and exit.

        :return: True if successful, False otherwise
        """

        def update_with_git():
            def run_git(updater, cmd):
                stdout_, stderr_, exit_status = updater._run_git(updater._git_path, cmd)
                if not exit_status == 0:
                    print('Failed to run command: {0} {1}'.format(updater._git_path, cmd))
                    return False
                else:
                    return True

            updater = GitUpdateManager()
            if not run_git(updater, 'config remote.origin.url https://github.com/SickChill/SickChill.git'):
                return False
            if not run_git(updater, 'fetch origin --prune'):
                return False
            if not run_git(updater, 'checkout master'):
                return False
            if not run_git(updater, 'reset --hard origin/master'):
                return False

            return True

        if ek(os.path.isdir, ek(os.path.join, sickbeard.PROG_DIR, '.git')):  # update with git
            print('Forcing SickChill to update using git...')
            result = update_with_git()
            if result:
                print('Successfully updated to latest commit. You may now run SickChill normally.')
                return True
            else:
                print('Error while trying to force an update using git.')

        print('Forcing SickChill to update using source...')
        if not SourceUpdateManager().update():
            print('Failed to force an update.')
            return False

        print('Successfully updated to latest commit. You may now run SickChill normally.')
        return True
Example #27
    def write_ep_file(self, ep_obj):
        """
        Generates and writes ep_obj's metadata under the given path with the
        given filename root. Uses the episode's name with the extension in
        _ep_nfo_extension.

        ep_obj: TVEpisode object for which to create the metadata

        file_name_path: The file name to use for this metadata. Note that the extension
                will be automatically added based on _ep_nfo_extension. This should
                include an absolute path.

        Note that this method expects that _ep_data will return an ElementTree
        object. If your _ep_data returns data in another format you'll need to
        override this method.
        """

        data = self._ep_data(ep_obj)

        if not data:
            return False

        nfo_file_path = self.get_episode_file_path(ep_obj)
        nfo_file_dir = ek(os.path.dirname, nfo_file_path)

        try:
            if not ek(os.path.isdir, nfo_file_dir):
                logger.log("Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG)
                ek(os.makedirs, nfo_file_dir)
                helpers.chmodAsParent(nfo_file_dir)

            logger.log("Writing episode nfo file to " + nfo_file_path, logger.DEBUG)

            nfo_file = io.open(nfo_file_path, 'wb')

            data.write(nfo_file, encoding="utf-8", xml_declaration=True)
            nfo_file.close()
            helpers.chmodAsParent(nfo_file_path)
        except IOError as e:
            logger.log("Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
                       logger.ERROR)
            return False

        return True
Example #28
File: tivo.py Project: murbaniak/SickRage
    def get_episode_file_path(self, ep_obj):
        """
        Returns a full show dir/.meta/episode.txt path for Tivo
        episode metadata files.

        Note that pyTivo requires the metadata filename to include the original extension;
        i.e. if the episode name is foo.avi, the metadata name is foo.avi.txt

        ep_obj: a TVEpisode object to get the path for
        """
        if ek(os.path.isfile, ep_obj.location):
            metadata_file_name = ek(os.path.basename, ep_obj.location) + "." + self._ep_nfo_extension
            metadata_dir_name = ek(os.path.join, ek(os.path.dirname, ep_obj.location), '.meta')
            metadata_file_path = ek(os.path.join, metadata_dir_name, metadata_file_name)
        else:
            logger.log("Episode location doesn't exist: " + str(ep_obj.location), logger.DEBUG)
            return ''
        return metadata_file_path
Example #29
    def write_show_file(self, show_obj):
        """
        Generates and writes show_obj's metadata under the given path to the
        filename given by get_show_file_path()

        show_obj: TVShow object for which to create the metadata

        path: An absolute or relative path where we should put the file. Note that
                the file name will be the default show_file_name.

        Note that this method expects that _show_data will return an ElementTree
        object. If your _show_data returns data in another format you'll need to
        override this method.
        """

        data = self._show_data(show_obj)

        if not data:
            return False

        nfo_file_path = self.get_show_file_path(show_obj)
        assert isinstance(nfo_file_path, six.text_type)

        nfo_file_dir = ek(os.path.dirname, nfo_file_path)

        try:
            if not ek(os.path.isdir, nfo_file_dir):
                logger.log("Metadata dir didn't exist, creating it at " + nfo_file_dir, logger.DEBUG)
                ek(os.makedirs, nfo_file_dir)
                helpers.chmodAsParent(nfo_file_dir)

            logger.log("Writing show nfo file to " + nfo_file_path, logger.DEBUG)

            nfo_file = io.open(nfo_file_path, 'wb')
            data.write(nfo_file, encoding='UTF-8')
            nfo_file.close()
            helpers.chmodAsParent(nfo_file_path)
        except IOError as e:
            logger.log("Unable to write file to " + nfo_file_path + " - are you sure the folder is writable? " + ex(e),
                       logger.ERROR)
            return False

        return True
Example #30
 def clear_cache():
     """
     Remove the Mako cache directory
     """
     try:
         cache_folder = ek(os.path.join, sickbeard.CACHE_DIR, 'mako')
         if os.path.isdir(cache_folder):
             shutil.rmtree(cache_folder)
     except Exception:  # pylint: disable=broad-except
         logger.log('Unable to remove the cache/mako directory!', logger.WARNING)
Example #31
 def _send_synologyNotifier(self, message, title):
     synodsmnotify_cmd = [
         "/usr/syno/bin/synodsmnotify", "@administrators", title, message
     ]
     logger.log("Executing command " + str(synodsmnotify_cmd))
     logger.log(
         "Absolute path to command: " +
         ek(os.path.abspath, synodsmnotify_cmd[0]), logger.DEBUG)
     try:
         p = subprocess.Popen(synodsmnotify_cmd,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              cwd=sickbeard.PROG_DIR)
         out, err = p.communicate()  # @UnusedVariable
         logger.log("Script result: " + str(out), logger.DEBUG)
     except OSError as e:
         logger.log("Unable to run synodsmnotify: " + ex(e))
Example #32
    def get_season_poster_path(show_obj, season):
        """
        Returns the full path to the file for a given season poster.

        show_obj: a TVShow instance for which to generate the path
        season: a season number to be used for the path. Note that season 0
                means specials.
        """

        # Our specials thumbnail is, well, special
        if season == 0:
            season_poster_filename = 'season-specials'
        else:
            season_poster_filename = 'season' + str(season).zfill(2)

        return ek(os.path.join, show_obj.location,
                  season_poster_filename + '-poster.jpg')
Example #33
    def which_type(self, path):
        """
        Analyzes the image provided and attempts to determine whether it is a poster, banner or fanart.

        :param path: full path to the image
        :return: BANNER, POSTER or FANART if it concluded one of them, or None if the image matched none of these (or didn't exist)
        """

        if not ek(os.path.isfile, path):
            logger.log(
                "Couldn't check the type of " + str(path) +
                " cause it doesn't exist", logger.WARNING)
            return None

        # use hachoir to parse the image for us
        img_parser = createParser(path)
        img_metadata = extractMetadata(img_parser)

        if not img_metadata:
            logger.log(
                "Unable to get metadata from " + str(path) +
                ", not using your existing image", logger.DEBUG)
            return None

        img_ratio = float(img_metadata.get('width')) / float(
            img_metadata.get('height'))

        img_parser.stream._input.close()

        # most posters are around 0.68 width/height ratio (eg. 680/1000)
        if 0.55 < img_ratio < 0.8:
            return self.POSTER

        # most banners are around 5.4 width/height ratio (eg. 758/140)
        elif 5 < img_ratio < 6:
            return self.BANNER

        # most fanart are around 1.77777 width/height ratio (eg. 1280/720 and 1920/1080)
        elif 1.7 < img_ratio < 1.8:
            return self.FANART
        else:
            logger.log(
                "Image has size ratio of " + str(img_ratio) + ", unknown type",
                logger.WARNING)
            return None
Example #34
    def get_episode_thumb_path(ep_obj):
        """
        Returns the path where the episode thumbnail should be stored.
        ep_obj: a TVEpisode instance for which to create the thumbnail
        """
        assert isinstance(ep_obj.location, six.text_type)
        if ek(os.path.isfile, ep_obj.location):

            tbn_filename = ep_obj.location.rpartition(".")

            if tbn_filename[0] == "":
                tbn_filename = ep_obj.location + "-thumb.jpg"
            else:
                tbn_filename = tbn_filename[0] + "-thumb.jpg"
        else:
            return None

        return tbn_filename
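
rpartition splits on the last dot, so the extension is swapped for "-thumb.jpg"; when the location has no dot at all, the first element is empty and the suffix is simply appended:

print("/tv/Show/foo.avi".rpartition("."))  # ('/tv/Show/foo', '.', 'avi') -> '/tv/Show/foo-thumb.jpg'
print("/tv/Show/foo".rpartition("."))      # ('', '', '/tv/Show/foo')     -> '/tv/Show/foo-thumb.jpg'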
Example #35
    def restore(backupFile=None):

        finalResult = ''

        if backupFile:
            source = backupFile
            target_dir = ek(os.path.join, sickbeard.DATA_DIR, 'restore')

            if helpers.restore_config_zip(source, target_dir):
                finalResult += "Successfully extracted restore files to " + target_dir
                finalResult += "<br>Restart sickchill to complete the restore."
            else:
                finalResult += "Restore FAILED"
        else:
            finalResult += "You need to select a backup file to restore!"

        finalResult += "<br>\n"

        return finalResult
Example #36
    def clean_libs(self):
        lib_path = ek(os.path.join, sickbeard.PROG_DIR, 'lib')

        def removeEmptyFolders(path):
            if not ek(os.path.isdir, path):
                return

            files = ek(os.listdir, path)
            for f in files:
                full_path = ek(os.path.join, path, f)
                if ek(os.path.isdir, full_path):
                    removeEmptyFolders(full_path)

            files = ek(os.listdir, path)
            if len(files) == 0 and path != lib_path:
                ek(os.rmdir, path)

        self.remove_pyc('lib')
        removeEmptyFolders(lib_path)
Example #37
    def _make_url(self, result):
        if not result:
            return '', ''

        filename = ''
        urls = [result.url]
        if result.url.startswith('magnet'):
            torrent_hash = self.hash_from_magnet(result.url)
            if not torrent_hash:
                return urls, filename

            try:
                torrent_name = re.findall('dn=([^&]+)', result.url)[0]
            except Exception:
                torrent_name = 'NO_DOWNLOAD_NAME'

            urls = []
            for cache_url in self.bt_cache_urls:
                if isinstance(cache_url, tuple):
                    urls += cache_url[0].format(
                        torrent_hash=torrent_hash,
                        torrent_name=torrent_name), cache_url[1].format(
                            torrent_hash=torrent_hash,
                            torrent_name=torrent_name)
                else:
                    urls.append(cache_url.format(torrent_hash=torrent_hash, torrent_name=torrent_name))

        if 'torrage.info/torrent.php' in result.url:
            torrent_hash = result.url.split('=')[1]
            urls = [
                ('https://t.torrage.info/download?h={torrent_hash}'.format(
                    torrent_hash=torrent_hash),
                 'https://torrage.info/torrent.php?h={torrent_hash}'.format(
                     torrent_hash=torrent_hash))
            ]

        filename = ek(
            join, self._get_storage_dir(),
            sanitize_filename(result.name) + '.' + self.provider_type)

        return urls, filename
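
The display name is pulled straight from the magnet link's dn parameter; a quick illustration with a made-up magnet URI:

import re

magnet = "magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=Some.Show.S01E01.720p&tr=udp://tracker.example:80"
torrent_name = re.findall('dn=([^&]+)', magnet)[0]  # 'Some.Show.S01E01.720p'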
Example #38
def delete_files(process_path, unwanted_files, result, force=False):
    """
    Remove files from filesystem

    :param process_path: path to process
    :param unwanted_files: files we do not want
    :param result: Processor results
    :param force: Boolean, force deletion, defaults to false
    """
    if not result.result and force:
        result.output += log_helper(
            "Forcing deletion of files, even though last result was not success",
            logger.DEBUG)
    elif not result.result:
        return

    # Delete all files that are not needed
    for cur_file in unwanted_files:
        cur_file_path = ek(os.path.join, process_path, cur_file)
        if not ek(os.path.isfile, cur_file_path):
            continue  # Prevent an error when an unwanted file is actually an associated file

        result.output += log_helper("Deleting file: {0}".format(cur_file),
                                    logger.DEBUG)

        # check first the read-only attribute
        file_attribute = ek(os.stat, cur_file_path)[0]
        if not file_attribute & stat.S_IWRITE:
            # File is read-only, so make it writeable
            result.output += log_helper(
                "Changing ReadOnly Flag for file: {0}".format(cur_file),
                logger.DEBUG)
            try:
                ek(os.chmod, cur_file_path, stat.S_IWRITE)
            except OSError as e:
                result.output += log_helper(
                    "Cannot change permissions of {0}: {1}".format(
                        cur_file_path, ex(e)), logger.DEBUG)
        try:
            ek(os.remove, cur_file_path)
        except OSError as e:
            result.output += log_helper(
                "Unable to delete file {0}: {1}".format(cur_file, e.strerror),
                logger.DEBUG)
Example #39
    def addDefaultShow(indexer, indexer_id, name, status):
        """
        Adds a new show with the default settings
        """
        if not Show.find(sickbeard.showList, int(indexer_id)):
            logger.log("Adding show " + str(indexer_id))
            root_dirs = sickbeard.ROOT_DIRS.split('|')

            try:
                location = root_dirs[int(root_dirs[0]) + 1]
            except Exception:
                location = None

            if location:
                showPath = ek(os.path.join, location, sanitize_filename(name))
                dir_exists = helpers.makeDir(showPath)

                if not dir_exists:
                    logger.log(
                        "Unable to create the folder {0}, can't add the show".format(showPath),
                        logger.WARNING)
                    return
                else:
                    helpers.chmodAsParent(showPath)

                sickbeard.showQueueScheduler.action.add_show(
                    int(indexer),
                    int(indexer_id),
                    showPath,
                    default_status=status,
                    quality=int(sickbeard.QUALITY_DEFAULT),
                    season_folders=int(sickbeard.SEASON_FOLDERS_DEFAULT),
                    paused=sickbeard.TRAKT_START_PAUSED,
                    default_status_after=status)
            else:
                logger.log(
                    "There was an error creating the show, no root directory setting found",
                    logger.WARNING)
                return
Example #40
def process_media(process_path, video_files, release_name, process_method, force, is_priority, result):
    """
    Postprocess mediafiles

    :param process_path: Path to process in
    :param video_files: Filenames to look for and postprocess
    :param release_name: Name of NZB/Torrent file related
    :param process_method: auto/manual
    :param force: Postprocess currently postprocessing file
    :param is_priority: Boolean, is this a priority download
    :param result: Previous results
    """

    processor = None
    for cur_video_file in video_files:
        cur_video_file_path = ek(os.path.join, process_path, cur_video_file)

        if already_processed(process_path, cur_video_file, force, result):
            result.output += log_helper("Skipping already processed file: {0}".format(cur_video_file), logger.DEBUG)
            continue

        try:
            processor = postProcessor.PostProcessor(cur_video_file_path, release_name, process_method, is_priority)
            result.result = processor.process()
            process_fail_message = ""
        except EpisodePostProcessingFailedException as e:
            result.result = False
            process_fail_message = ex(e)

        if processor:
            result.output += processor.log

        if result.result:
            result.output += log_helper("Processing succeeded for {0}".format(cur_video_file_path))
        else:
            result.output += log_helper("Processing failed for {0}: {1}".format(cur_video_file_path, process_fail_message), logger.WARNING)
            result.missed_files.append("{0} : Processing failed: {1}".format(cur_video_file_path, process_fail_message))
            result.aggresult = False
Example #41
    def qualityFromFileMeta(filename):
        """
        Get quality from file metadata

        :param filename: Filename to analyse
        :return: Quality prefix
        """

        height = video_screen_size(filename)[1]
        if not height:
            return Quality.UNKNOWN

        base_filename = ek(path.basename, filename)
        bluray = re.search(r"blue?-?ray|hddvd|b[rd](rip|mux)", base_filename,
                           re.I) is not None
        webdl = re.search(r"web.?dl|web(rip|mux|hd)", base_filename,
                          re.I) is not None

        ret = Quality.UNKNOWN
        if 3240 < height:
            ret = ((Quality.UHD_8K_TV, Quality.UHD_8K_BLURAY)[bluray],
                   Quality.UHD_8K_WEBDL)[webdl]
        elif 1620 < height <= 3240:
            ret = ((Quality.UHD_4K_TV, Quality.UHD_4K_BLURAY)[bluray],
                   Quality.UHD_4K_WEBDL)[webdl]
        elif 800 < height <= 1620:
            ret = ((Quality.FULLHDTV, Quality.FULLHDBLURAY)[bluray],
                   Quality.FULLHDWEBDL)[webdl]
        elif 680 < height <= 800:
            ret = ((Quality.HDTV, Quality.HDBLURAY)[bluray],
                   Quality.HDWEBDL)[webdl]
        elif height <= 680:
            ret = (Quality.SDTV, Quality.SDDVD)[re.search(
                r'dvd|b[rd]rip|blue?-?ray', base_filename, re.I) is not None]

        return ret
Example #42
    def _backup(backupDir=None):
        if not backupDir:
            return False
        source = [
            ek(os.path.join, sickbeard.DATA_DIR, 'sickbeard.db'),
            sickbeard.CONFIG_FILE,
            ek(os.path.join, sickbeard.DATA_DIR, 'failed.db'),
            ek(os.path.join, sickbeard.DATA_DIR, 'cache.db')
        ]
        target = ek(os.path.join, backupDir, 'sickchill-' + time.strftime('%Y%m%d%H%M%S') + '.zip')

        for (path, dirs, files) in ek(os.walk, sickbeard.CACHE_DIR, topdown=True):
            for dirname in list(dirs):  # iterate over a copy so we can safely prune dirs in place
                if path == sickbeard.CACHE_DIR and dirname not in ['images']:
                    dirs.remove(dirname)
            for filename in files:
                source.append(ek(os.path.join, path, filename))

        return helpers.backup_config_zip(source, target, sickbeard.DATA_DIR)
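
Because os.walk is called with topdown=True, pruning the dirs list in place (as the loop above does) stops the walk from descending into the removed directories, so only the images subtree of CACHE_DIR ends up in the backup. A minimal sketch of the same idiom with an illustrative path:

import os

for path, dirs, files in os.walk('/some/cache', topdown=True):
    if path == '/some/cache':
        # slice-assign so os.walk sees the pruned list and skips everything else
        dirs[:] = [d for d in dirs if d == 'images']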
Example #43
    def _cache_image_from_file(self, image_path, img_type, indexer_id):
        """
        Takes the image provided and copies it to the cache folder

        :param image_path: path to the image we're caching
        :param img_type: BANNER or POSTER or FANART
        :param indexer_id: id of the show this image belongs to
        :return: bool representing success
        """

        # generate the path based on the type & indexer_id
        if img_type == self.POSTER:
            dest_path = self.poster_path(indexer_id)
        elif img_type == self.BANNER:
            dest_path = self.banner_path(indexer_id)
        elif img_type == self.FANART:
            dest_path = self.fanart_path(indexer_id)
        else:
            logger.log("Invalid cache image type: " + str(img_type),
                       logger.ERROR)
            return False

        # make sure the cache folder exists before we try copying to it
        if not ek(os.path.isdir, self._cache_dir()):
            logger.log("Image cache dir didn't exist, creating it at " +
                       str(self._cache_dir()))
            ek(os.makedirs, self._cache_dir())

        if not ek(os.path.isdir, self._thumbnails_dir()):
            logger.log("Thumbnails cache dir didn't exist, creating it at " +
                       str(self._thumbnails_dir()))
            ek(os.makedirs, self._thumbnails_dir())

        logger.log("Copying from " + image_path + " to " + dest_path)
        helpers.copyFile(image_path, dest_path)

        return True
Example #44
import sickbeard
from sickchill.helper import video_screen_size
from sickchill.helper.encoding import ek
from sickchill.recompiled import tags
from sickchill.tagger.episode import EpisodeTags

# Local Folder Imports
from .numdict import NumDict

gettext.install('messages', unicode=1, codeset='UTF-8', names=["ngettext"])

# If some provider has an issue with functionality of SR, other than user agents, it's best to come talk to us rather than block.
# It is no different than us going to a provider if we have questions or issues. Be a team player here.
# This is disabled, was only added for testing, and has no config.ini or web ui setting. To enable, set SPOOF_USER_AGENT = True
SPOOF_USER_AGENT = True
ua_pool = fake_useragent.FakeUserAgent(path=ek(
    path.join, ek(path.dirname, __file__), '../fake_useragent.ua.json'))

if SPOOF_USER_AGENT:
    USER_AGENT = ua_pool.random
else:
    INSTANCE_ID = str(uuid.uuid1())
    USER_AGENT = ('SickChill.CE.1/(' + platform.system() + '; ' +
                  platform.release() + '; ' + INSTANCE_ID + ')')

cpu_presets = {'HIGH': 5, 'NORMAL': 2, 'LOW': 1}

# Other constants
MULTI_EP_RESULT = -1
SEASON_RESULT = -2

# Notification Types
Example #45
 def get_season_all_banner_path(self, show_obj):
     return ek(os.path.join, show_obj.location, self.season_all_banner_name)
Example #46
 def get_poster_path(self, show_obj):
     return ek(os.path.join, show_obj.location, self.poster_name)
Example #47
 def get_fanart_path(self, show_obj):
     return ek(os.path.join, show_obj.location, self.fanart_name)
Example #48
 def get_show_file_path(self, show_obj):
     return ek(os.path.join, show_obj.location,
               self._show_metadata_filename)
Example #49
    def __init__(self, options=None):
        threading.Thread.__init__(self)
        self.daemon = True
        self.alive = True
        self.name = "WEBSERVER"

        self.options = options or {}
        self.options.setdefault('port', 8081)
        self.options.setdefault('host', '0.0.0.0')
        self.options.setdefault('log_dir', None)
        self.options.setdefault('username', '')
        self.options.setdefault('password', '')
        self.options.setdefault('web_root', '/')

        assert isinstance(self.options['port'], int)
        assert 'data_root' in self.options

        self.server = None

        # video root
        if sickbeard.ROOT_DIRS:
            root_dirs = sickbeard.ROOT_DIRS.split('|')
            self.video_root = root_dirs[int(root_dirs[0]) + 1]
        else:
            self.video_root = None

        # web root
        if self.options['web_root']:
            sickbeard.WEB_ROOT = self.options['web_root'] = (
                '/' + self.options['web_root'].lstrip('/').strip('/'))

        # api root
        if not sickbeard.API_KEY:
            sickbeard.API_KEY = generateApiKey()
        self.options['api_root'] = r'{0}/api/{1}'.format(
            sickbeard.WEB_ROOT, sickbeard.API_KEY)

        # tornado setup
        self.enable_https = self.options['enable_https']
        self.https_cert = self.options['https_cert']
        self.https_key = self.options['https_key']

        if self.enable_https:
            # If either the HTTPS certificate or key do not exist, make some self-signed ones.
            if not (self.https_cert
                    and ek(os.path.exists, self.https_cert)) or not (
                        self.https_key and ek(os.path.exists, self.https_key)):
                if not create_https_certificates(self.https_cert,
                                                 self.https_key):
                    logger.log(
                        "Unable to create CERT/KEY files, disabling HTTPS")
                    sickbeard.ENABLE_HTTPS = False
                    self.enable_https = False

            if not (ek(os.path.exists, self.https_cert)
                    and ek(os.path.exists, self.https_key)):
                logger.log(
                    "Disabled HTTPS because of missing CERT and KEY files",
                    logger.WARNING)
                sickbeard.ENABLE_HTTPS = False
                self.enable_https = False

        # Load the app
        self.app = Application(
            [],
            # debug=True enables autoreload, compiled_template_cache, static_hash_cache and serve_traceback.
            # This fixes the 404 page and autoreload for devs. We could now update without a restart if we check that the DB version hasn't changed!
            debug=False,
            autoreload=False,
            gzip=sickbeard.WEB_USE_GZIP,
            cookie_secret=sickbeard.WEB_COOKIE_SECRET,
            login_url='{0}/login/'.format(self.options['web_root']),
            static_path=self.options['data_root'],
            static_url_prefix='{0}/'.format(self.options['web_root'])
            # default_handler_class=Custom404Handler
        )

        # Static File Handlers
        self.app.add_handlers(
            ".*$",
            [
                url(r'{0}/favicon.ico'.format(self.options['web_root']),
                    StaticFileHandler, {
                        "path":
                        ek(os.path.join, self.options['data_root'],
                           'images/ico/favicon.ico')
                    },
                    name='favicon'),
                url(r'{0}/images/(.*)'.format(self.options['web_root']),
                    StaticFileHandler, {
                        "path":
                        ek(os.path.join, self.options['data_root'], 'images')
                    },
                    name='images'),
                url(r'{0}/cache/images/(.*)'.format(self.options['web_root']),
                    StaticFileHandler,
                    {"path": ek(os.path.join, sickbeard.CACHE_DIR, 'images')},
                    name='image_cache'),
                url(r'{0}/css/(.*)'.format(self.options['web_root']),
                    StaticFileHandler, {
                        "path": ek(os.path.join, self.options['data_root'],
                                   'css')
                    },
                    name='css'),
                url(r'{0}/js/(.*)'.format(self.options['web_root']),
                    StaticFileHandler, {
                        "path": ek(os.path.join, self.options['data_root'],
                                   'js')
                    },
                    name='js'),
                url(r'{0}/fonts/(.*)'.format(self.options['web_root']),
                    StaticFileHandler, {
                        "path":
                        ek(os.path.join, self.options['data_root'], 'fonts')
                    },
                    name='fonts')

                # TODO: WTF is this?
                # url(r'{0}/videos/(.*)'.format(self.options['web_root']), StaticFileHandler,
                #     {"path": self.video_root}, name='videos')
            ])

        # Main Handlers
        self.app.add_handlers(
            '.*$',
            [
                url(r'{0}(/?.*)'.format(self.options['api_root']),
                    ApiHandler,
                    name='api'),
                url(r'{0}/getkey(/?.*)'.format(self.options['web_root']),
                    KeyHandler,
                    name='get_api_key'),
                url(r'{0}/api/builder'.format(self.options['web_root']),
                    RedirectHandler,
                    {"url": self.options['web_root'] + '/apibuilder/'},
                    name='apibuilder'),
                url(r'{0}/login(/?)'.format(self.options['web_root']),
                    LoginHandler,
                    name='login'),
                url(r'{0}/logout(/?)'.format(self.options['web_root']),
                    LogoutHandler,
                    name='logout'),
                url(r'{0}/calendar/?'.format(self.options['web_root']),
                    CalendarHandler,
                    name='calendar'),

                # routes added by @route decorator
                # Plus naked index with missing web_root prefix
            ] + Route.get_routes(self.options['web_root']))
Example #50
def validate_name(pattern,
                  multi=None,
                  anime_type=None,
                  file_only=False,
                  abd=False,
                  sports=False):
    """
    See if we understand a name

    :param pattern: Name to analyse
    :param multi: Is this a multi-episode name
    :param anime_type: Is this anime
    :param file_only: Is this just a file or a dir
    :param abd: Is air-by-date enabled
    :param sports: Is this sports
    :return: True if valid name, False if not
    """
    ep = generate_sample_ep(multi, abd, sports, anime_type)

    new_name = ep.formatted_filename(pattern, multi, anime_type) + '.ext'
    new_path = ep.formatted_dir(pattern, multi, anime_type)
    if not file_only:
        new_name = ek(os.path.join, new_path, new_name)

    if not new_name:
        logger.log("Unable to create a name out of " + pattern, logger.DEBUG)
        return False

    logger.log("Trying to parse " + new_name, logger.DEBUG)

    try:
        result = NameParser(True, showObj=ep.show,
                            naming_pattern=True).parse(new_name)
    except (InvalidNameException, InvalidShowException) as error:
        logger.log("{0}".format(error), logger.DEBUG)
        return False

    logger.log("The name " + new_name + " parsed into " + str(result),
               logger.DEBUG)

    if abd or sports:
        if result.air_date != ep.airdate:
            logger.log(
                "Air date incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
    elif anime_type != 3:
        if len(result.ab_episode_numbers) and result.ab_episode_numbers != [
                x.absolute_number for x in [ep] + ep.relatedEps
        ]:
            logger.log(
                "Absolute numbering incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
    else:
        if result.season_number != ep.season:
            logger.log(
                "Season number incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False
        if result.episode_numbers != [x.episode for x in [ep] + ep.relatedEps]:
            logger.log(
                "Episode numbering incorrect in parsed episode, pattern isn't valid",
                logger.DEBUG)
            return False

    return True
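validate_name works by round-tripping: it renders a sample episode through the naming pattern, feeds the result back through NameParser, and checks that the season/episode (or air date, or absolute number) survive the trip. A stripped-down, self-contained sketch of that round-trip idea, using a toy pattern syntax and a plain regex in place of SickChill's NameParser:

import re


def toy_validate(pattern, show='Show Name', season=2, episode=3):
    """Render a sample name from the pattern, parse it back, and compare."""
    sample = (pattern
              .replace('%SN', show)
              .replace('%S', str(season))
              .replace('%0E', '{0:02d}'.format(episode))) + '.ext'

    match = re.match(r'^(?P<show>.+?) - S?(?P<season>\d+)[Ex](?P<episode>\d+)', sample)
    if not match:
        return False

    return (int(match.group('season')) == season
            and int(match.group('episode')) == episode)


print(toy_validate('%SN - %Sx%0E'))       # True  -> the pattern round-trips cleanly
print(toy_validate('%SN - episode %0E'))  # False -> the season information is lost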
Example #51
0
    def savePostProcessing(self,
                           kodi_data=None,
                           kodi_12plus_data=None,
                           mediabrowser_data=None,
                           sony_ps3_data=None,
                           wdtv_data=None,
                           tivo_data=None,
                           mede8er_data=None,
                           keep_processed_dir=None,
                           process_method=None,
                           processor_follow_symlinks=None,
                           del_rar_contents=None,
                           process_automatically=None,
                           no_delete=None,
                           rename_episodes=None,
                           airdate_episodes=None,
                           file_timestamp_timezone=None,
                           unpack=None,
                           unpack_dir=None,
                           unrar_tool=None,
                           alt_unrar_tool=None,
                           move_associated_files=None,
                           delete_non_associated_files=None,
                           sync_files=None,
                           postpone_if_sync_files=None,
                           allowed_extensions=None,
                           tv_download_dir=None,
                           create_missing_show_dirs=None,
                           add_shows_wo_dir=None,
                           extra_scripts=None,
                           nfo_rename=None,
                           naming_pattern=None,
                           naming_multi_ep=None,
                           naming_custom_abd=None,
                           naming_anime=None,
                           naming_abd_pattern=None,
                           naming_strip_year=None,
                           naming_custom_sports=None,
                           naming_sports_pattern=None,
                           naming_custom_anime=None,
                           naming_anime_pattern=None,
                           naming_anime_multi_ep=None,
                           autopostprocessor_frequency=None,
                           use_icacls=None):

        results = []

        if not config.change_tv_download_dir(tv_download_dir):
            results += [
                "Unable to create directory " +
                ek(os.path.normpath, tv_download_dir) + ", dir not changed."
            ]

        config.change_postprocessor_frequency(autopostprocessor_frequency)
        config.change_process_automatically(process_automatically)
        sickbeard.USE_ICACLS = config.checkbox_to_value(use_icacls)

        config.change_unrar_tool(unrar_tool, alt_unrar_tool)

        unpack = try_int(unpack)
        if unpack == sickbeard.UNPACK_PROCESS_CONTENTS:
            sickbeard.UNPACK = int(self.isRarSupported() != 'not supported')
            if sickbeard.UNPACK != sickbeard.UNPACK_PROCESS_CONTENTS:
                results.append(
                    _("Unpacking Not Supported, disabling unpack setting"))
        elif unpack in sickbeard.unpackStrings:
            sickbeard.UNPACK = unpack

        if not config.change_unpack_dir(unpack_dir):
            results += [
                "Unable to change unpack directory to " +
                ek(os.path.normpath, unpack_dir) + ", check the logs."
            ]

        sickbeard.NO_DELETE = config.checkbox_to_value(no_delete)
        sickbeard.KEEP_PROCESSED_DIR = config.checkbox_to_value(
            keep_processed_dir)
        sickbeard.CREATE_MISSING_SHOW_DIRS = config.checkbox_to_value(
            create_missing_show_dirs)
        sickbeard.ADD_SHOWS_WO_DIR = config.checkbox_to_value(add_shows_wo_dir)
        sickbeard.PROCESS_METHOD = process_method
        sickbeard.PROCESSOR_FOLLOW_SYMLINKS = config.checkbox_to_value(
            processor_follow_symlinks)
        sickbeard.DELRARCONTENTS = config.checkbox_to_value(del_rar_contents)
        sickbeard.EXTRA_SCRIPTS = [
            x.strip() for x in extra_scripts.split('|') if x.strip()
        ]
        sickbeard.RENAME_EPISODES = config.checkbox_to_value(rename_episodes)
        sickbeard.AIRDATE_EPISODES = config.checkbox_to_value(airdate_episodes)
        sickbeard.FILE_TIMESTAMP_TIMEZONE = file_timestamp_timezone
        sickbeard.MOVE_ASSOCIATED_FILES = config.checkbox_to_value(
            move_associated_files)
        sickbeard.DELETE_NON_ASSOCIATED_FILES = config.checkbox_to_value(
            delete_non_associated_files)
        sickbeard.SYNC_FILES = sync_files
        sickbeard.POSTPONE_IF_SYNC_FILES = config.checkbox_to_value(
            postpone_if_sync_files)

        sickbeard.ALLOWED_EXTENSIONS = ','.join(
            {x.strip()
             for x in allowed_extensions.split(',') if x.strip()})
        sickbeard.NAMING_CUSTOM_ABD = config.checkbox_to_value(
            naming_custom_abd)
        sickbeard.NAMING_CUSTOM_SPORTS = config.checkbox_to_value(
            naming_custom_sports)
        sickbeard.NAMING_CUSTOM_ANIME = config.checkbox_to_value(
            naming_custom_anime)
        sickbeard.NAMING_STRIP_YEAR = config.checkbox_to_value(
            naming_strip_year)
        sickbeard.NFO_RENAME = config.checkbox_to_value(nfo_rename)

        sickbeard.METADATA_KODI = kodi_data
        sickbeard.METADATA_KODI_12PLUS = kodi_12plus_data
        sickbeard.METADATA_MEDIABROWSER = mediabrowser_data
        sickbeard.METADATA_PS3 = sony_ps3_data
        sickbeard.METADATA_WDTV = wdtv_data
        sickbeard.METADATA_TIVO = tivo_data
        sickbeard.METADATA_MEDE8ER = mede8er_data

        sickbeard.metadata_provider_dict['KODI'].set_config(
            sickbeard.METADATA_KODI)
        sickbeard.metadata_provider_dict['KODI 12+'].set_config(
            sickbeard.METADATA_KODI_12PLUS)
        sickbeard.metadata_provider_dict['MediaBrowser'].set_config(
            sickbeard.METADATA_MEDIABROWSER)
        sickbeard.metadata_provider_dict['Sony PS3'].set_config(
            sickbeard.METADATA_PS3)
        sickbeard.metadata_provider_dict['WDTV'].set_config(
            sickbeard.METADATA_WDTV)
        sickbeard.metadata_provider_dict['TIVO'].set_config(
            sickbeard.METADATA_TIVO)
        sickbeard.metadata_provider_dict['Mede8er'].set_config(
            sickbeard.METADATA_MEDE8ER)

        if self.isNamingValid(naming_pattern,
                              naming_multi_ep,
                              anime_type=naming_anime) != "invalid":
            sickbeard.NAMING_PATTERN = naming_pattern
            sickbeard.NAMING_MULTI_EP = try_int(
                naming_multi_ep, NAMING_LIMITED_EXTEND_E_PREFIXED)
            sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders(
            )
        else:
            results.append(
                _("You tried saving an invalid normal naming config, not saving your naming settings"
                  ))

        if self.isNamingValid(naming_anime_pattern,
                              naming_anime_multi_ep,
                              anime_type=naming_anime) != "invalid":
            sickbeard.NAMING_ANIME_PATTERN = naming_anime_pattern
            sickbeard.NAMING_ANIME_MULTI_EP = try_int(
                naming_anime_multi_ep, NAMING_LIMITED_EXTEND_E_PREFIXED)
            sickbeard.NAMING_ANIME = try_int(naming_anime, 3)
            sickbeard.NAMING_FORCE_FOLDERS = naming.check_force_season_folders(
            )
        else:
            results.append(
                _("You tried saving an invalid anime naming config, not saving your naming settings"
                  ))

        if self.isNamingValid(naming_abd_pattern, None, abd=True) != "invalid":
            sickbeard.NAMING_ABD_PATTERN = naming_abd_pattern
        else:
            results.append(
                "You tried saving an invalid air-by-date naming config, not saving your air-by-date settings"
            )

        if self.isNamingValid(naming_sports_pattern, None,
                              sports=True) != "invalid":
            sickbeard.NAMING_SPORTS_PATTERN = naming_sports_pattern
        else:
            results.append(
                "You tried saving an invalid sports naming config, not saving your sports settings"
            )

        sickbeard.save_config()

        if results:
            for x in results:
                logger.log(x, logger.WARNING)
            ui.notifications.error(_('Error(s) Saving Configuration'),
                                   '<br>\n'.join(results))
        else:
            ui.notifications.message(_('Configuration Saved'),
                                     ek(os.path.join, sickbeard.CONFIG_FILE))

        return self.redirect("/config/postProcessing/")
Example #52
0
    def _thumbnails_dir(self):
        """
        Builds up the full path to the thumbnails image cache directory
        """
        return ek(os.path.abspath,
                  ek(os.path.join, self._cache_dir(), 'thumbnails'))
Example #53
0
    @staticmethod
    def _cache_dir():
        """
        Builds up the full path to the image cache directory
        """
        return ek(os.path.abspath,
                  ek(os.path.join, sickbeard.CACHE_DIR, 'images'))
Example #54
0
    def fill_cache(self, show_obj):
        """
        Caches all images for the given show. Copies them from the show dir if possible, or
        downloads them from the indexer if they aren't in the show dir.

        :param show_obj: TVShow object to cache images for
        """

        logger.log(
            "Checking if we need any cache images for show " +
            str(show_obj.indexerid), logger.DEBUG)

        # check if the images are already cached or not
        need_images = {
            self.POSTER: not self.has_poster(show_obj.indexerid),
            self.BANNER: not self.has_banner(show_obj.indexerid),
            self.POSTER_THUMB:
            not self.has_poster_thumbnail(show_obj.indexerid),
            self.BANNER_THUMB:
            not self.has_banner_thumbnail(show_obj.indexerid),
            self.FANART: not self.has_fanart(show_obj.indexerid)
        }

        if not any(need_images.values()):
            logger.log("No new cache images needed, not retrieving new ones",
                       logger.DEBUG)
            return

        # check the show dir for poster or banner images and use them
        if need_images[self.POSTER] or need_images[self.BANNER] or need_images[
                self.FANART]:
            try:
                for cur_provider in sickbeard.metadata_provider_dict.values():
                    logger.log(
                        "Checking if we can use the show image from the " +
                        cur_provider.name + " metadata", logger.DEBUG)
                    if ek(os.path.isfile,
                          cur_provider.get_poster_path(show_obj)):
                        cur_file_name = ek(
                            os.path.abspath,
                            cur_provider.get_poster_path(show_obj))
                        cur_file_type = self.which_type(cur_file_name)

                        if cur_file_type is None:
                            logger.log(
                                "Unable to retrieve image type, not using the image from "
                                + str(cur_file_name), logger.WARNING)
                            continue

                        logger.log(
                            "Checking if image " + cur_file_name + " (type " +
                            str(cur_file_type) + " needs metadata: " +
                            str(need_images[cur_file_type]), logger.DEBUG)

                        if cur_file_type in need_images and need_images[
                                cur_file_type]:
                            logger.log(
                                "Found an image in the show dir that doesn't exist in the cache, caching it: "
                                + cur_file_name + ", type " +
                                str(cur_file_type), logger.DEBUG)
                            self._cache_image_from_file(
                                cur_file_name, cur_file_type,
                                show_obj.indexerid)
                            need_images[cur_file_type] = False
            except ShowDirectoryNotFoundException:
                logger.log(
                    "Unable to search for images in show dir because it doesn't exist",
                    logger.DEBUG)

        # download from indexer for missing ones
        for cur_image_type in [
                self.POSTER, self.BANNER, self.POSTER_THUMB, self.BANNER_THUMB,
                self.FANART
        ]:
            logger.log(
                "Seeing if we still need an image of type " +
                str(cur_image_type) + ": " + str(need_images[cur_image_type]),
                logger.DEBUG)
            if cur_image_type in need_images and need_images[cur_image_type]:
                self._cache_image_from_indexer(show_obj, cur_image_type)

        logger.log("Done cache check")
Example #55
0
    @staticmethod
    def scene_quality(name, anime=False):
        """
        Return the quality from the scene episode file

        :param name: Episode filename to analyse
        :param anime: Boolean to indicate if the show we're resolving is Anime
        :return: Quality
        """

        if not name:
            return Quality.UNKNOWN

        name = ek(path.basename, name)

        result = None
        ep = EpisodeTags(name)

        if anime:
            sd_options = tags.anime_sd.search(name)
            hd_options = tags.anime_hd.search(name)
            full_hd = tags.anime_fullhd.search(name)
            ep.rex[b'bluray'] = tags.anime_bluray

            # BluRay
            if ep.bluray and (full_hd or hd_options):
                result = Quality.FULLHDBLURAY if full_hd else Quality.HDBLURAY
            # HD TV
            elif not ep.bluray and (full_hd or hd_options):
                result = Quality.FULLHDTV if full_hd else Quality.HDTV
            # SD DVD
            elif ep.dvd:
                result = Quality.SDDVD
            # SD TV
            elif sd_options:
                result = Quality.SDTV
        elif ep.hevc and not sickbeard.QUALITY_ALLOW_HEVC:
            result = Quality.NONE
        elif ep.mpeg:
            result = Quality.RAWHDTV
        # Is it UHD?
        elif ep.vres in {2160, 4320} and ep.scan == 'p':
            # BluRay
            full_res = (ep.vres == 4320)
            if ep.bluray:
                result = (Quality.UHD_4K_BLURAY,
                          Quality.UHD_8K_BLURAY)[full_res]
            # WEB-DL
            elif ep.itunes or ep.amazon or ep.netflix or ep.web:
                result = (Quality.UHD_4K_WEBDL, Quality.UHD_8K_WEBDL)[full_res]
            # HDTV
            elif ep.tv == 'hd':
                result = (Quality.UHD_4K_TV, Quality.UHD_8K_TV)[full_res]
        elif ep.vres in {1080, 720}:
            if ep.scan == 'p':
                # BluRay
                full_res = (ep.vres == 1080)
                if ep.bluray or ep.hddvd:
                    result = (Quality.HDBLURAY, Quality.FULLHDBLURAY)[full_res]
                # WEB-DL
                elif ep.itunes or ep.amazon or ep.netflix or ep.web:
                    result = (Quality.HDWEBDL, Quality.FULLHDWEBDL)[full_res]
                # HDTV
                elif ep.tv == 'hd' or ep.hevc:
                    result = (Quality.HDTV,
                              Quality.FULLHDTV)[full_res]  # 1080 HDTV h264
                # MPEG2 encoded
                elif all([full_res, ep.tv == 'hd', ep.mpeg]):
                    result = Quality.RAWHDTV
                elif all([not full_res, ep.tv == 'hd', ep.mpeg]):
                    result = Quality.RAWHDTV
            elif (ep.res == '1080i') and ep.tv == 'hd' and (
                    ep.mpeg or (ep.raw and ep.avc_non_free)):
                result = Quality.RAWHDTV
        elif not ep.vres and (ep.netflix or ep.amazon or ep.itunes):
            result = Quality.HDWEBDL
        elif ep.hrws:
            result = Quality.HDTV

        # Is it SD?
        elif ep.xvid or ep.avc:
            # SD DVD
            if ep.dvd or ep.bluray:
                result = Quality.SDDVD
            # SDTV
            elif ep.res == '480p' or any([ep.tv, ep.sat, ep.web]):
                result = Quality.SDTV
        elif ep.dvd:
            # SD DVD
            result = Quality.SDDVD
        elif ep.tv:
            # SD TV/HD TV
            result = (Quality.SDTV, Quality.HDTV)[ep.tv == 'hd']
        elif ep.raw or ep.mpeg:
            # RawHD
            result = Quality.RAWHDTV

        return Quality.UNKNOWN if result is None else result
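scene_quality is essentially a priority-ordered ladder of release-name tags: a source tag (BluRay, WEB, HDTV, DVD) combined with the vertical resolution picks the quality bucket. A tiny, self-contained approximation of that ladder for non-anime names (the real method recognises many more tags and edge cases):

import re


def rough_quality(name):
    name = name.lower()
    res = re.search(r'(\d{3,4})[pi]', name)
    vres = int(res.group(1)) if res else None
    bluray = 'bluray' in name or 'blu-ray' in name
    web = any(tag in name for tag in ('web-dl', 'webdl', 'webrip', 'amzn', 'itunes'))

    if vres and vres >= 720:
        if bluray:
            return 'FULLHD BLURAY' if vres >= 1080 else 'HD BLURAY'
        if web:
            return 'FULLHD WEBDL' if vres >= 1080 else 'HD WEBDL'
        return 'FULLHD TV' if vres >= 1080 else 'HD TV'
    if 'dvd' in name or bluray:
        return 'SD DVD'
    if 'hdtv' in name or web:
        return 'SD TV'
    return 'UNKNOWN'


print(rough_quality('Show.S01E01.1080p.BluRay.x264-GRP.mkv'))  # FULLHD BLURAY
print(rough_quality('Show.S01E01.720p.HDTV.x264-GRP.mkv'))     # HD TV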
Example #56
0
    def retrieveShowMetadata(self, folder):
        """
        Used only when mass-adding existing shows, using previously generated show metadata to reduce the need to query TVDB.
        """

        empty_return = (None, None, None)

        assert isinstance(folder, six.text_type)

        metadata_path = ek(os.path.join, folder, self._show_metadata_filename)

        if not ek(os.path.isdir, folder) or not ek(os.path.isfile,
                                                   metadata_path):
            logger.log(
                "Can't load the metadata file from " + metadata_path +
                ", it doesn't exist", logger.DEBUG)
            return empty_return

        logger.log("Loading show info from metadata file in " + metadata_path,
                   logger.DEBUG)

        try:
            with io.open(metadata_path, 'rb') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            if showXML.findtext('title') is None or (
                    showXML.findtext('tvdbid') is None
                    and showXML.findtext('id') is None):
                logger.log(
                    "Invalid info in tvshow.nfo (missing name or id): {0} {1} {2}"
                    .format(showXML.findtext('title'),
                            showXML.findtext('tvdbid'),
                            showXML.findtext('id')))
                return empty_return

            name = showXML.findtext('title')

            indexer_id_text = showXML.findtext('tvdbid') or showXML.findtext(
                'id')
            if indexer_id_text:
                indexer_id = try_int(indexer_id_text, None)
                if indexer_id is None or indexer_id < 1:
                    logger.log(
                        "Invalid Indexer ID (" + str(indexer_id) +
                        "), not using metadata file", logger.DEBUG)
                    return empty_return
            else:
                logger.log(
                    "Empty <id> or <tvdbid> field in NFO, unable to find a ID, not using metadata file",
                    logger.DEBUG)
                return empty_return

            indexer = 1
            epg_url_text = showXML.findtext('episodeguide/url')
            if epg_url_text:
                epg_url = epg_url_text.lower()
                if str(indexer_id) in epg_url and 'tvrage' in epg_url:
                    logger.log(
                        "Invalid Indexer ID (" + str(indexer_id) +
                        "), not using metadata file because it has TVRage info",
                        logger.WARNING)
                    return empty_return

        except Exception as e:
            logger.log(
                "There was an error parsing your existing metadata file: '" +
                metadata_path + "' error: " + ex(e), logger.WARNING)
            return empty_return

        return indexer_id, name, indexer
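The metadata file being read here is a Kodi-style tvshow.nfo; only the title and the tvdbid/id fields actually matter, and indexer is hard-coded to 1 (TheTVDB). A self-contained sketch of the same extraction against an in-memory NFO (the sample values are invented):

import xml.etree.ElementTree as etree

NFO = """<tvshow>
    <title>Some Show</title>
    <tvdbid>123456</tvdbid>
</tvshow>"""


def read_nfo(xml_text):
    root = etree.fromstring(xml_text)
    name = root.findtext('title')
    indexer_id = root.findtext('tvdbid') or root.findtext('id')
    if not name or not indexer_id:
        return None, None, None
    # indexer 1 == TheTVDB, mirroring the hard-coded default above
    return int(indexer_id), name, 1


print(read_nfo(NFO))  # (123456, 'Some Show', 1)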
Example #57
0
    def submit_errors(self):

        submitter_result = ''
        issue_id = None

        gh_credentials = (sickbeard.GIT_AUTH_TYPE == 0 and sickbeard.GIT_USERNAME and sickbeard.GIT_PASSWORD) \
            or (sickbeard.GIT_AUTH_TYPE == 1 and sickbeard.GIT_TOKEN)

        if not all((gh_credentials, sickbeard.DEBUG, sickbeard.gh,
                    classes.ErrorViewer.errors)):
            submitter_result = 'Please set your GitHub token or username and password in the config and enable debug. Unable to submit issue ticket to GitHub!'
            return submitter_result, issue_id

        try:
            from .versionChecker import CheckVersion
            checkversion = CheckVersion()
            checkversion.check_for_new_version()
            commits_behind = checkversion.updater.get_num_commits_behind()
        except Exception:
            submitter_result = 'Could not check if your SickChill is updated, unable to submit issue ticket to GitHub!'
            return submitter_result, issue_id

        if commits_behind is None or commits_behind > 0:
            submitter_result = 'Please update SickChill, unable to submit issue ticket to GitHub with an outdated version!'
            return submitter_result, issue_id

        if self.submitter_running:
            submitter_result = 'Issue submitter is running, please wait for it to complete'
            return submitter_result, issue_id

        self.submitter_running = True

        try:
            # read log file
            __log_data = []

            if ek(os.path.isfile, self.log_file):
                with io.open(self.log_file, encoding='utf-8') as log_f:
                    __log_data = log_f.readlines()

            for i in range(1, int(sickbeard.LOG_NR)):
                f_name = '{0}.{1:d}'.format(self.log_file, i)
                if ek(os.path.isfile, f_name) and (len(__log_data) <= 500):
                    with io.open(f_name, encoding='utf-8') as log_f:
                        __log_data += log_f.readlines()

            __log_data = list(reversed(__log_data))

            # parse and submit errors to issue tracker
            for cur_error in sorted(classes.ErrorViewer.errors,
                                    key=lambda error: error.time,
                                    reverse=True)[:500]:
                try:
                    title_error = ss(str(cur_error.title))
                    if not title_error or title_error == 'None':
                        title_error = re.match(
                            r'^[A-Za-z0-9\-\[\] :]+::\s(?:\[[\w]{7}\])\s*(.*)$',
                            ss(cur_error.message)).group(1)

                    if len(title_error) > 1000:
                        title_error = title_error[0:1000]

                except Exception as err_msg:
                    self.log(
                        'Unable to get error title : {0}'.format(ex(err_msg)),
                        ERROR)
                    title_error = 'UNKNOWN'

                gist = None
                regex = r'^(?P<time>{time})\s+(?P<level>[A-Z]+)\s+[A-Za-z0-9\-\[\] :]+::.*$'.format(
                    time=re.escape(cur_error.time))
                for i, data in enumerate(__log_data):
                    match = re.match(regex, data)
                    if match:
                        level = match.group('level')
                        if LOGGING_LEVELS[level] == ERROR:
                            paste_data = ''.join(__log_data[i:i + 50])
                            if paste_data:
                                gist = sickbeard.gh.get_user().create_gist(
                                    False, {
                                        'sickchill.log':
                                        InputFileContent(paste_data)
                                    })
                            break
                    else:
                        gist = 'No ERROR found'

                try:
                    locale_name = locale.getdefaultlocale()[1]
                except Exception:
                    locale_name = 'unknown'

                if gist and gist != 'No ERROR found':
                    log_link = 'Link to Log: {0}'.format(gist.html_url)
                else:
                    log_link = 'No Log available with ERRORS:'

                msg = [
                    '### INFO',
                    'Python Version: **{0}**'.format(sys.version[:120].replace(
                        '\n', '')),
                    'Operating System: **{0}**'.format(platform.platform()),
                    'Locale: {0}'.format(locale_name),
                    'Branch: **{0}**'.format(sickbeard.BRANCH),
                    'Commit: SickChill/SickChill@{0}'.format(
                        sickbeard.CUR_COMMIT_HASH),
                    log_link,
                    '### ERROR',
                    '```',
                    cur_error.message,
                    '```',
                    '---',
                    '_STAFF NOTIFIED_: @SickChill/owners @SickChill/moderators',
                ]

                message = '\n'.join(msg)
                title_error = '[APP SUBMITTED]: {0}'.format(title_error)

                repo = sickbeard.gh.get_organization(
                    sickbeard.GIT_ORG).get_repo(sickbeard.GIT_REPO)
                reports = repo.get_issues(state='all')

                def is_ascii_error(title):
                    # [APP SUBMITTED]: 'ascii' codec can't encode characters in position 00-00: ordinal not in range(128)
                    # [APP SUBMITTED]: 'charmap' codec can't decode byte 0x00 in position 00: character maps to <undefined>
                    return re.search(
                        r'.* codec can\'t .*code .* in position .*:',
                        title) is not None

                def is_malformed_error(title):
                    # [APP SUBMITTED]: not well-formed (invalid token): line 0, column 0
                    return re.search(
                        r'.* not well-formed \(invalid token\): line .* column .*',
                        title) is not None

                ascii_error = is_ascii_error(title_error)
                malformed_error = is_malformed_error(title_error)

                issue_found = False
                for report in reports:
                    if title_error.rsplit(' :: ')[-1] in report.title or \
                        (malformed_error and is_malformed_error(report.title)) or \
                            (ascii_error and is_ascii_error(report.title)):

                        issue_id = report.number
                        if not report.raw_data['locked']:
                            if report.create_comment(message):
                                submitter_result = 'Commented on existing issue #{0} successfully!'.format(
                                    issue_id)
                            else:
                                submitter_result = 'Failed to comment on found issue #{0}!'.format(
                                    issue_id)
                        else:
                            submitter_result = 'Issue #{0} is locked, check GitHub to find info about the error.'.format(
                                issue_id)

                        issue_found = True
                        break

                if not issue_found:
                    issue = repo.create_issue(title_error, message)
                    if issue:
                        issue_id = issue.number
                        submitter_result = 'Your issue ticket #{0} was submitted successfully!'.format(
                            issue_id)
                    else:
                        submitter_result = 'Failed to create a new issue!'

                if issue_id and cur_error in classes.ErrorViewer.errors:
                    # clear error from error list
                    classes.ErrorViewer.errors.remove(cur_error)
        except RateLimitExceededException:
            submitter_result = 'Your GitHub user has exceeded its API rate limit, please try again later'
            issue_id = None
        except TwoFactorException:
            submitter_result = (
                'Your GitHub account requires Two-Factor Authentication, '
                'please change your auth method in the config')
            issue_id = None
        except Exception:
            self.log(traceback.format_exc(), ERROR)
            submitter_result = 'Exception generated in issue submitter, please check the log'
            issue_id = None
        finally:
            self.submitter_running = False

        return submitter_result, issue_id
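One detail worth calling out above: for each error, the submitter scans the reversed log for the matching timestamped ERROR line and pastes the following 50 lines into a gist so the issue can link to context. A self-contained sketch of that slice-out step (the sample lines and window size are illustrative):

import re

LOG = [
    '2024-01-02 03:04:07 ERROR   MAIN :: Something broke',
    '2024-01-02 03:04:07 DEBUG   MAIN :: traceback line 1',
    '2024-01-02 03:04:07 DEBUG   MAIN :: traceback line 2',
]


def slice_error_context(lines, when, window=50):
    # match only lines whose timestamp equals the error's timestamp
    regex = r'^(?P<time>{time})\s+(?P<level>[A-Z]+)\s+.*$'.format(time=re.escape(when))
    for i, line in enumerate(lines):
        match = re.match(regex, line)
        if match and match.group('level') == 'ERROR':
            return '\n'.join(lines[i:i + window])
    return None


print(slice_error_context(LOG, '2024-01-02 03:04:07'))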
Example #58
0
    def start(self):  # pylint: disable=too-many-branches,too-many-statements
        """
        Start SickChill
        """
        # do some preliminary stuff
        sickbeard.MY_FULLNAME = ek(os.path.normpath, ek(os.path.abspath, __file__))
        sickbeard.MY_NAME = ek(os.path.basename, sickbeard.MY_FULLNAME)
        sickbeard.PROG_DIR = ek(os.path.dirname, sickbeard.MY_FULLNAME)
        sickbeard.LOCALE_DIR = ek(os.path.join, sickbeard.PROG_DIR, 'locale')
        sickbeard.DATA_DIR = sickbeard.PROG_DIR
        sickbeard.MY_ARGS = sys.argv[1:]

        try:
            locale.setlocale(locale.LC_ALL, '')
            sickbeard.SYS_ENCODING = locale.getpreferredencoding()
        except (locale.Error, IOError):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # pylint: disable=no-member
        if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING.lower() in ('ansi_x3.4-1968', 'us-ascii', 'ascii', 'charmap') or \
                (sys.platform.startswith('win') and sys.getwindowsversion()[0] >= 6 and str(getattr(sys.stdout, 'device', sys.stdout).encoding).lower() in ('cp65001', 'charmap')):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # TODO: Continue working on making this unnecessary, this hack creates all sorts of hellish problems
        if not hasattr(sys, 'setdefaultencoding'):
            reload_module(sys)

        try:
            # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(sickbeard.SYS_ENCODING)  # pylint: disable=no-member
        except (AttributeError, LookupError):
            sys.exit('Sorry, you MUST add the SickChill folder to the PYTHONPATH environment variable\n'
                     'or find another way to force Python to use {} for string encoding.'.format(sickbeard.SYS_ENCODING))

        # Rename the main thread
        threading.currentThread().name = 'MAIN'

        args = SickChillArgumentParser(sickbeard.PROG_DIR).parse_args()

        if args.force_update:
            result = self.force_update()
            sys.exit(int(not result))  # Ok -> 0 , Error -> 1

        # Need console logging for SickBeard.py and SickBeard-console.exe
        sickbeard.NO_RESIZE = args.noresize
        self.console_logging = (not hasattr(sys, 'frozen')) or (sickbeard.MY_NAME.lower().find('-console') > 0) and not args.quiet
        self.no_launch = args.nolaunch
        self.forced_port = args.port
        if args.daemon:
            self.run_as_daemon = platform.system() != 'Windows'
            self.console_logging = False
            self.no_launch = True

        self.create_pid = bool(args.pidfile)
        self.pid_file = args.pidfile
        if self.pid_file and ek(os.path.exists, self.pid_file):
            # If the pid file already exists, SickChill may still be running, so exit
            raise SystemExit('PID file: {0} already exists. Exiting.'.format(self.pid_file))

        sickbeard.DATA_DIR = ek(os.path.abspath, args.datadir) if args.datadir else sickbeard.DATA_DIR
        sickbeard.CONFIG_FILE = ek(os.path.abspath, args.config) if args.config else ek(os.path.join, sickbeard.DATA_DIR, 'config.ini')

        # The pid file is only useful in daemon mode, make sure we can write the file properly
        if self.create_pid:
            if self.run_as_daemon:
                pid_dir = ek(os.path.dirname, self.pid_file)
                if not ek(os.access, pid_dir, os.F_OK):
                    sys.exit('PID dir: {0} doesn\'t exist. Exiting.'.format(pid_dir))
                if not ek(os.access, pid_dir, os.W_OK):
                    raise SystemExit('PID dir: {0} must be writable (write permissions). Exiting.'.format(pid_dir))
            else:
                if self.console_logging:
                    sys.stdout.write('Not running in daemon mode. PID file creation disabled.\n')
                self.create_pid = False

        # Make sure that we can create the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.F_OK):
            try:
                ek(os.makedirs, sickbeard.DATA_DIR, 0o744)
            except os.error:
                raise SystemExit('Unable to create data directory: {0}'.format(sickbeard.DATA_DIR))

        # Make sure we can write to the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.W_OK):
            raise SystemExit('Data directory must be writeable: {0}'.format(sickbeard.DATA_DIR))

        # Make sure we can write to the config file
        if not ek(os.access, sickbeard.CONFIG_FILE, os.W_OK):
            if ek(os.path.isfile, sickbeard.CONFIG_FILE):
                raise SystemExit('Config file must be writeable: {0}'.format(sickbeard.CONFIG_FILE))
            elif not ek(os.access, ek(os.path.dirname, sickbeard.CONFIG_FILE), os.W_OK):
                raise SystemExit('Config file root dir must be writeable: {0}'.format(ek(os.path.dirname, sickbeard.CONFIG_FILE)))

        ek(os.chdir, sickbeard.DATA_DIR)

        # Check if we need to perform a restore first
        restore_dir = ek(os.path.join, sickbeard.DATA_DIR, 'restore')
        if ek(os.path.exists, restore_dir):
            success = self.restore_db(restore_dir, sickbeard.DATA_DIR)
            if self.console_logging:
                sys.stdout.write('Restore: restoring DB and config.ini {0}!\n'.format(('FAILED', 'SUCCESSFUL')[success]))

        # Load the config and publish it to the sickbeard package
        if self.console_logging and not ek(os.path.isfile, sickbeard.CONFIG_FILE):
            sys.stdout.write('Unable to find {0}, all settings will be default!\n'.format(sickbeard.CONFIG_FILE))

        sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE, encoding='UTF-8')

        # Initialize the config and our threads
        sickbeard.initialize(consoleLogging=self.console_logging)

        if self.run_as_daemon:
            self.daemonize()

        # Get PID
        sickbeard.PID = os.getpid()

        # Build from the DB to start with
        self.load_shows_from_db()

        logger.log('Starting SickChill [{branch}] using \'{config}\''.format
                   (branch=sickbeard.BRANCH, config=sickbeard.CONFIG_FILE))

        self.clear_cache()

        if self.forced_port:
            logger.log('Forcing web server to port {port}'.format(port=self.forced_port))
            self.start_port = self.forced_port
        else:
            self.start_port = sickbeard.WEB_PORT

        if sickbeard.WEB_LOG:
            self.log_dir = sickbeard.LOG_DIR
        else:
            self.log_dir = None

        # sickbeard.WEB_HOST is available as a configuration value in various
        # places but is not configurable. It is supported here for historic reasons.
        if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
            self.web_host = sickbeard.WEB_HOST
        else:
            self.web_host = '' if sickbeard.WEB_IPV6 else '0.0.0.0'

        # web server options
        self.web_options = {
            'port': int(self.start_port),
            'host': self.web_host,
            'data_root': ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME),
            'web_root': sickbeard.WEB_ROOT,
            'log_dir': self.log_dir,
            'username': sickbeard.WEB_USERNAME,
            'password': sickbeard.WEB_PASSWORD,
            'enable_https': sickbeard.ENABLE_HTTPS,
            'handle_reverse_proxy': sickbeard.HANDLE_REVERSE_PROXY,
            'https_cert': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_CERT),
            'https_key': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_KEY),
        }

        # start web server
        self.web_server = SRWebServer(self.web_options)
        self.web_server.start()

        # Fire up all our threads
        sickbeard.start()

        # Build internal name cache
        name_cache.buildNameCache()

        # Pre-populate network timezones, it isn't thread safe
        network_timezones.update_network_dict()

        # sure, why not?
        if sickbeard.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # Check for metadata indexer updates for shows (sets the next aired ep!)
        # sickbeard.showUpdateScheduler.forceRun()

        # Launch browser
        if sickbeard.LAUNCH_BROWSER and not (self.no_launch or self.run_as_daemon):
            sickbeard.launchBrowser('https' if sickbeard.ENABLE_HTTPS else 'http', self.start_port, sickbeard.WEB_ROOT)

        # main loop
        while True:
            time.sleep(1)
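Most of the early part of start() is defensive filesystem bootstrapping: resolve the program and data directories, make sure the PID file, data dir and config file are usable, then chdir into the data dir before anything else runs. A condensed, self-contained sketch of those checks (the paths are illustrative, not SickChill's defaults):

import os
import sys
import tempfile


def bootstrap(data_dir, pid_file=None):
    if pid_file and os.path.exists(pid_file):
        sys.exit('PID file: {0} already exists. Exiting.'.format(pid_file))

    if not os.access(data_dir, os.F_OK):
        try:
            os.makedirs(data_dir, 0o744)
        except OSError:
            sys.exit('Unable to create data directory: {0}'.format(data_dir))

    if not os.access(data_dir, os.W_OK):
        sys.exit('Data directory must be writeable: {0}'.format(data_dir))

    os.chdir(data_dir)
    return os.path.join(data_dir, 'config.ini')


config_file = bootstrap(os.path.join(tempfile.mkdtemp(), 'data'))
print('using config at', config_file)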
Example #59
0
    def run(self):

        super(QueueItemAdd, self).run()

        if self.showDir:
            try:
                assert isinstance(self.showDir, six.text_type)
            except AssertionError:
                logger.log(traceback.format_exc(), logger.WARNING)
                self._finish_early()
                return

        logger.log('Starting to add show {0}'.format(
            'by ShowDir: {0}'.format(self.showDir) if self.showDir
            else 'by Indexer Id: {0}'.format(self.indexer_id)))
        # make sure the Indexer IDs are valid
        try:
            s = sickchill.indexer.series_by_id(indexerid=self.indexer_id,
                                               indexer=self.indexer,
                                               language=self.lang)
            if not s:
                error_string = 'Could not find show with id:{0} on {1}, skipping'.format(
                    self.indexer_id, sickchill.indexer.name(self.indexer))

                logger.log(error_string)
                ui.notifications.error('Unable to add show', error_string)

                self._finish_early()
                return

            # Let's try to create the show dir if it's not provided. This way we force the show dir to be built using the
            # indexer-provided series name
            if self.root_dir and not self.showDir:
                if not s.seriesName:
                    logger.log(
                        'Unable to get a show {0}, can\'t add the show'.format(
                            self.showDir))
                    self._finish_early()
                    return

                self.showDir = ek(os.path.join, self.root_dir,
                                  sanitize_filename(s.seriesName))

                dir_exists = makeDir(self.showDir)
                if not dir_exists:
                    logger.log(
                        'Unable to create the folder {0}, can\'t add the show'.
                        format(self.showDir))
                    self._finish_early()
                    return

                chmodAsParent(self.showDir)

            # this usually only happens if they have an NFO in their show dir which gave us an Indexer ID that has no proper English version of the show
            if getattr(s, 'seriesName', None) is None:
                # noinspection PyPep8
                error_string = 'Show in {0} has no name on {1}, probably searched with the wrong language. Delete .nfo and add manually in the correct language.'.format(
                    self.showDir, sickchill.indexer.name(self.indexer))

                logger.log(error_string, logger.WARNING)
                ui.notifications.error('Unable to add show', error_string)

                self._finish_early()
                return
        except Exception as error:
            error_string = 'Unable to look up the show in {0} on {1} using ID {2}, not using the NFO. Delete .nfo and try adding manually again.'.format(
                self.showDir, sickchill.indexer.name(self.indexer),
                self.indexer_id)

            logger.log('{0}: {1}'.format(error_string, error), logger.ERROR)
            ui.notifications.error('Unable to add show', error_string)

            if sickbeard.USE_TRAKT:
                trakt_api = TraktAPI(sickbeard.SSL_VERIFY,
                                     sickbeard.TRAKT_TIMEOUT)

                title = self.showDir.split('/')[-1]
                data = {
                    'shows': [{
                        'title': title,
                        'ids': {
                            sickchill.indexer.slug(self.indexer):
                            self.indexer_id
                        }
                    }]
                }
                trakt_api.traktRequest('sync/watchlist/remove',
                                       data,
                                       method='POST')

            self._finish_early()
            return

        try:
            try:
                newShow = TVShow(self.indexer, self.indexer_id, self.lang)
            except MultipleShowObjectsException as error:
                # If we have the show in our list, but the location is wrong, lets fix it and refresh!
                existing_show = Show.find(sickbeard.showList, self.indexer_id)
                # noinspection PyProtectedMember
                if existing_show and not ek(os.path.isdir,
                                            existing_show._location):
                    newShow = existing_show
                else:
                    raise error

            newShow.loadFromIndexer()

            self.show = newShow

            # set up initial values
            self.show.location = self.showDir
            self.show.subtitles = self.subtitles if self.subtitles is not None else sickbeard.SUBTITLES_DEFAULT
            self.show.subtitles_sr_metadata = self.subtitles_sr_metadata
            self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT
            self.show.season_folders = self.season_folders if self.season_folders is not None else sickbeard.SEASON_FOLDERS_DEFAULT
            self.show.anime = self.anime if self.anime is not None else sickbeard.ANIME_DEFAULT
            self.show.scene = self.scene if self.scene is not None else sickbeard.SCENE_DEFAULT
            self.show.paused = self.paused if self.paused is not None else False

            # set up default new/missing episode status
            logger.log(
                'Setting all episodes to the specified default status: {0}'.
                format(self.show.default_ep_status))
            self.show.default_ep_status = self.default_status

            if self.show.anime:
                self.show.release_groups = BlackAndWhiteList(
                    self.show.indexerid)
                if self.blacklist:
                    self.show.release_groups.set_black_keywords(self.blacklist)
                if self.whitelist:
                    self.show.release_groups.set_white_keywords(self.whitelist)

            # # be smart-ish about this
            # if self.show.genre and 'talk show' in self.show.genre.lower():
            #     self.show.air_by_date = 1
            # if self.show.genre and 'documentary' in self.show.genre.lower():
            #     self.show.air_by_date = 0
            # if self.show.classification and 'sports' in self.show.classification.lower():
            #     self.show.sports = 1

        except MultipleShowObjectsException:
            # catch the specific exception before the generic one, otherwise this handler is unreachable
            error_string = 'The show in {0} is already in your show list, skipping'.format(
                self.showDir)
            logger.log(error_string, logger.WARNING)
            ui.notifications.error('Show skipped', error_string)

            self._finish_early()
            return

        except Exception as error:
            error_string = 'Unable to add {0} due to an error with {1}'.format(
                self.show.name if self.show else 'show',
                sickchill.indexer.name(self.indexer))

            logger.log('{0}: {1}'.format(error_string, error), logger.ERROR)

            logger.log('Error trying to add show: {0}'.format(error),
                       logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

            ui.notifications.error('Unable to add show', error_string)

            self._finish_early()
            return

        logger.log('Retrieving show info from IMDb', logger.DEBUG)
        try:
            self.show.loadIMDbInfo()
        except imdb_exceptions.IMDbError as error:
            logger.log('Something went wrong with the IMDb API: {0}'.format(error),
                       logger.WARNING)
        except Exception as error:
            logger.log('Error loading IMDb info: {0}'.format(error),
                       logger.ERROR)

        try:
            self.show.saveToDB()
        except Exception as error:
            logger.log(
                'Error saving the show to the database: {0}'.format(error),
                logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            self._finish_early()
            raise

        # add it to the show list
        if not Show.find(sickbeard.showList, self.indexer_id):
            sickbeard.showList.append(self.show)

        try:
            self.show.loadEpisodesFromIndexer()
        except Exception as error:
            logger.log(
                'Error with {0}, not creating episode list: {1}'.format(
                    self.show.idxr.name, error), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

        # update internal name cache
        name_cache.buildNameCache(self.show)

        try:
            self.show.loadEpisodesFromDir()
        except Exception as error:
            logger.log('Error searching dir for episodes: {0}'.format(error),
                       logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)

        # if they set default ep status to WANTED then run the backlog to search for episodes
        # FIXME: This needs to be a backlog queue item!!!
        if self.show.default_ep_status == WANTED:
            logger.log(
                'Launching backlog for this show since its episodes are WANTED'
            )
            sickbeard.backlogSearchScheduler.action.searchBacklog([self.show])

        self.show.writeMetadata()
        self.show.updateMetadata()
        self.show.populateCache()

        self.show.flushEpisodes()

        if sickbeard.USE_TRAKT:
            # if there are specific episodes that need to be added by trakt
            sickbeard.traktCheckerScheduler.action.manageNewShow(self.show)
            # add show to trakt.tv library
            if sickbeard.TRAKT_SYNC:
                sickbeard.traktCheckerScheduler.action.addShowToTraktLibrary(
                    self.show)

            if sickbeard.TRAKT_SYNC_WATCHLIST:
                logger.log('update watchlist')
                notifiers.trakt_notifier.update_watchlist(show_obj=self.show)

        # Load XEM data to DB for show
        scene_numbering.xem_refresh(self.show.indexerid,
                                    self.show.indexer,
                                    force=True)

        # check if show has XEM mapping so we can determine if searches should go by scene numbering or indexer numbering.
        if not self.scene and scene_numbering.get_xem_numbering_for_show(
                self.show.indexerid, self.show.indexer):
            self.show.scene = 1

        # After initial add, set to default_status_after.
        self.show.default_ep_status = self.default_status_after

        super(QueueItemAdd, self).finish()
        self.finish()
Example #60
0
def log_data(min_level, log_filter, log_search, max_lines):
    regex = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
    if log_filter not in LOG_FILTERS:
        log_filter = '<NONE>'

    final_data = []

    log_files = []
    if ek(os.path.isfile, Wrapper.instance.log_file):
        log_files.append(Wrapper.instance.log_file)

        for i in range(1, int(sickbeard.LOG_NR)):
            name = Wrapper.instance.log_file + "." + str(i)
            if not ek(os.path.isfile, name):
                break
            log_files.append(name)
    else:
        return final_data

    data = []
    for _log_file in log_files:
        if len(data) < max_lines:
            with io.open(_log_file, 'r', encoding='utf-8') as f:
                data += [
                    line.strip() + '\n' for line in reversed(f.readlines())
                    if line.strip()
                ]
        else:
            break

    found_lines = 0
    for x in data:
        match = re.match(regex, x)

        if match:
            level = match.group(7)
            log_name = match.group(8)

            if not sickbeard.DEBUG and level == 'DEBUG':
                continue

            if not sickbeard.DBDEBUG and level == 'DB':
                continue

            if level not in LOGGING_LEVELS:
                final_data.append('AA ' + x)
                found_lines += 1
            elif log_search and log_search.lower() in x.lower():
                final_data.append(x)
                found_lines += 1
            elif not log_search and LOGGING_LEVELS[level] >= int(
                    min_level) and (log_filter == '<NONE>'
                                    or log_name.startswith(log_filter)):
                final_data.append(x)
                found_lines += 1
        else:
            final_data.append('AA ' + x)
            found_lines += 1

        if found_lines >= max_lines:
            break

    return final_data
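The regex at the top of log_data splits each line into date/time fields, a level, the thread/logger name and the message; lines that parse are then filtered by minimum level, name prefix and search string. A self-contained sketch of that parse-and-filter step on a couple of sample lines (the numeric level values are assumptions, not SickChill's LOGGING_LEVELS):

import re

LOG_REGEX = r"^(\d\d\d\d)\-(\d\d)\-(\d\d)\s*(\d\d)\:(\d\d):(\d\d)\s*([A-Z]+)\s*(.+?)\s*\:\:\s*(.*)$"
LEVELS = {'DEBUG': 5, 'INFO': 10, 'WARNING': 30, 'ERROR': 40}

SAMPLE = [
    '2024-01-02 03:04:05 INFO     SEARCHQUEUE :: Starting backlog search',
    '2024-01-02 03:04:06 DEBUG    SHOWUPDATER :: Nothing to update',
]


def filter_log(lines, min_level, name_prefix=None):
    out = []
    for line in lines:
        match = re.match(LOG_REGEX, line)
        if not match:
            continue  # log_data keeps unparsed lines with an 'AA ' prefix; skipped here for brevity
        level, name = match.group(7), match.group(8)
        if LEVELS.get(level, 0) < min_level:
            continue
        if name_prefix and not name.startswith(name_prefix):
            continue
        out.append(line)
    return out


print(filter_log(SAMPLE, LEVELS['INFO']))  # only the INFO line survives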