Example #1
def _remove_old_zoneinfo():
    """
    Removes zoneinfo tar.gz file from repository, as we do not need it
    """
    if zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek(basename, zoneinfo.ZONEINFOFILE)
    else:
        return

    cur_file = helpers.real_path(
        ek(join, ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    for (path, dirs, files) in ek(
            os.walk, helpers.real_path(ek(os.path.dirname,
                                          zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek(join, path, filename)
                if file_w_path != cur_file and ek(isfile, file_w_path):
                    try:
                        ek(os.remove, file_w_path)
                        logger.log(u'Delete unneeded old zoneinfo File: %s' %
                                   file_w_path)
                    except:
                        logger.log(u'Unable to delete: %s' % file_w_path,
                                   logger.WARNING)
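These snippets all come from the SickBeard family of projects (SickBeard, SickRage, SickChill, SickGear) and rely on two helpers the listing never defines. As a rough sketch under stated assumptions (not the projects' exact code): helpers.real_path canonicalises a path, and ek / ek.ek ("encoding kludge") wraps filesystem calls to smooth over filesystem-encoding issues.

import os

def real_path(path):
    # Canonical form of a path: resolve symlinks and '..', then normalise
    # case and separators so two spellings of one location compare equal.
    return os.path.normpath(os.path.normcase(os.path.realpath(path)))

def ek(func, *args, **kwargs):
    # Stand-in for the encodingKludge wrapper: the real one encodes/decodes
    # path arguments around the call; this sketch simply forwards it.
    return func(*args, **kwargs)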
Example #2
def _remove_old_zoneinfo():
    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        return

    cur_file = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__),
              cur_zoneinfo))

    for (path, dirs, files) in ek.ek(
            os.walk,
            helpers.real_path(
                ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek.ek(join, path, filename)
                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                    try:
                        ek.ek(os.remove, file_w_path)
                        logger.log(u'Delete unneeded old zoneinfo File: %s' %
                                   file_w_path)
                    except:
                        logger.log(u'Unable to delete: %s' % file_w_path,
                                   logger.ERROR)
Example #3
 def test_real_path(self):
     """
     Test real_path
     """
     self.assertEqual(
         helpers.real_path('/usr/SickChill/../root/real/path/'),
         helpers.real_path('/usr/root/real/path/'))
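The same property can be checked standalone with the real_path sketch from the note under Example #1, assuming none of the path components are symlinks:

# Both spellings collapse to the same canonical path:
assert real_path('/usr/SickChill/../root/real/path/') == real_path('/usr/root/real/path/')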
Example #4
    def _delete_folder(self, folder, check_empty=True):

        # check if it's a folder
        if not ek.ek(os.path.isdir, folder):
            return False

        # make sure it isn't TV_DOWNLOAD_DIR
        if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(
                sickbeard.TV_DOWNLOAD_DIR) == helpers.real_path(folder):
            return False

        # check if it's empty folder when wanted checked
        if check_empty and ek.ek(os.listdir, folder):
            return False

        # try deleting folder
        try:
            shutil.rmtree(folder)
        except (OSError, IOError) as e:
            logger.log(
                u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)),
                logger.WARNING)
            return False

        if ek.ek(os.path.isdir, folder):
            logger.log(u'Warning: unable to delete folder: %s' % folder,
                       logger.WARNING)
            return False

        self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE)
        return True
Example #5
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """

    # check if it's a folder
    if not ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = ek(os.listdir, folder)
        if check_files:
            logger.log(u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO)
            return False

        try:
            logger.log(u"Deleting folder (if it's empty): " + folder)
            os.rmdir(folder)
        except (OSError, IOError) as e:
            logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
            return False
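This snippet is cut off before the tree-removal branch. A stdlib-only sketch of the complete behaviour, minus the project's download-dir guard and logging, might look like:

import os
import shutil

def delete_folder_sketch(folder, check_empty=True):
    if not os.path.isdir(folder):
        return False
    if check_empty:
        if os.listdir(folder):
            return False      # non-empty: leave it alone
        try:
            os.rmdir(folder)  # only succeeds on an empty directory
        except OSError:
            return False
    else:
        try:
            shutil.rmtree(folder)  # remove the whole tree
        except OSError:
            return False
    return True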
Example #6
def delete_folder(folder, check_empty=True):

    # check if it's a folder
    if not ek.ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(
                sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = ek.ek(os.listdir, folder)
        if check_files:
            return False

    # try deleting folder
    try:
        logger.log(u"Deleting folder: " + folder)
        shutil.rmtree(folder)
    except (OSError, IOError) as e:
        logger.log(
            u"Warning: unable to delete folder: " + folder + ": " + ex(e),
            logger.WARNING)
        return False
Example #7
    def _delete_folder(self, folder, check_empty=True):

        # check if it's a folder
        if not ek.ek(os.path.isdir, folder):
            return False

        # make sure it isn't TV_DOWNLOAD_DIR
        if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) == helpers.real_path(folder):
            return False

        # check if it's empty folder when wanted checked
        if check_empty and ek.ek(os.listdir, folder):
            return False

        # try deleting folder
        try:
            shutil.rmtree(folder)
        except (OSError, IOError) as e:
            logger.log(u'Warning: unable to delete folder: %s: %s' % (folder, ex(e)), logger.WARNING)
            return False

        if ek.ek(os.path.isdir, folder):
            logger.log(u'Warning: unable to delete folder: %s' % folder, logger.WARNING)
            return False

        self._log_helper(u'Deleted folder ' + folder, logger.MESSAGE)
        return True
Example #8
def _remove_old_zoneinfo():
    if zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, zoneinfo.ZONEINFOFILE)
    else:
        return

    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    for (path, dirs, files) in ek.ek(os.walk, helpers.real_path(ek.ek(os.path.dirname, zoneinfo.__file__))):
        for filename in files:
            if filename.endswith(".tar.gz"):
                file_w_path = ek.ek(join, path, filename)
                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                    try:
                        ek.ek(os.remove, file_w_path)
                        logger.log(u"Delete unneeded old zoneinfo File: %s" % file_w_path)
                    except:
                        logger.log(u"Unable to delete: %s" % file_w_path, logger.WARNING)
Example #9
def _remove_old_zoneinfo():
    zonefilename = zoneinfo._ZONEFILENAME
    if None is zonefilename:
        return
    cur_zoneinfo = ek.ek(basename, zonefilename)

    cur_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))

    for (path, dirs, files) in ek.ek(os.walk,
                                     helpers.real_path(ek.ek(os.path.dirname, zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek.ek(join, path, filename)
                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                    try:
                        ek.ek(os.remove, file_w_path)
                        logger.log(u'Delete unneeded old zoneinfo File: %s' % file_w_path)
                    except:
                        logger.log(u'Unable to delete: %s' % file_w_path, logger.ERROR)
Example #10
def _remove_old_zoneinfo():
    zonefilename = zoneinfo.ZONEFILENAME
    if None is zonefilename:
        return
    cur_zoneinfo = ek.ek(basename, zonefilename)

    cur_file = helpers.real_path(ek.ek(join, sickbeard.ZONEINFO_DIR, cur_zoneinfo))

    for (path, dirs, files) in chain.from_iterable(ek.ek(os.walk,
                                     helpers.real_path(di)) for di in (sickbeard.ZONEINFO_DIR, ek.ek(os.path.dirname, zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek.ek(join, path, filename)
                if file_w_path != cur_file and ek.ek(isfile, file_w_path):
                    try:
                        ek.ek(os.remove, file_w_path)
                        logger.log(u'Delete unneeded old zoneinfo File: %s' % file_w_path)
                    except:
                        logger.log(u'Unable to delete: %s' % file_w_path, logger.ERROR)
Example #11
 def remove_zoneinfo(cls):
     # delete all existing zoneinfo files
     for (path, dirs, files) in ek.ek(os.walk, helpers.real_path(sickbeard.ZONEINFO_DIR)):
         for filename in files:
             if filename.endswith('.tar.gz'):
                 file_w_path = ek.ek(os.path.join, path, filename)
                 try:
                     ek.ek(os.remove, file_w_path)
                 except (StandardError, Exception):
                     pass
Example #12
def _remove_old_zoneinfo():
    """
    Removes zoneinfo tar.gz file from repository, as we do not need it
    """
    if zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek(basename, zoneinfo.ZONEINFOFILE)
    else:
        return

    cur_file = helpers.real_path(ek(join, ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    for (path, dirs, files) in ek(os.walk, helpers.real_path(ek(os.path.dirname, zoneinfo.__file__))):
        for filename in files:
            if filename.endswith('.tar.gz'):
                file_w_path = ek(join, path, filename)
                if file_w_path != cur_file and ek(isfile, file_w_path):
                    try:
                        ek(os.remove, file_w_path)
                        logger.log(u'Delete unneeded old zoneinfo File: %s' % file_w_path)
                    except:
                        logger.log(u'Unable to delete: %s' % file_w_path, logger.WARNING)
Example #13
def update_network_dict():
    _remove_old_zoneinfo()
    _update_zoneinfo()

    d = {}

    # network timezones are stored in a git submodule
    loc = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/network_timezones.txt'))

    with open(loc, 'r') as file:
        data = file.read()

    if not data:
        logger.log(u'Updating network timezones failed', logger.ERROR)
        load_network_dict()
        return

    try:
        for line in data.splitlines():
            (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            d[key] = val
    except (IOError, OSError):
        pass

    my_db = db.DBConnection('cache.db')

    # load current network timezones
    old_d = dict(my_db.select('SELECT * FROM network_timezones'))

    # list of sql commands to update the network_timezones table
    cl = []
    for cur_d, cur_t in d.iteritems():
        h_k = cur_d in old_d
        if h_k and cur_t != old_d[cur_d]:
            # update old record
            cl.append(
                ['UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?', [cur_d, cur_t, cur_d]])
        elif not h_k:
            # add new record
            cl.append(['INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)', [cur_d, cur_t]])
        if h_k:
            del old_d[cur_d]

    # remove deleted records
    if len(old_d) > 0:
        old_items = list(va for va in old_d)
        cl.append(['DELETE FROM network_timezones WHERE network_name IN (%s)' % ','.join(['?'] * len(old_items)), old_items])

    # change all network timezone infos at once (much faster)
    if len(cl) > 0:
        my_db.mass_action(cl)
        load_network_dict()
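The network_timezones.txt format assumed above is one "network name:timezone" pair per line; splitting on the last colon keeps colons inside network names intact:

# rsplit(u':', 1) splits on the *last* colon only:
line = u'Some Network: HD:Europe/London'
key, val = line.strip().rsplit(u':', 1)
# key == u'Some Network: HD', val == u'Europe/London'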
Example #14
 def remove_zoneinfo(cls):
     # delete all existing zoneinfo files
     for (path, dirs, files) in ek.ek(os.walk, helpers.real_path(sickbeard.ZONEINFO_DIR)):
         for filename in files:
             if filename.endswith('.tar.gz'):
                 file_w_path = ek.ek(os.path.join, path, filename)
                 try:
                     ek.ek(os.remove, file_w_path)
                 except (StandardError, Exception):
                     pass
Example #15
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """

    # check if it's a folder
    if not ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(
                sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    try:
        if check_empty:
            check_files = ek(os.listdir, folder)
            if check_files:
                logging.info(
                    "Not deleting folder {} found the following files: {}".
                    format(folder, check_files))
                return False

            logging.info("Deleting folder (if it's empty): " + folder)
            ek(os.rmdir, folder)
        else:
            logging.info("Deleting folder: " + folder)
            ek(removetree, folder)
    except (OSError, IOError) as e:
        logging.warning("Warning: unable to delete folder: {}: {}".format(
            folder, ex(e)))
        return False

    return True
Example #16
def delete_folder(folder, check_empty=True):
    """
    Removes a folder from the filesystem

    :param folder: Path to folder to remove
    :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
    :return: True on success, False on failure
    """

    # check if it's a folder
    if not ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    try:
        if check_empty:
            check_files = ek(os.listdir, folder)
            if check_files:
                logging.info("Not deleting folder {} found the following files: {}".format(folder, check_files))
                return False

            logging.info("Deleting folder (if it's empty): " + folder)
            ek(os.rmdir, folder)
        else:
            logging.info("Deleting folder: " + folder)
            ek(removetree, folder)
    except (OSError, IOError) as e:
        logging.warning("Warning: unable to delete folder: {}: {}".format(folder, ex(e)))
        return False

    return True
Example #17
def delete_folder(folder, check_empty=True):

    # check if it's a folder
    if not ek.ek(os.path.isdir, folder):
        return False

    # check if it isn't TV_DOWNLOAD_DIR
    if sickbeard.TV_DOWNLOAD_DIR:
        if helpers.real_path(folder) == helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
            return False

    # check if it's empty folder when wanted checked
    if check_empty:
        check_files = ek.ek(os.listdir, folder)
        if check_files:
            logger.log(u"Not deleting folder " + folder + " found the following files: " + str(check_files), logger.INFO)
            return False
        
        try:
            logger.log(u"Deleting folder (if it's empty): " + folder)
            os.rmdir(folder)
        except (OSError, IOError) as e:
            logger.log(u"Warning: unable to delete folder: " + folder + ": " + ex(e), logger.WARNING)
            return False
Example #18
    def find_parent(path):
        """
        Test path is inside a parent folder

        :param path: Path to check
        :type path: String
        :return: Parent root dir that matches path, or None
        :rtype: String, None
        """
        build_path = (lambda old_path: '%s%s' % (helpers.real_path(old_path).rstrip(os.path.sep), os.path.sep))

        process_path = build_path(path)
        for parent in map(lambda p: build_path(p), sickbeard.ROOT_DIRS.split('|')[1:]):
            if process_path.startswith(parent):
                return parent.rstrip(os.path.sep)
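sickbeard.ROOT_DIRS is assumed to be a pipe-separated string whose first element is the index of the default root (hence the [1:] slice); forcing a trailing separator before the prefix test keeps /mnt/tv2 from matching /mnt/tv. A self-contained sketch with hypothetical values:

import os

ROOT_DIRS = '0|/mnt/tv|/mnt/anime'  # hypothetical setting

def build_path(old_path):
    # Canonical path with exactly one trailing separator.
    return os.path.realpath(old_path).rstrip(os.path.sep) + os.path.sep

def find_parent_sketch(path):
    process_path = build_path(path)
    for parent in (build_path(p) for p in ROOT_DIRS.split('|')[1:]):
        if process_path.startswith(parent):
            return parent.rstrip(os.path.sep)

print(find_parent_sketch('/mnt/tv/Show/Season 1'))   # /mnt/tv (absent symlinks)
print(find_parent_sketch('/mnt/movies/Some.Movie'))  # None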
Example #19
def update_network_dict():
    _remove_old_zoneinfo()
    _update_zoneinfo()

    loc = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/network_timezones.txt'))

    with open(loc, 'r') as file:
        data = file.read()

    if not data:
        logger.log(u'Updating network timezones failed', logger.ERROR)
        load_network_dict()
        return

    d = {}
    try:
        for line in data.splitlines():
            (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            d[key] = val
    except (IOError, OSError):
        pass

    my_db = db.DBConnection('cache.db')

    network_list = dict(my_db.select('SELECT * FROM network_timezones;'))

    queries = []
    for network, timezone in d.iteritems():
        existing = network in network_list
        if not existing:
            queries.append(['INSERT OR IGNORE INTO network_timezones VALUES (?,?);', [network, timezone]])
        elif network_list[network] != timezone:
            queries.append(['UPDATE OR IGNORE network_timezones SET timezone = ? WHERE network_name = ?;', [timezone, network]])

        if existing:
            del network_list[network]

    if network_list:
        purged = list(x for x in network_list)
        queries.append(['DELETE FROM network_timezones WHERE network_name IN (%s);' % ','.join(['?'] * len(purged)), purged])

    if queries:
        my_db.mass_action(queries)
        load_network_dict()
Example #20
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them into the
    scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    global exception_dict, anidb_exception_dict, xem_exception_dict

    # exceptions are stored in submodules in this repo, sourced from the github repos
    # TODO: `git submodule update`
    for indexer in sickbeard.indexerApi().indexers:
        if shouldRefresh(sickbeard.indexerApi(indexer).name):
            logger.log(u"Checking for scene exception updates for " +
                       sickbeard.indexerApi(indexer).name + "")

            loc = sickbeard.indexerApi(indexer).config['scene_loc']
            if loc.startswith("http"):
                data = helpers.getURL(loc)
            else:
                loc = helpers.real_path(
                    ek.ek(os.path.join, ek.ek(os.path.dirname, __file__), loc))
                with open(loc, 'r') as file:
                    data = file.read()

            if data is None:
                # When data is None, trouble connecting to github, or reading file failed
                logger.log(
                    u"Check scene exceptions update failed. Unable to update from: "
                    + loc, logger.WARNING)
                continue

            setLastRefresh(sickbeard.indexerApi(indexer).name)

            # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
            for cur_line in data.splitlines():
                indexer_id, sep, aliases = cur_line.partition(
                    ':')  # @UnusedVariable

                if not aliases:
                    continue

                indexer_id = int(indexer_id)

                # regex out the list of shows, taking \' into account
                # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                alias_list = [{
                    re.sub(r'\\(.)', r'\1', x): -1
                } for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                exception_dict[indexer_id] = alias_list
                del alias_list

            # cleanup
            del data

    # XEM scene exceptions
    _xem_exceptions_fetcher()
    for xem_ex in xem_exception_dict:
        if xem_ex in exception_dict:
            exception_dict[
                xem_ex] = exception_dict[xem_ex] + xem_exception_dict[xem_ex]
        else:
            exception_dict[xem_ex] = xem_exception_dict[xem_ex]

    # AniDB scene exceptions
    _anidb_exceptions_fetcher()
    for anidb_ex in anidb_exception_dict:
        if anidb_ex in exception_dict:
            exception_dict[anidb_ex] = exception_dict[
                anidb_ex] + anidb_exception_dict[anidb_ex]
        else:
            exception_dict[anidb_ex] = anidb_exception_dict[anidb_ex]

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    myDB = db.DBConnection('cache.db')
    for cur_indexer_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [
            x["show_name"] for x in myDB.select(
                "SELECT * FROM scene_exceptions WHERE indexer_id = ?",
                [cur_indexer_id])
        ]

        if cur_indexer_id not in exception_dict:
            continue

        for cur_exception_dict in exception_dict[cur_indexer_id]:
            cur_exception, curSeason = cur_exception_dict.items()[0]

            # if this exception isn't already in the DB then add it
            if cur_exception not in existing_exceptions:
                myDB.action(
                    "INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
                    [cur_indexer_id, cur_exception, curSeason])
                changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        logger.log(u"Updated scene exceptions", logger.DEBUG)
    else:
        logger.log(u"No scene exceptions update needed", logger.DEBUG)

    # cleanup
    exception_dict.clear()
    anidb_exception_dict.clear()
    xem_exception_dict.clear()
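Each exceptions line is assumed to look like 12345: 'Name One', 'Name Two'. The regex captures every quoted alias while honouring escaped quotes, and the re.sub unescapes them, as in the commented-out simpler variant above:

import re

aliases = "'Show Name', 'It\\'s A Show'"  # text after the "indexer_id:" prefix
alias_list = [re.sub(r'\\(.)', r'\1', x)
              for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
print(alias_list)  # ['Show Name', "It's A Show"]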
Example #21
 def test_root_folder(self):
     for root_dirs, path, expected in root_folder_tests:
         sickbeard.ROOT_DIRS = root_dirs
         self.assertEqual(expected and real_path(expected) or expected, ProcessTVShow.find_parent(path))
Example #22
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
                   logger.WARNING)
        return

    zonefilename = zoneinfo._ZONEFILENAME
    cur_zoneinfo = zonefilename
    if None is not cur_zoneinfo:
        cur_zoneinfo = ek.ek(basename, zonefilename)
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(os.path.isfile, zonefile) else None
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')
    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
    if not newtz_regex or len(newtz_regex.groups()) != 1:
        return
    newtzversion = newtz_regex.group(1)

    if cur_zoneinfo is not None and zonemetadata is not None and 'tzversion' in zonemetadata and zonemetadata['tzversion'] == newtzversion:
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            from dateutil.zoneinfo import gettz
            if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
                gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())

            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
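zoneinfo.txt is assumed to hold a single line with the tarball name and its MD5 separated by a space; the version check then extracts a tz-database version such as 2015g from the filename:

import re

url_data = u'dateutil-zoneinfo-2015g.tar.gz 0123456789abcdef0123456789abcdef'  # hypothetical contents
new_zoneinfo, zoneinfo_md5 = url_data.strip().rsplit(u' ')
newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
print(newtz_regex.group(1))  # 2015g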
Example #23
def retrieve_exceptions():
    """
    Looks up the exceptions on github, parses them into a dict, and inserts them into the
    scene_exceptions table in cache.db. Also clears the scene name cache.
    """
    global exception_dict, anidb_exception_dict, xem_exception_dict

    # exceptions are stored in submodules in this repo, sourced from the github repos
    # TODO: `git submodule update`
    for indexer in sickbeard.indexerApi().indexers:
        if shouldRefresh(sickbeard.indexerApi(indexer).name):
            logger.log(u"Checking for scene exception updates for " + sickbeard.indexerApi(indexer).name + "")

            loc = sickbeard.indexerApi(indexer).config['scene_loc']
            if loc.startswith("http"):
                data = helpers.getURL(loc, session=sickbeard.indexerApi(indexer).session)
            else:
                loc = helpers.real_path(ek.ek(os.path.join, ek.ek(os.path.dirname, __file__), loc))
                with open(loc, 'r') as file:
                    data = file.read()

            if data is None:
                # When data is None, trouble connecting to github, or reading file failed
                logger.log(u"Check scene exceptions update failed. Unable to update from: " + loc, logger.WARNING)
                continue

            setLastRefresh(sickbeard.indexerApi(indexer).name)

            # each exception is on one line with the format indexer_id: 'show name 1', 'show name 2', etc
            for cur_line in data.splitlines():
                indexer_id, sep, aliases = cur_line.partition(':')  # @UnusedVariable

                if not aliases:
                    continue

                indexer_id = int(indexer_id)

                # regex out the list of shows, taking \' into account
                # alias_list = [re.sub(r'\\(.)', r'\1', x) for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                alias_list = [{re.sub(r'\\(.)', r'\1', x): -1} for x in re.findall(r"'(.*?)(?<!\\)',?", aliases)]
                exception_dict[indexer_id] = alias_list
                del alias_list

            # cleanup
            del data

    # XEM scene exceptions
    _xem_exceptions_fetcher()
    for xem_ex in xem_exception_dict:
        if xem_ex in exception_dict:
            exception_dict[xem_ex] = exception_dict[xem_ex] + xem_exception_dict[xem_ex]
        else:
            exception_dict[xem_ex] = xem_exception_dict[xem_ex]

    # AniDB scene exceptions
    _anidb_exceptions_fetcher()
    for anidb_ex in anidb_exception_dict:
        if anidb_ex in exception_dict:
            exception_dict[anidb_ex] = exception_dict[anidb_ex] + anidb_exception_dict[anidb_ex]
        else:
            exception_dict[anidb_ex] = anidb_exception_dict[anidb_ex]

    changed_exceptions = False

    # write all the exceptions we got off the net into the database
    myDB = db.DBConnection('cache.db')
    for cur_indexer_id in exception_dict:

        # get a list of the existing exceptions for this ID
        existing_exceptions = [x["show_name"] for x in
                               myDB.select("SELECT * FROM scene_exceptions WHERE indexer_id = ?", [cur_indexer_id])]

        if cur_indexer_id not in exception_dict:
            continue

        for cur_exception_dict in exception_dict[cur_indexer_id]:
            cur_exception, curSeason = cur_exception_dict.items()[0]

            # if this exception isn't already in the DB then add it
            if cur_exception not in existing_exceptions:
                myDB.action("INSERT INTO scene_exceptions (indexer_id, show_name, season) VALUES (?,?,?)",
                            [cur_indexer_id, cur_exception, curSeason])
                changed_exceptions = True

    # since this could invalidate the results of the cache we clear it out after updating
    if changed_exceptions:
        logger.log(u"Updated scene exceptions", logger.DEBUG)
    else:
        logger.log(u"No scene exceptions update needed", logger.DEBUG)

    # cleanup
    exception_dict.clear()
    anidb_exception_dict.clear()
    xem_exception_dict.clear()
Example #24
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' % url_zv,
                   logger.WARNING)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
Example #25
def _update_zoneinfo():

    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://github.com/Prinz23/sb_network_timezones/raw/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)

    if url_data is None:
        # When urlData is None, trouble connecting to github
        logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return
        return

    # now load the new zoneinfo
    url_tar = u'https://github.com/Prinz23/sb_network_timezones/raw/master/' + new_zoneinfo
    
    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
        return
Example #26
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    try:
        url_data = helpers.getURL(url_zv)
        if not url_data:
            raise

        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        (new_zoneinfo,
         zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')
    except:
        # When urlData is None, trouble connecting to github
        logger.log(
            u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s'
            % url_zv, logger.WARNING)
        return

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    zonefile = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__),
              new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo,
                   logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(
                        join,
                        ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__),
                        cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(
            u'MD5 hash does not match: %s File: %s' %
            (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
Example #27
 def test_real_path(self):
     """
     Test real_path
     """
     self.assertEqual(helpers.real_path('/usr/SickRage/../root/real/path/'), helpers.real_path('/usr/root/real/path/'))
Example #28
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # TODO `git subtree pull` commands on updates

    loc_zv = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/zoneinfo.txt'))

    # Read version file
    try:
        with open(loc_zv, 'r') as file:
            data = file.read()
        if not data:
            raise

        # Filename of existing zoneinfo
        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        # Filename and hash of new zoneinfo
        (new_zoneinfo, zoneinfo_md5) = data.decode('utf-8').strip().rsplit(u' ')
    except Exception as e:
        logger.log(u'Crazy problem with zoneinfo: %s' % ex(e), logger.ERROR)
        return

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return

    # now load the new zoneinfo
    loc_tar = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, __file__), u'../lib/network_timezones/%s' % new_zoneinfo))

    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.copyFile(loc_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
Example #29
def _update_zoneinfo():
    """
    Request new zoneinfo directly from repository
    """
    global sb_timezone
    sb_timezone = tz.tzlocal()
    url_zv = 'http://sickragetv.github.io/network_timezones/zoneinfo.txt'
    try:
        url_data = helpers.getURL(url_zv, session=requests.Session())
        if not url_data:
            raise

        # Filename of existing zoneinfo
        if zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek(basename, zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        # Filename and hash of new zoneinfo
        (new_zoneinfo, zoneinfo_md5) = url_data.strip().rsplit(u' ')
    except Exception as e:
        logger.log(u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s' %
                url_zv, logger.WARNING)
        return

    if (cur_zoneinfo is not None) and (new_zoneinfo == cur_zoneinfo):
        return

    # now load the new zoneinfo
    url_tar = u'http://sickragetv.github.io/network_timezones/%s' % new_zoneinfo

    zonefile = helpers.real_path(ek(join, ek(os.path.dirname, zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek(os.path.exists, zonefile_tmp):
        try:
            ek(os.remove, zonefile_tmp)
        except:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.WARNING)
            return

    if not helpers.download_file(url_tar, zonefile_tmp, session=requests.Session()):
        return

    if not ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.WARNING)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo, logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek(join, ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
                if ek(os.path.exists, old_file):
                    ek(os.remove, old_file)
            # rename downloaded file
            ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u'MD5 hash does not match: %s File: %s' % (zoneinfo_md5.upper(), new_hash.upper()), logger.WARNING)
        return
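helpers.md5_for_file is not shown in this listing; a plausible chunked implementation (an assumption, not the project's exact code) is:

import hashlib

def md5_for_file(filename, block_size=2 ** 16):
    # Hash in blocks so a large tarball never has to fit in memory whole.
    md5 = hashlib.md5()
    with open(filename, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            md5.update(block)
    return md5.hexdigest()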
Example #30
def validate_dir(process_path, release_name, failed, result):  # pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
    """
    Check if directory is valid for processing

    :param process_path: Directory to check
    :param release_name: Original NZB/Torrent name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    result.output += log_helper("Processing folder " + process_path, logger.DEBUG)

    upper_name = ek(os.path.basename, process_path).upper()
    if upper_name.startswith('_FAILED_') or upper_name.endswith('_FAILED_'):
        result.output += log_helper("The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNDERSIZED_') or upper_name.endswith('_UNDERSIZED_'):
        result.output += log_helper("The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNPACK') or upper_name.endswith('_UNPACK'):
        result.output += log_helper("The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
        result.missed_files.append("{0} : Being unpacked".format(process_path))
        return False

    if failed:
        process_failed(process_path, release_name, result)
        result.missed_files.append("{0} : Failed download".format(process_path))
        return False

    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(process_path) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) and helpers.is_hidden_folder(process_path):
        result.output += log_helper("Ignoring hidden folder: {0}".format(process_path), logger.DEBUG)
        result.missed_files.append("{0} : Hidden folder".format(process_path))
        return False

    # make sure the dir isn't inside a show dir
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT location FROM tv_shows")

    for sqlShow in sql_results:
        if process_path.lower().startswith(ek(os.path.realpath, sqlShow[b"location"]).lower() + os.sep) or \
                process_path.lower() == ek(os.path.realpath, sqlShow[b"location"]).lower():

            result.output += log_helper(
                "Cannot process an episode that's already been moved to its show dir, skipping " + process_path,
                logger.WARNING)
            return False

    for current_directory, directory_names, file_names in ek(os.walk, process_path, topdown=False, followlinks=sickbeard.PROCESSOR_FOLLOW_SYMLINKS):
        sync_files = filter(is_sync_file, file_names)
        if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
            result.output += log_helper("Found temporary sync files: {0} in path: {1}".format(sync_files, ek(os.path.join, process_path, sync_files[0])))
            result.output += log_helper("Skipping post processing for folder: {0}".format(process_path))
            result.missed_files.append("{0} : Sync files found".format(ek(os.path.join, process_path, sync_files[0])))
            continue

        found_files = filter(helpers.is_media_file, file_names)
        if sickbeard.UNPACK == 1:
            found_files += filter(helpers.is_rar_file, file_names)

        if current_directory != sickbeard.TV_DOWNLOAD_DIR and found_files:
            found_files.append(ek(os.path.basename, current_directory))

        for found_file in found_files:
            try:
                NameParser().parse(found_file, cache_result=False)
            except (InvalidNameException, InvalidShowException) as e:
                pass
            else:
                return True

    result.output += log_helper("{0} : No processable items found in folder".format(process_path), logger.DEBUG)
    return False
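Note the "+ os.sep" in the show-dir guard above: comparing raw prefixes would wrongly treat a sibling like /tv/Show 2 as being inside /tv/Show (POSIX paths shown):

import os

show_location = '/tv/Show'
print('/tv/Show 2/ep.mkv'.startswith(show_location + os.sep))  # False
print('/tv/Show/ep.mkv'.startswith(show_location + os.sep))    # True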
Example #31
def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, failed=False, type="automatic"):
    """
    Scans through the files in dirName and processes whatever media files it finds

    dirName: The folder name to look in
    nzbName: The NZB name which resulted in this folder being downloaded
    force: True to postprocess already postprocessed files
    failed: Boolean for whether or not the download failed
    type: Type of postprocessing automatic or manual
    """

    global process_result, returnStr

    returnStr = u""

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    returnStr += logHelper(u"TV_DOWNLOAD_DIR: " + sickbeard.TV_DOWNLOAD_DIR, logger.DEBUG)

    # if they passed us a real dir then assume it's the one we want
    if ek.ek(os.path.isdir, dirName):
        dirName = ek.ek(os.path.realpath, dirName)

    # if the client and Sickbeard are not on the same machine translate the Dir in a network dir
    elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \
    and helpers.real_path(dirName) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
        dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR, ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1])
        returnStr += logHelper(u"Trying to use folder " + dirName, logger.DEBUG)

    # if we didn't find a real dir then quit
    if not ek.ek(os.path.isdir, dirName):
        returnStr += logHelper(u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.", logger.DEBUG)
        return returnStr

    path, dirs, files = get_path_dir_files(dirName, nzbName, type)

    returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
    returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)

    rarFiles = filter(helpers.isRarFile, files)
    rarContent = unRAR(path, rarFiles, force)
    files += rarContent
    videoFiles = filter(helpers.isMediaFile, files)
    videoInRar = filter(helpers.isMediaFile, rarContent)

    returnStr += logHelper(u"PostProcessing Files: " + str(files), logger.DEBUG)
    returnStr += logHelper(u"PostProcessing VideoFiles: " + str(videoFiles), logger.DEBUG)
    returnStr += logHelper(u"PostProcessing RarContent: " + str(rarContent), logger.DEBUG)
    returnStr += logHelper(u"PostProcessing VideoInRar: " + str(videoInRar), logger.DEBUG)

    # If nzbName is set and there's more than one videofile in the folder, files will be lost (overwritten).
    nzbNameOriginal = nzbName
    if len(videoFiles) >= 2:
        nzbName = None

    if not process_method:
        process_method = sickbeard.PROCESS_METHOD

    process_result = True

    #Don't Link media when the media is extracted from a rar in the same path
    if process_method in ('hardlink', 'symlink') and videoInRar:
        process_media(path, videoInRar, nzbName, 'move', force, is_priority)
        delete_files(path, rarContent)
        for video in set(videoFiles) - set(videoInRar):
            process_media(path, [video], nzbName, process_method, force, is_priority)
    else:
        for video in videoFiles:
            process_media(path, [video], nzbName, process_method, force, is_priority)

    #Process Video File in all TV Subdir
    for dir in [x for x in dirs if validateDir(path, x, nzbNameOriginal, failed)]:

        process_result = True

        for processPath, processDir, fileList in ek.ek(os.walk, ek.ek(os.path.join, path, dir), topdown=False):

            rarFiles = filter(helpers.isRarFile, fileList)
            rarContent = unRAR(processPath, rarFiles, force)
            fileList = set(fileList + rarContent)
            videoFiles = filter(helpers.isMediaFile, fileList)
            videoInRar = filter(helpers.isMediaFile, rarContent)
            notwantedFiles = [x for x in fileList if x not in videoFiles]

            #Don't Link media when the media is extracted from a rar in the same path
            if process_method in ('hardlink', 'symlink') and videoInRar:
                process_media(processPath, videoInRar, nzbName, 'move', force, is_priority)
                process_media(processPath, set(videoFiles) - set(videoInRar), nzbName, process_method, force, is_priority)
                delete_files(processPath, rarContent)
            else:
                process_media(processPath, videoFiles, nzbName, process_method, force, is_priority)

                #Avoid to delete files
                if process_method != "move" or not process_result \
                or type == "manual":
                    continue

                delete_files(processPath, notwantedFiles)

                if process_method == "move" \
                and helpers.real_path(processPath) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
                    delete_dir(processPath)

    return returnStr
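When the download client reports a path that only exists on its own machine, the code above re-bases the final path component under TV_DOWNLOAD_DIR. In isolation, with hypothetical paths:

import os

dir_name = '/home/seedbox/downloads/Show.S01E01'  # path reported by the client
tv_download_dir = '/mnt/nas/downloads'            # local download directory
local = os.path.join(tv_download_dir, os.path.abspath(dir_name).split(os.path.sep)[-1])
print(local)  # /mnt/nas/downloads/Show.S01E01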
Пример #33
0
def validate_dir(process_path, release_name, failed, result):  # pylint: disable=too-many-locals,too-many-branches,too-many-return-statements
    """
    Check if directory is valid for processing

    :param process_path: Directory to check
    :param release_name: Original NZB/Torrent name
    :param failed: Previously failed objects
    :param result: Previous results
    :return: True if dir is valid for processing, False if not
    """

    result.output += log_helper("Processing folder " + process_path, logger.DEBUG)

    upper_name = ek(os.path.basename, process_path).upper()
    if upper_name.startswith('_FAILED_') or upper_name.endswith('_FAILED_'):
        result.output += log_helper("The directory name indicates it failed to extract.", logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNDERSIZED_') or upper_name.endswith('_UNDERSIZED_'):
        result.output += log_helper("The directory name indicates that it was previously rejected for being undersized.", logger.DEBUG)
        failed = True
    elif upper_name.startswith('_UNPACK') or upper_name.endswith('_UNPACK'):
        result.output += log_helper("The directory name indicates that this release is in the process of being unpacked.", logger.DEBUG)
        result.missed_files.append("{0} : Being unpacked".format(process_path))
        return False

    if failed:
        process_failed(process_path, release_name, result)
        result.missed_files.append("{0} : Failed download".format(process_path))
        return False

    if sickbeard.TV_DOWNLOAD_DIR and helpers.real_path(process_path) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR) and helpers.is_hidden_folder(process_path):
        result.output += log_helper("Ignoring hidden folder: {0}".format(process_path), logger.DEBUG)
        result.missed_files.append("{0} : Hidden folder".format(process_path))
        return False

    # make sure the dir isn't inside a show dir
    main_db_con = db.DBConnection()
    sql_results = main_db_con.select("SELECT location FROM tv_shows")

    for sqlShow in sql_results:
        if process_path.lower().startswith(ek(os.path.realpath, sqlShow[b"location"]).lower() + os.sep) or \
                process_path.lower() == ek(os.path.realpath, sqlShow[b"location"]).lower():

            result.output += log_helper(
                "Cannot process an episode that's already been moved to its show dir, skipping " + process_path,
                logger.WARNING)
            return False

    for current_directory, directory_names, file_names in ek(os.walk, process_path, topdown=False, followlinks=sickbeard.PROCESSOR_FOLLOW_SYMLINKS):
        sync_files = filter(is_sync_file, file_names)
        if sync_files and sickbeard.POSTPONE_IF_SYNC_FILES:
            result.output += log_helper("Found temporary sync files: {0} in path: {1}".format(sync_files, ek(os.path.join, process_path, sync_files[0])))
            result.output += log_helper("Skipping post processing for folder: {0}".format(process_path))
            result.missed_files.append("{0} : Sync files found".format(ek(os.path.join, process_path, sync_files[0])))
            continue

        found_files = filter(helpers.is_media_file, file_names)
        if sickbeard.UNPACK == 1:
            found_files += filter(helpers.is_rar_file, file_names)

        if current_directory != sickbeard.TV_DOWNLOAD_DIR and found_files:
            found_files.append(ek(os.path.basename, current_directory))

        for found_file in found_files:
            try:
                NameParser().parse(found_file, cache_result=False)
            except (InvalidNameException, InvalidShowException):
                pass
            else:
                return True

    result.output += log_helper("{0} : No processable items found in folder".format(process_path), logger.DEBUG)
    return False
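
A hedged usage sketch for validate_dir: it mutates the result argument as it goes, so the caller only needs an object exposing the output and missed_files attributes read above. The stand-in class and sample paths below are assumptions for illustration, and the call presumes the surrounding SickRage application context (db, sickbeard, NameParser) is importable:

class StubResult(object):
    # Minimal stand-in for the result container validate_dir() writes to.
    def __init__(self):
        self.output = u''
        self.missed_files = []


stub = StubResult()
if validate_dir('/downloads/complete/Show.S01E01.720p',
                'Show.S01E01.720p', False, stub):
    print(u'Directory is valid for processing')
else:
    print(stub.output)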
Example #34
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # when url_data is None there was trouble connecting to GitHub
        logger.log(u"Loading zoneinfo.txt failed. Unable to get URL: " + url_zv, logger.ERROR)
        return

    if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
        cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
    else:
        cur_zoneinfo = None
    (new_zoneinfo, zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')

    if cur_zoneinfo is not None and new_zoneinfo == cur_zoneinfo:
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/' + new_zoneinfo

    zonefile = helpers.real_path(ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), new_zoneinfo))
    zonefile_tmp = re.sub(r"\.tar\.gz$", '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u"Unable to delete: " + zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u"Download of " + zonefile_tmp + " failed.", logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u"Updating timezone info with new one: " + new_zoneinfo, logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__), cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(u"MD5 HASH doesn't match: " + zoneinfo_md5.upper() + ' File: ' + new_hash.upper(), logger.ERROR)
        return
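
The zoneinfo.txt fetched above is expected to contain a single line of the form "<tarball name> <md5>". A small self-contained illustration of the rsplit parsing used in this example (the sample values are invented):

url_data = b'zoneinfo_2018e.tar.gz 0123456789abcdef0123456789abcdef\n'
new_zoneinfo, zoneinfo_md5 = url_data.decode('utf-8').strip().rsplit(u' ')
print(new_zoneinfo)   # zoneinfo_2018e.tar.gz
print(zoneinfo_md5)   # 0123456789abcdef0123456789abcdef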
Example #35
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # now check if the zoneinfo needs update
    url_zv = 'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/zoneinfo.txt'

    url_data = helpers.getURL(url_zv)
    if url_data is None:
        # when url_data is None there was trouble connecting to GitHub
        logger.log(
            u'Loading zoneinfo.txt failed, this can happen from time to time. Unable to get URL: %s'
            % url_zv, logger.WARNING)
        return

    zonefilename = zoneinfo._ZONEFILENAME
    cur_zoneinfo = zonefilename
    if None is not cur_zoneinfo:
        cur_zoneinfo = ek.ek(basename, zonefilename)
    zonefile = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__), cur_zoneinfo))
    zonemetadata = zoneinfo.gettz_db_metadata() if ek.ek(
        os.path.isfile, zonefile) else None
    (new_zoneinfo,
     zoneinfo_md5) = url_data.decode('utf-8').strip().rsplit(u' ')
    newtz_regex = re.search(r'(\d{4}[^.]+)', new_zoneinfo)
    if not newtz_regex or len(newtz_regex.groups()) != 1:
        return
    newtzversion = newtz_regex.group(1)

    if cur_zoneinfo is not None and zonemetadata is not None \
            and 'tzversion' in zonemetadata and zonemetadata['tzversion'] == newtzversion:
        return

    # now load the new zoneinfo
    url_tar = u'https://raw.githubusercontent.com/Prinz23/sb_network_timezones/master/%s' % new_zoneinfo

    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.download_file(url_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo,
                   logger.MESSAGE)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(join, ek.ek(os.path.dirname, zoneinfo.__file__),
                          cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            from dateutil.zoneinfo import gettz
            if '_CLASS_ZONE_INSTANCE' in gettz.func_globals:
                gettz.func_globals.__setitem__('_CLASS_ZONE_INSTANCE', list())

            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(
            u'MD5 hash does not match: %s File: %s' %
            (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
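
This variant compares tz database versions rather than filenames. A quick, self-contained illustration of how the regex above pulls the version out of the tarball name (the filename is an invented example):

import re

name = 'zoneinfo_2018e.tar.gz'
match = re.search(r'(\d{4}[^.]+)', name)
if match and len(match.groups()) == 1:
    print(match.group(1))  # prints: 2018e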
Example #36
def process_dir(process_path,
                release_name=None,
                process_method=None,
                force=False,
                is_priority=None,
                delete_on=False,
                failed=False,
                mode="auto"):
    """
    Scans through the files in process_path and processes whatever media files it finds

    :param process_path: The folder name to look in
    :param release_name: The NZB/Torrent name which resulted in this folder being downloaded
    :param process_method: The processing method to use (copy, move, hardlink or symlink)
    :param force: True to process previously processed files
    :param is_priority: Whether to treat this release as a priority download
    :param delete_on: Whether to also delete files and folders when processing manually
    :param failed: Boolean for whether or not the download failed
    :param mode: Type of post-processing: "auto" or "manual"
    """

    result = ProcessResult()

    # if they passed us a real dir then assume it's the one we want
    if ek(os.path.isdir, process_path):
        process_path = ek(os.path.realpath, process_path)
        result.output += log_helper(
            u"Processing folder {0}".format(process_path), logger.DEBUG)

    # if the client and SickRage are not on the same machine, translate the directory into a network directory
    elif all([
            sickbeard.TV_DOWNLOAD_DIR,
            ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR),
            ek(os.path.normpath, process_path) == ek(os.path.normpath,
                                                     sickbeard.TV_DOWNLOAD_DIR)
    ]):
        process_path = ek(
            os.path.join, sickbeard.TV_DOWNLOAD_DIR,
            ek(os.path.abspath, process_path).split(os.path.sep)[-1])
        result.output += log_helper(
            u"Trying to use folder: {0} ".format(process_path), logger.DEBUG)

    # if we didn't find a real dir then quit
    if not ek(os.path.isdir, process_path):
        result.output += log_helper(
            u"Unable to figure out what folder to process. "
            u"If your downloader and SickRage aren't on the same PC "
            u"make sure you fill out your TV download dir in the config.",
            logger.DEBUG)
        return result.output

    process_method = process_method or sickbeard.PROCESS_METHOD

    directories_from_rars = []
    for current_directory, directory_names, file_names in ek(
            os.walk, process_path):
        if not validate_dir(current_directory, release_name, failed, result):
            continue

        result.result = True

        file_names = [f for f in file_names if not is_torrent_or_nzb_file(f)]

        rar_files = [
            x for x in file_names
            if helpers.is_rar_file(ek(os.path.join, current_directory, x))
        ]
        if rar_files:
            extracted_directories = unrar(current_directory, rar_files, force,
                                          result)

            # Add the directories to the walk directories
            result.output += log_helper(
                u"Adding extracted directories to the list of directories to process: {0}"
                .format(extracted_directories), logger.DEBUG)
            directories_from_rars += extracted_directories

        video_files = filter(helpers.is_media_file, file_names)
        unwanted_files = [
            x for x in file_names if x not in video_files and x != '.stfolder'
        ]
        if unwanted_files:
            result.output += log_helper(
                u"Found unwanted files: {0}".format(unwanted_files),
                logger.DEBUG)

        process_media(current_directory, video_files, release_name,
                      process_method, force, is_priority, result)

        # Delete all files that are not needed, and avoid deleting files during manual post-processing unless delete_on is set
        if not (process_method == u"move"
                and result.result) or (mode == u"manual" and not delete_on):
            continue

        delete_folder(ek(os.path.join, current_directory, u'@eaDir'), False)
        delete_files(current_directory, unwanted_files, result)

        if all([
                not sickbeard.NO_DELETE or mode == u"manual",
                process_method == u"move", sickbeard.TV_DOWNLOAD_DIR
                and helpers.real_path(current_directory) != helpers.real_path(
                    sickbeard.TV_DOWNLOAD_DIR)
        ]):

            if delete_folder(current_directory, check_empty=True):
                result.output += log_helper(
                    u"Deleted folder: {0}".format(current_directory),
                    logger.DEBUG)

    for directory_from_rar in directories_from_rars:
        process_dir(
            process_path=directory_from_rar,
            release_name=ek(os.path.basename, directory_from_rar),
            process_method=('move',
                            process_method)[process_method in ('move',
                                                               'copy')],
            force=force,
            is_priority=is_priority,
            delete_on=sickbeard.DELRARCONTENTS,
            failed=failed,
            mode=mode)

        if sickbeard.DELRARCONTENTS:
            delete_folder(directory_from_rar, False)

    result.output += log_helper(
        (u"Processing Failed", u"Successfully processed")[result.aggresult],
        (logger.WARNING, logger.INFO)[result.aggresult])
    if result.missed_files:
        result.output += log_helper(u"Some items were not processed.")
        for missed_file in result.missed_files:
            result.output += log_helper(missed_file)

    return result.output
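
A hypothetical invocation of process_dir, mirroring how a download-client hook might hand a finished folder to the processor; all argument values are illustrative and assume the SickRage application context is loaded:

output = process_dir(
    process_path='/downloads/complete/Show.S01E01.720p',
    release_name='Show.S01E01.720p',
    process_method='move',
    mode='auto')
print(output)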
Example #37
def _update_zoneinfo():
    global sb_timezone
    sb_timezone = tz.tzlocal()

    # TODO `git subtree pull` commands on updates

    loc_zv = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, __file__),
              u'../lib/network_timezones/zoneinfo.txt'))

    # Read version file
    try:
        with open(loc_zv, 'r') as file:
            data = file.read()
        if not data:
            raise ValueError(u'Empty zoneinfo version file: %s' % loc_zv)

        # Filename of existing zoneinfo
        if lib.dateutil.zoneinfo.ZONEINFOFILE is not None:
            cur_zoneinfo = ek.ek(basename, lib.dateutil.zoneinfo.ZONEINFOFILE)
        else:
            cur_zoneinfo = None

        # Filename and hash of new zoneinfo
        (new_zoneinfo,
         zoneinfo_md5) = data.decode('utf-8').strip().rsplit(u' ')
    except Exception as e:
        logger.log(u'Crazy problem with zoneinfo: %s' % ex(e), logger.ERROR)
        return

    if cur_zoneinfo is not None and new_zoneinfo == cur_zoneinfo:
        return

    # now load the new zoneinfo
    loc_tar = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, __file__),
              u'../lib/network_timezones/%s' % new_zoneinfo))

    zonefile = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__),
              new_zoneinfo))
    zonefile_tmp = re.sub(r'\.tar\.gz$', '.tmp', zonefile)

    if ek.ek(os.path.exists, zonefile_tmp):
        try:
            ek.ek(os.remove, zonefile_tmp)
        except:
            logger.log(u'Unable to delete: %s' % zonefile_tmp, logger.ERROR)
            return

    if not helpers.copyFile(loc_tar, zonefile_tmp):
        return

    if not ek.ek(os.path.exists, zonefile_tmp):
        logger.log(u'Download of %s failed.' % zonefile_tmp, logger.ERROR)
        return

    new_hash = str(helpers.md5_for_file(zonefile_tmp))

    if zoneinfo_md5.upper() == new_hash.upper():
        logger.log(u'Updating timezone info with new one: %s' % new_zoneinfo,
                   logger.INFO)
        try:
            # remove the old zoneinfo file
            if cur_zoneinfo is not None:
                old_file = helpers.real_path(
                    ek.ek(
                        join,
                        ek.ek(os.path.dirname, lib.dateutil.zoneinfo.__file__),
                        cur_zoneinfo))
                if ek.ek(os.path.exists, old_file):
                    ek.ek(os.remove, old_file)
            # rename downloaded file
            ek.ek(os.rename, zonefile_tmp, zonefile)
            # load the new zoneinfo
            reload(lib.dateutil.zoneinfo)
            sb_timezone = tz.tzlocal()
        except:
            _remove_zoneinfo_failed(zonefile_tmp)
            return
    else:
        _remove_zoneinfo_failed(zonefile_tmp)
        logger.log(
            u'MD5 hash does not match: %s File: %s' %
            (zoneinfo_md5.upper(), new_hash.upper()), logger.ERROR)
        return
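
Both zoneinfo updaters verify the tarball against the published hash via helpers.md5_for_file. A minimal sketch of such a helper, assuming it simply hashes the file in fixed-size chunks (the name, signature and chunk size are assumptions, not the project's confirmed API):

import hashlib


def md5_for_file(filename, block_size=2 ** 16):
    # Hash the file in fixed-size chunks to keep memory use flat
    # even for large archives.
    md5 = hashlib.md5()
    with open(filename, 'rb') as f:
        for block in iter(lambda: f.read(block_size), b''):
            md5.update(block)
    return md5.hexdigest()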
Example #38
def update_network_dict():
    _remove_old_zoneinfo()
    _update_zoneinfo()

    d = {}

    # network timezones are stored in a git submodule
    loc = helpers.real_path(
        ek.ek(join, ek.ek(os.path.dirname, __file__),
              u'../lib/network_timezones/network_timezones.txt'))

    with open(loc, 'r') as file:
        data = file.read()

    if not data:
        logger.log(u'Updating network timezones failed', logger.ERROR)
        load_network_dict()
        return

    try:
        for line in data.splitlines():
            (key, val) = line.decode('utf-8').strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            d[key] = val
    except (IOError, OSError, ValueError):
        pass

    my_db = db.DBConnection('cache.db')

    # load current network timezones
    old_d = dict(my_db.select('SELECT * FROM network_timezones'))

    # list of sql commands to update the network_timezones table
    cl = []
    for cur_d, cur_t in d.iteritems():
        h_k = cur_d in old_d
        if h_k and cur_t != old_d[cur_d]:
            # update old record
            cl.append([
                'UPDATE network_timezones SET network_name=?, timezone=? WHERE network_name=?',
                [cur_d, cur_t, cur_d]
            ])
        elif not h_k:
            # add new record
            cl.append([
                'INSERT INTO network_timezones (network_name, timezone) VALUES (?,?)',
                [cur_d, cur_t]
            ])
        if h_k:
            del old_d[cur_d]

    # remove deleted records
    if len(old_d) > 0:
        old_items = list(va for va in old_d)
        cl.append([
            'DELETE FROM network_timezones WHERE network_name IN (%s)' %
            ','.join(['?'] * len(old_items)), old_items
        ])

    # change all network timezone infos at once (much faster)
    if len(cl) > 0:
        my_db.mass_action(cl)
        load_network_dict()
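
The network_timezones.txt lines parsed above follow a "network:timezone" layout; rsplit with maxsplit 1 splits on the last colon, so colons inside the network name survive. A self-contained illustration (the sample line is invented):

line = u'BBC One:Europe/London'
key, val = line.strip().rsplit(u':', 1)
print(key)  # BBC One
print(val)  # Europe/London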
Example #39
def processDir(dirName,
               nzbName=None,
               process_method=None,
               force=False,
               is_priority=None,
               failed=False,
               type="automatic"):
    """
    Scans through the files in dirName and processes whatever media files it finds

    dirName: The folder name to look in
    nzbName: The NZB name which resulted in this folder being downloaded
    process_method: The processing method to use (copy, move, hardlink or symlink)
    force: True to postprocess already postprocessed files
    is_priority: Whether to treat this release as a priority download
    failed: Boolean for whether or not the download failed
    type: Type of postprocessing: automatic or manual
    """

    global process_result, returnStr

    returnStr = u""

    returnStr += logHelper(u"Processing folder " + dirName, logger.DEBUG)

    returnStr += logHelper(u"TV_DOWNLOAD_DIR: " + sickbeard.TV_DOWNLOAD_DIR,
                           logger.DEBUG)

    # if they passed us a real dir then assume it's the one we want
    if ek.ek(os.path.isdir, dirName):
        dirName = ek.ek(os.path.realpath, dirName)

    # if the client and Sick Beard are not on the same machine, translate the dir into a network dir
    elif sickbeard.TV_DOWNLOAD_DIR and ek.ek(os.path.isdir, sickbeard.TV_DOWNLOAD_DIR) \
    and helpers.real_path(dirName) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
        dirName = ek.ek(os.path.join, sickbeard.TV_DOWNLOAD_DIR,
                        ek.ek(os.path.abspath, dirName).split(os.path.sep)[-1])
        returnStr += logHelper(u"Trying to use folder " + dirName,
                               logger.DEBUG)

    # if we didn't find a real dir then quit
    if not ek.ek(os.path.isdir, dirName):
        returnStr += logHelper(
            u"Unable to figure out what folder to process. If your downloader and Sick Beard aren't on the same PC make sure you fill out your TV download dir in the config.",
            logger.DEBUG)
        return returnStr

    path, dirs, files = get_path_dir_files(dirName, nzbName, type)

    returnStr += logHelper(u"PostProcessing Path: " + path, logger.DEBUG)
    returnStr += logHelper(u"PostProcessing Dirs: " + str(dirs), logger.DEBUG)

    rarFiles = filter(helpers.isRarFile, files)
    rarContent = unRAR(path, rarFiles, force)
    files += rarContent
    videoFiles = filter(helpers.isMediaFile, files)
    videoInRar = filter(helpers.isMediaFile, rarContent)

    returnStr += logHelper(u"PostProcessing Files: " + str(files),
                           logger.DEBUG)
    returnStr += logHelper(u"PostProcessing VideoFiles: " + str(videoFiles),
                           logger.DEBUG)
    returnStr += logHelper(u"PostProcessing RarContent: " + str(rarContent),
                           logger.DEBUG)
    returnStr += logHelper(u"PostProcessing VideoInRar: " + str(videoInRar),
                           logger.DEBUG)

    # If nzbName is set and there's more than one video file in the folder, files will be lost (overwritten).
    nzbNameOriginal = nzbName
    if len(videoFiles) >= 2:
        nzbName = None

    if not process_method:
        process_method = sickbeard.PROCESS_METHOD

    process_result = True

    # Don't link media when the media is extracted from a rar in the same path
    if process_method in ('hardlink', 'symlink') and videoInRar:
        process_media(path, videoInRar, nzbName, 'move', force, is_priority)
        delete_files(path, rarContent)
        for video in set(videoFiles) - set(videoInRar):
            process_media(path, [video], nzbName, process_method, force,
                          is_priority)
    else:
        for video in videoFiles:
            process_media(path, [video], nzbName, process_method, force,
                          is_priority)

    # Process video files in all TV subdirs
    for dir in [
            x for x in dirs if validateDir(path, x, nzbNameOriginal, failed)
    ]:

        process_result = True

        for processPath, processDir, fileList in ek.ek(os.walk,
                                                       ek.ek(
                                                           os.path.join, path,
                                                           dir),
                                                       topdown=False):

            rarFiles = filter(helpers.isRarFile, fileList)
            rarContent = unRAR(processPath, rarFiles, force)
            fileList = set(fileList + rarContent)
            videoFiles = filter(helpers.isMediaFile, fileList)
            videoInRar = filter(helpers.isMediaFile, rarContent)
            notwantedFiles = [x for x in fileList if x not in videoFiles]

            # Don't link media when the media is extracted from a rar in the same path
            if process_method in ('hardlink', 'symlink') and videoInRar:
                process_media(processPath, videoInRar, nzbName, 'move', force,
                              is_priority)
                process_media(processPath,
                              set(videoFiles) - set(videoInRar), nzbName,
                              process_method, force, is_priority)
                delete_files(processPath, rarContent)
            else:
                process_media(processPath, videoFiles, nzbName, process_method,
                              force, is_priority)

                # Avoid deleting files
                if process_method != "move" or not process_result \
                or type == "manual":
                    continue

                delete_files(processPath, notwantedFiles)

                if process_method == "move" \
                and helpers.real_path(processPath) != helpers.real_path(sickbeard.TV_DOWNLOAD_DIR):
                    delete_dir(processPath)

    return returnStr
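
As with process_dir above, a hypothetical call to this older processDir variant (paths and names are illustrative only, and the Sick Beard application context is assumed):

log_output = processDir('/downloads/complete/Show.S01E01.720p',
                        nzbName='Show.S01E01.720p',
                        process_method='move')
print(log_output)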