Exemple #1
0
    def saveSubtitles(self, use_subtitles=None, subtitles_plugins=None, subtitles_languages=None, subtitles_dir=None, subtitles_perfect_match=None,
                      service_order=None, subtitles_history=None, subtitles_finder_frequency=None, subtitles_erase_cache=None,
                      subtitles_multi=None, embedded_subtitles_all=None, subtitles_extra_scripts=None, subtitles_pre_scripts=None, subtitles_hearing_impaired=None,
                      addic7ed_user=None, addic7ed_pass=None, itasa_user=None, itasa_pass=None, legendastv_user=None, legendastv_pass=None, opensubtitles_user=None, opensubtitles_pass=None,
                      subtitles_keep_only_wanted=None, embedded_subtitles_unknown_lang=None, subtitles_stop_at_first=None):
        """
        Save Subtitle Search related settings.

        Parameters arrive as raw form values (strings or None); checkbox
        fields are normalized through config.checkbox_to_value. Persists the
        configuration, invalidates the subtitle provider pool, then redirects
        back to the subtitles config page.
        """
        results = []  # error messages to report; nothing currently appends here

        config.change_SUBTITLES_FINDER_FREQUENCY(subtitles_finder_frequency)
        config.change_USE_SUBTITLES(use_subtitles)
        app.SUBTITLES_ERASE_CACHE = config.checkbox_to_value(subtitles_erase_cache)
        # Keep only language codes recognized by the subtitle system
        app.SUBTITLES_LANGUAGES = [code.strip() for code in subtitles_languages.split(',')
                                   if code.strip() in subtitles.subtitle_code_filter()] if subtitles_languages else []
        app.SUBTITLES_DIR = subtitles_dir
        app.SUBTITLES_PERFECT_MATCH = config.checkbox_to_value(subtitles_perfect_match)
        app.SUBTITLES_HISTORY = config.checkbox_to_value(subtitles_history)
        app.IGNORE_EMBEDDED_SUBS = config.checkbox_to_value(embedded_subtitles_all)
        app.ACCEPT_UNKNOWN_EMBEDDED_SUBS = config.checkbox_to_value(embedded_subtitles_unknown_lang)
        app.SUBTITLES_STOP_AT_FIRST = config.checkbox_to_value(subtitles_stop_at_first)
        app.SUBTITLES_HEARING_IMPAIRED = config.checkbox_to_value(subtitles_hearing_impaired)
        # Multi-language downloading is forced on when more than one language is wanted
        app.SUBTITLES_MULTI = 1 if len(app.SUBTITLES_LANGUAGES) > 1 else config.checkbox_to_value(subtitles_multi)
        app.SUBTITLES_KEEP_ONLY_WANTED = config.checkbox_to_value(subtitles_keep_only_wanted)
        # BUG FIX: guard against None -- these fields may be missing from the form,
        # which previously raised AttributeError on .split()
        app.SUBTITLES_EXTRA_SCRIPTS = [x.strip() for x in (subtitles_extra_scripts or '').split('|') if x.strip()]
        app.SUBTITLES_PRE_SCRIPTS = [x.strip() for x in (subtitles_pre_scripts or '').split('|') if x.strip()]

        # Subtitles services: whitespace-separated "name:enabled" pairs
        services_str_list = (service_order or '').split()
        subtitles_services_list = []
        subtitles_services_enabled = []
        for cur_service_str in services_str_list:
            cur_service, cur_enabled = cur_service_str.split(':')
            subtitles_services_list.append(cur_service)
            subtitles_services_enabled.append(int(cur_enabled))

        app.SUBTITLES_SERVICES_LIST = subtitles_services_list
        app.SUBTITLES_SERVICES_ENABLED = subtitles_services_enabled

        app.ADDIC7ED_USER = addic7ed_user or ''
        app.ADDIC7ED_PASS = addic7ed_pass or ''
        app.ITASA_USER = itasa_user or ''
        app.ITASA_PASS = itasa_pass or ''
        app.LEGENDASTV_USER = legendastv_user or ''
        app.LEGENDASTV_PASS = legendastv_pass or ''
        app.OPENSUBTITLES_USER = opensubtitles_user or ''
        app.OPENSUBTITLES_PASS = opensubtitles_pass or ''

        app.instance.save_config()
        # Reset provider pool so next time we use the newest settings
        subtitles.get_provider_pool.invalidate()

        if results:
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br>\n'.join(results))
        else:
            # os.path.join() with a single argument was a no-op; pass the path directly
            ui.notifications.message('Configuration Saved', app.CONFIG_FILE)

        return self.redirect('/config/subtitles/')
Exemple #2
0
    def addShowToBlacklist(self, seriesid):
        """Add the given TVDB series to the configured Trakt blacklist."""
        # Payload for the Trakt "add items to list" endpoint
        payload = {'shows': [{'ids': {'tvdb': seriesid}}]}

        settings = {
            'trakt_api_secret': app.TRAKT_API_SECRET,
            'trakt_api_key': app.TRAKT_API_KEY,
            'trakt_access_token': app.TRAKT_ACCESS_TOKEN,
            'trakt_refresh_token': app.TRAKT_REFRESH_TOKEN,
        }

        show_name = get_showname_from_indexer(INDEXER_TVDBV2, seriesid)
        try:
            api = TraktApi(timeout=app.TRAKT_TIMEOUT,
                           ssl_verify=app.SSL_VERIFY,
                           **settings)
            endpoint = 'users/{0}/lists/{1}/items'.format(
                app.TRAKT_USERNAME, app.TRAKT_BLACKLIST_NAME)
            api.request(endpoint, payload, method='POST')
            ui.notifications.message(
                'Success!', "Added show '{0}' to blacklist".format(show_name))
        except Exception as e:
            ui.notifications.error(
                'Error!',
                "Unable to add show '{0}' to blacklist. Check logs.".format(
                    show_name))
            logger.log(
                "Error while adding show '{0}' to trakt blacklist: {1}".format(
                    show_name, e), logger.WARNING)
Exemple #3
0
    def post(self, *args, **kwargs):
        """
        Submit Login.

        Checks the submitted credentials against the configured web
        username/password, sets the secure session cookie on success and
        redirects to the requested page (or the default page).
        """

        api_key = None

        username = app.WEB_USERNAME
        password = app.WEB_PASSWORD

        # An empty configured username/password means that field is not required
        if all([(self.get_argument('username') == username or not username),
                (self.get_argument('password') == password or not password)]):
            api_key = app.API_KEY

        if app.NOTIFY_ON_LOGIN and not helpers.is_ip_private(self.request.remote_ip):
            notifiers.notify_login(self.request.remote_ip)

        if api_key:
            remember_me = int(self.get_argument('remember_me', default=0) or 0)
            self.set_secure_cookie(app.SECURE_TOKEN, api_key, expires_days=30 if remember_me else None)
            logger.log('User logged into the Medusa web interface', logger.INFO)
        else:
            logger.log('User attempted a failed login to the Medusa web interface from IP: {ip}'.format
                       (ip=self.request.remote_ip), logger.WARNING)

        redirect_page = self.get_argument('next', None)
        if redirect_page:
            # FIX: reuse redirect_page instead of fetching 'next' a second time.
            # NOTE(review): the target is used verbatim -- potential open
            # redirect; consider restricting to same-origin paths.
            self.redirect('{page}'.format(page=redirect_page))
        else:
            self.redirect('/{page}/'.format(page=app.DEFAULT_PAGE))
Exemple #4
0
    def saveSubtitles(self, use_subtitles=None, subtitles_plugins=None, subtitles_languages=None, subtitles_dir=None, subtitles_perfect_match=None,
                      service_order=None, subtitles_history=None, subtitles_finder_frequency=None, subtitles_erase_cache=None,
                      subtitles_multi=None, embedded_subtitles_all=None, subtitles_extra_scripts=None, subtitles_pre_scripts=None, subtitles_hearing_impaired=None,
                      addic7ed_user=None, addic7ed_pass=None, itasa_user=None, itasa_pass=None, legendastv_user=None, legendastv_pass=None, opensubtitles_user=None, opensubtitles_pass=None,
                      subtitles_keep_only_wanted=None, embedded_subtitles_unknown_lang=None, subtitles_stop_at_first=None):
        """
        Save Subtitle Search related settings.

        Parameters arrive as raw form values (strings or None); checkbox
        fields are normalized through config.checkbox_to_value. Persists the
        configuration, invalidates the subtitle provider pool, then redirects
        back to the subtitles config page.
        """
        results = []  # error messages to report; nothing currently appends here

        config.change_SUBTITLES_FINDER_FREQUENCY(subtitles_finder_frequency)
        config.change_USE_SUBTITLES(use_subtitles)
        app.SUBTITLES_ERASE_CACHE = config.checkbox_to_value(subtitles_erase_cache)
        # Keep only language codes recognized by the subtitle system
        app.SUBTITLES_LANGUAGES = [code.strip() for code in subtitles_languages.split(',')
                                   if code.strip() in subtitles.subtitle_code_filter()] if subtitles_languages else []
        app.SUBTITLES_DIR = subtitles_dir
        app.SUBTITLES_PERFECT_MATCH = config.checkbox_to_value(subtitles_perfect_match)
        app.SUBTITLES_HISTORY = config.checkbox_to_value(subtitles_history)
        app.IGNORE_EMBEDDED_SUBS = config.checkbox_to_value(embedded_subtitles_all)
        app.ACCEPT_UNKNOWN_EMBEDDED_SUBS = config.checkbox_to_value(embedded_subtitles_unknown_lang)
        app.SUBTITLES_STOP_AT_FIRST = config.checkbox_to_value(subtitles_stop_at_first)
        app.SUBTITLES_HEARING_IMPAIRED = config.checkbox_to_value(subtitles_hearing_impaired)
        # Multi-language downloading is forced on when more than one language is wanted
        app.SUBTITLES_MULTI = 1 if len(app.SUBTITLES_LANGUAGES) > 1 else config.checkbox_to_value(subtitles_multi)
        app.SUBTITLES_KEEP_ONLY_WANTED = config.checkbox_to_value(subtitles_keep_only_wanted)
        # BUG FIX: guard against None -- these fields may be missing from the form,
        # which previously raised AttributeError on .split()
        app.SUBTITLES_EXTRA_SCRIPTS = [x.strip() for x in (subtitles_extra_scripts or '').split('|') if x.strip()]
        app.SUBTITLES_PRE_SCRIPTS = [x.strip() for x in (subtitles_pre_scripts or '').split('|') if x.strip()]

        # Subtitles services: whitespace-separated "name:enabled" pairs
        # (renamed curServiceStr -> cur_service_str for PEP 8 consistency)
        services_str_list = (service_order or '').split()
        subtitles_services_list = []
        subtitles_services_enabled = []
        for cur_service_str in services_str_list:
            cur_service, cur_enabled = cur_service_str.split(':')
            subtitles_services_list.append(cur_service)
            subtitles_services_enabled.append(int(cur_enabled))

        app.SUBTITLES_SERVICES_LIST = subtitles_services_list
        app.SUBTITLES_SERVICES_ENABLED = subtitles_services_enabled

        app.ADDIC7ED_USER = addic7ed_user or ''
        app.ADDIC7ED_PASS = addic7ed_pass or ''
        app.ITASA_USER = itasa_user or ''
        app.ITASA_PASS = itasa_pass or ''
        app.LEGENDASTV_USER = legendastv_user or ''
        app.LEGENDASTV_PASS = legendastv_pass or ''
        app.OPENSUBTITLES_USER = opensubtitles_user or ''
        app.OPENSUBTITLES_PASS = opensubtitles_pass or ''

        app.instance.save_config()
        # Reset provider pool so next time we use the newest settings
        subtitles.get_provider_pool.invalidate()

        if results:
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br>\n'.join(results))
        else:
            # os.path.join() with a single argument was a no-op; pass the path directly
            ui.notifications.message('Configuration Saved', app.CONFIG_FILE)

        return self.redirect('/config/subtitles/')
Exemple #5
0
    def subtitles_enabled(*args):
        """Try to parse names to a show and check whether the show has subtitles enabled.

        :param args: one or more release/file names to try to parse
        :return: True if a parsed show has subtitles enabled, False otherwise
        :rtype: bool
        """
        for candidate in args:
            if not candidate:
                continue

            # Parsing failures are logged and the next candidate is tried
            try:
                parse_result = NameParser().parse(candidate)
            except (InvalidNameException, InvalidShowException):
                logger.log(
                    'Not enough information to parse filename into a valid show. Consider adding scene '
                    'exceptions or improve naming for: {name}'.format(
                        name=candidate), logger.WARNING)
                continue

            if not parse_result.show.indexerid:
                logger.log('Empty indexer ID for: {name}'.format(name=candidate),
                           logger.WARNING)
                continue

            main_db_con = db.DBConnection()
            rows = main_db_con.select(
                "SELECT subtitles FROM tv_shows WHERE indexer_id = ? LIMIT 1",
                [parse_result.show.indexerid])
            return bool(rows[0][b'subtitles']) if rows else False

        return False
Exemple #6
0
    def render(self, *args, **kwargs):
        """
        Render the page template.

        Falls back to the 500 error template (logging the Mako traceback)
        when rendering raises.
        """
        # Fill in any default arguments the caller did not override
        for key in self.arguments:
            kwargs.setdefault(key, self.arguments[key])

        kwargs['makoStartTime'] = time.time()
        try:
            return self.template.render_unicode(*args, **kwargs)
        except Exception:
            kwargs['title'] = '500'
            kwargs['header'] = 'Mako Error'
            backtrace = RichTraceback()
            kwargs['backtrace'] = backtrace
            # Log each frame of the Mako traceback for debugging
            for filename, lineno, function, _ in backtrace.traceback:
                logger.log(
                    u'File {name}, line {line}, in {func}'.format(
                        name=filename, line=lineno, func=function),
                    logger.DEBUG)
            logger.log(u'{name}: {error}'.format(
                name=backtrace.error.__class__.__name__,
                error=backtrace.error))
            return get_lookup().get_template('500.mako').render_unicode(
                *args, **kwargs)
Exemple #7
0
    def saveAnime(self,
                  use_anidb=None,
                  anidb_username=None,
                  anidb_password=None,
                  anidb_use_mylist=None,
                  split_home=None,
                  split_home_in_tabs=None):
        """
        Save anime related settings.

        Parameters arrive as raw form values (strings or None); checkbox
        fields are normalized through config.checkbox_to_value. Persists the
        configuration and redirects back to the anime config page.
        """

        results = []  # error messages to report; nothing currently appends here

        app.USE_ANIDB = config.checkbox_to_value(use_anidb)
        app.ANIDB_USERNAME = anidb_username
        app.ANIDB_PASSWORD = anidb_password
        app.ANIDB_USE_MYLIST = config.checkbox_to_value(anidb_use_mylist)
        app.ANIME_SPLIT_HOME = config.checkbox_to_value(split_home)
        app.ANIME_SPLIT_HOME_IN_TABS = config.checkbox_to_value(
            split_home_in_tabs)

        app.instance.save_config()

        if results:
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br>\n'.join(results))
        else:
            # os.path.join() with a single argument was a no-op; pass the path directly
            ui.notifications.message('Configuration Saved', app.CONFIG_FILE)

        return self.redirect('/config/anime/')
Exemple #8
0
    def post(self, *args, **kwargs):
        """
        Submit Login.

        Checks the submitted credentials against the configured web
        username/password, sets the secure session cookie on success and
        redirects to the requested page (or the default page).
        """

        api_key = None

        username = app.WEB_USERNAME
        password = app.WEB_PASSWORD

        # An empty configured username/password means that field is not required
        if all([(self.get_argument('username') == username or not username),
                (self.get_argument('password') == password or not password)]):
            api_key = app.API_KEY

        if app.NOTIFY_ON_LOGIN and not helpers.is_ip_private(
                self.request.remote_ip):
            notifiers.notify_login(self.request.remote_ip)

        if api_key:
            remember_me = int(self.get_argument('remember_me', default=0) or 0)
            self.set_secure_cookie(app.SECURE_TOKEN,
                                   api_key,
                                   expires_days=30 if remember_me else None)
            logger.log('User logged into the Medusa web interface',
                       logger.INFO)
        else:
            logger.log(
                'User attempted a failed login to the Medusa web interface from IP: {ip}'
                .format(ip=self.request.remote_ip), logger.WARNING)

        redirect_page = self.get_argument('next', None)
        if redirect_page:
            # FIX: reuse redirect_page instead of fetching 'next' a second time.
            # NOTE(review): the target is used verbatim -- potential open
            # redirect; consider restricting to same-origin paths.
            self.redirect('{page}'.format(page=redirect_page))
        else:
            self.redirect('/{page}/'.format(page=app.DEFAULT_PAGE))
Exemple #9
0
    def saveAnime(self, use_anidb=None, anidb_username=None, anidb_password=None, anidb_use_mylist=None,
                  split_home=None, split_home_in_tabs=None):
        """
        Save anime related settings.

        Parameters arrive as raw form values (strings or None); checkbox
        fields are normalized through config.checkbox_to_value. Persists the
        configuration and redirects back to the anime config page.
        """

        results = []  # error messages to report; nothing currently appends here

        app.USE_ANIDB = config.checkbox_to_value(use_anidb)
        app.ANIDB_USERNAME = anidb_username
        app.ANIDB_PASSWORD = anidb_password
        app.ANIDB_USE_MYLIST = config.checkbox_to_value(anidb_use_mylist)
        app.ANIME_SPLIT_HOME = config.checkbox_to_value(split_home)
        app.ANIME_SPLIT_HOME_IN_TABS = config.checkbox_to_value(split_home_in_tabs)

        app.instance.save_config()

        if results:
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br>\n'.join(results))
        else:
            # os.path.join() with a single argument was a no-op; pass the path directly
            ui.notifications.message('Configuration Saved', app.CONFIG_FILE)

        return self.redirect('/config/anime/')
Exemple #10
0
    def __init__(self, filename=None, suffix=None, row_type='dict'):
        """
        Open (or reuse) a shared SQLite connection for the given database file.

        :param filename: database filename; defaults to the application DB
        :param suffix: optional filename suffix
        :param row_type: row factory type, 'dict' by default
        """
        self.filename = filename or app.APPLICATION_DB
        self.suffix = suffix
        self.row_type = row_type

        try:
            # Reuse an existing connection for this file when one is cached
            if self.filename not in db_cons or not db_cons[self.filename]:
                db_locks[self.filename] = threading.Lock()

                self.connection = sqlite3.connect(self.path, 20, check_same_thread=False)
                self.connection.text_factory = DBConnection._unicode_text_factory

                db_cons[self.filename] = self.connection
            else:
                self.connection = db_cons[self.filename]

            # start off row factory configured as before out of
            # paranoia but wait to do so until other potential users
            # of the shared connection are done using
            # it... technically not required as row factory is reset
            # in all the public methods after the lock has been
            # acquired
            with db_locks[self.filename]:
                self._set_row_factory()

        except sqlite3.OperationalError:
            # BUG FIX: logger.WARNING was previously passed to str.format()
            # instead of logger.log(), so the message was logged at the
            # default level and the level argument was silently ignored.
            logger.log(u'Please check your database owner/permissions: {}'.format(self.path),
                       logger.WARNING)
        except Exception as e:
            logger.log(u'DB error: ' + ex(e), logger.ERROR)
            raise
Exemple #11
0
    def __init__(self, filename=None, suffix=None, row_type='dict'):
        """
        Open (or reuse) a shared SQLite connection for the given database file.

        :param filename: database filename; defaults to the application DB
        :param suffix: optional filename suffix
        :param row_type: row factory type, 'dict' by default
        """
        self.filename = filename or app.APPLICATION_DB
        self.suffix = suffix
        self.row_type = row_type

        try:
            # Reuse an existing connection for this file when one is cached
            if self.filename not in db_cons or not db_cons[self.filename]:
                db_locks[self.filename] = threading.Lock()

                self.connection = sqlite3.connect(self.path,
                                                  20,
                                                  check_same_thread=False)
                self.connection.text_factory = DBConnection._unicode_text_factory

                db_cons[self.filename] = self.connection
            else:
                self.connection = db_cons[self.filename]

            # start off row factory configured as before out of
            # paranoia but wait to do so until other potential users
            # of the shared connection are done using
            # it... technically not required as row factory is reset
            # in all the public methods after the lock has been
            # acquired
            with db_locks[self.filename]:
                self._set_row_factory()

        except sqlite3.OperationalError:
            # BUG FIX: logger.WARNING was previously passed to str.format()
            # instead of logger.log(), so the message was logged at the
            # default level and the level argument was silently ignored.
            logger.log(
                u'Please check your database owner/permissions: {}'.format(self.path),
                logger.WARNING)
        except Exception as e:
            logger.log(u'DB error: ' + ex(e), logger.ERROR)
            raise
Exemple #12
0
def get_season_nzbs(name, url_data, season):
    """
    Split a season NZB into episodes

    :param name: NZB name
    :param url_data: XML content of the NZB (bytes or str)
    :param season: Season number to check
    :return: tuple of (dict mapping episode name -> list of <file> elements,
             xmlns string) or ({}, '') when parsing fails
    """

    # TODO: clean up these regex'es and comment them
    regex_string = {
        # Match the xmlns in an nzb
        # Example:  tag == '{http://www.newzbin.com/DTD/2003/nzb}file'
        #           regex match returns  'http://www.newzbin.com/DTD/2003/nzb'
        'nzb_xmlns': r"{(http://[\w_\./]+nzb)}file",
        # FIX: raw strings silence the anomalous-backslash warnings that were
        # previously suppressed with pylint disables (same pattern bytes).
        'scene_name': r'([\w\._\ ]+)[\. ]S%02d[\. ]([\w\._\-\ ]+)[\- ]([\w_\-\ ]+?)',
        'episode': r'\.S%02d(?:[E0-9]+)\.[\w\._]+\-\w+',
    }

    try:
        show_xml = ETree.ElementTree(ETree.XML(url_data))
    except SyntaxError:
        logger.log(u"Unable to parse the XML of " + name + ", not splitting it", logger.ERROR)  # pylint: disable=no-member
        return {}, ''

    nzb_element = show_xml.getroot()

    scene_name_match = re.search(regex_string['scene_name'] % season, name, re.I)
    if scene_name_match:
        show_name = scene_name_match.groups()[0]
    else:  # Make sure we aren't missing valid results after changing name_parser and the quality detection
        # Most of these will likely be invalid shows
        logger.log(u"Unable to parse " + name + " into a scene name.", logger.DEBUG)   # pylint: disable=no-member
        return {}, ''

    regex = '(' + re.escape(show_name) + regex_string['episode'] % season + ')'
    regex = regex.replace(' ', '.')

    ep_files = {}
    xmlns = None

    # FIX: iterate the element directly -- Element.getchildren() was
    # deprecated and removed in Python 3.9.
    for cur_file in nzb_element:
        xmlns_match = re.match(regex_string['nzb_xmlns'], cur_file.tag)
        if not xmlns_match:
            continue
        else:
            xmlns = xmlns_match.group(1)
        match = re.search(regex, cur_file.get("subject"), re.I)
        if not match:
            # regex couldn't match cur_file.get("subject")
            continue
        cur_ep = match.group(1)
        if cur_ep not in ep_files:
            ep_files[cur_ep] = [cur_file]
        else:
            ep_files[cur_ep].append(cur_file)
    # TODO: Decide what to do if we found multiple valid xmlns strings, should we only return the last???
    return ep_files, xmlns
Exemple #13
0
    def forceBacklog(self):
        """Force the backlog search scheduler to run on its next cycle."""
        if app.backlog_search_scheduler.forceRun():
            logger.log('Backlog search forced')
            ui.notifications.message('Backlog search started')

        return self.redirect('/manage/manageSearches/')
Exemple #14
0
    def forceBacklog(self):
        """Force the backlog search scheduler to run on its next cycle."""
        if app.backlog_search_scheduler.forceRun():
            logger.log('Backlog search forced')
            ui.notifications.message('Backlog search started')

        return self.redirect('/manage/manageSearches/')
Exemple #15
0
    def forceSubtitlesFinder(self):
        """Force the subtitle finder scheduler to run on its next cycle."""
        if app.subtitles_finder_scheduler.forceRun():
            logger.log('Subtitle search forced')
            ui.notifications.message('Subtitle search started')

        return self.redirect('/manage/manageSearches/')
Exemple #16
0
    def forceSubtitlesFinder(self):
        """Force the subtitle finder scheduler to run on its next cycle."""
        if app.subtitles_finder_scheduler.forceRun():
            logger.log('Subtitle search forced')
            ui.notifications.message('Subtitle search started')

        return self.redirect('/manage/manageSearches/')
Exemple #17
0
def find_release(ep_obj):
    """
    Find releases in history by show ID and season.

    Return None for release if multiple found or no release found.

    :param ep_obj: episode object whose series/season/episode identify the snatch
    :return: tuple of (release name or None, provider name or None)
    """
    release = None
    provider = None

    # Clear old snatches for this release if any exist.
    # FIX: use bound parameters instead of str.format so this query is
    # consistent with (and as injection-safe as) the other queries below.
    failed_db_con = db.DBConnection('failed.db')
    failed_db_con.action(
        'DELETE FROM history '
        'WHERE showid = ?'
        ' AND season = ?'
        ' AND episode = ?'
        ' AND date < ( SELECT max(date)'
        '              FROM history'
        '              WHERE showid = ?'
        '               AND season = ?'
        '               AND episode = ?'
        '             )',
        [ep_obj.series.indexerid, ep_obj.season, ep_obj.episode] * 2
    )

    # Search for release in snatch history
    results = failed_db_con.select(
        'SELECT release, provider, date '
        'FROM history '
        'WHERE showid=?'
        ' AND season=?'
        ' AND episode=?',
        [ep_obj.series.indexerid, ep_obj.season, ep_obj.episode]
    )

    for result in results:
        release = str(result['release'])
        provider = str(result['provider'])
        date = result['date']

        # Clear any incomplete snatch records for this release if any exist
        failed_db_con.action(
            'DELETE FROM history '
            'WHERE release=?'
            ' AND date!=?',
            [release, date]
        )

        # Found a previously failed release
        logger.log(u'Failed release found for {show} {ep}: {release}'.format
                   (show=ep_obj.series.name, ep=episode_num(ep_obj.season, ep_obj.episode),
                    release=result['release']), logger.DEBUG)
        return release, provider

    # Release was not found
    logger.log(u'No releases found for {show} {ep}'.format
               (show=ep_obj.series.name, ep=episode_num(ep_obj.season, ep_obj.episode)), logger.DEBUG)
    return release, provider
Exemple #18
0
def log_failed(release):
    """Log release as failed in failed.db.

    Looks up the release in the snatch history to recover its size and
    provider; when multiple conflicting snatches exist, the conflicting
    entries are deleted and the release is recorded without a size/provider.
    """
    log_str = u''
    size = -1
    provider = ''

    release = prepare_failed_name(release)

    failed_db_con = db.DBConnection('failed.db')
    history_rows = failed_db_con.select(
        'SELECT * '
        'FROM history '
        'WHERE release=?',
        [release]
    )

    if not history_rows:
        logger.log(u'Release not found in snatch history: {0}'.format(release), logger.WARNING)
    elif len(history_rows) == 1:
        # Exactly one snatch: trust its size and provider
        size = history_rows[0]['size']
        provider = history_rows[0]['provider']
    else:
        logger.log(u'Multiple logged snatches found for release',
                   logger.WARNING)
        distinct_sizes = len({row['size'] for row in history_rows})
        distinct_providers = len({row['provider'] for row in history_rows})

        if distinct_sizes == 1:
            logger.log(u'However, they are all the same size. '
                       u'Continuing with found size.', logger.WARNING)
            size = history_rows[0]['size']
        else:
            logger.log(u'They also vary in size. '
                       u'Deleting the logged snatches and recording this '
                       u'release with no size/provider', logger.WARNING)
            for row in history_rows:
                delete_logged_snatch(
                    row['release'],
                    row['size'],
                    row['provider']
                )

        if distinct_providers == 1:
            logger.log(u'They are also from the same provider. '
                       u'Using it as well.')
            provider = history_rows[0]['provider']

    if not has_failed(release, size, provider):
        failed_db_con = db.DBConnection('failed.db')
        failed_db_con.action(
            'INSERT INTO failed (release, size, provider) '
            'VALUES (?, ?, ?)',
            [release, size, provider]
        )

    delete_logged_snatch(release, size, provider)

    return log_str
Exemple #19
0
def update_network_dict():
    """Update timezone information from Medusa repositories.

    Fetches the remote network->timezone list, then inserts new entries,
    updates changed ones, and purges local entries that no longer exist
    remotely. Falls back to the cached local data when the fetch fails.
    """
    logger.log('Started updating network timezones', logger.DEBUG)
    url = '{base_url}/sb_network_timezones/network_timezones.txt'.format(base_url=app.BASE_PYMEDUSA_URL)
    response = session.get(url)
    if not response or not response.text:
        # Fetch failed: keep whatever is already cached locally
        logger.log('Updating network timezones failed, this can happen from time to time. URL: %s' % url, logger.INFO)
        load_network_dict()
        return

    remote_networks = {}
    try:
        # Each line is "<network name>:<timezone>"; split on the LAST ':'
        # so network names that themselves contain ':' survive.
        for line in response.text.splitlines():
            (key, val) = line.strip().rsplit(u':', 1)
            if key is None or val is None:
                continue
            remote_networks[key] = val
    except (IOError, OSError) as error:
        logger.log('Unable to build the network dictionary. Aborting update. Error: {error}'.format
                   (error=error), logger.WARNING)
        return

    # Don't continue because if empty dict, var `existing` be false for all networks, thus deleting all
    if not remote_networks:
        logger.log(u'Unable to update network timezones as fetched network dict is empty', logger.WARNING)
        return

    cache_db_con = db.DBConnection('cache.db')
    sql_result = cache_db_con.select('SELECT network_name, timezone FROM network_timezones;')

    # Snapshot of what is currently cached locally
    network_list = {
        row['network_name']: row['timezone']
        for row in sql_result
    }

    queries = []
    for network, timezone in iteritems(remote_networks):
        existing = network in network_list
        if not existing:
            queries.append(['INSERT OR IGNORE INTO network_timezones VALUES (?,?);', [network, timezone]])
        elif network_list[network] != timezone:
            queries.append(['UPDATE OR IGNORE network_timezones SET timezone = ? WHERE network_name = ?;', [timezone, network]])

        if existing:
            # if the network from cache DB is in the remote network, remove from the `to remove` list
            del network_list[network]

    if network_list:
        # Delete all networks that are not in the remote network list
        purged = [x for x in network_list]
        queries.append(['DELETE FROM network_timezones WHERE network_name IN (%s);' % ','.join(['?'] * len(purged)), purged])

    if queries:
        cache_db_con.mass_action(queries)
        load_network_dict()

    logger.log(u'Finished updating network timezones', logger.DEBUG)
Exemple #20
0
def upgradeDatabase(connection, schema):
    """
    Upgrade the given database to the provided schema, with logging.

    :param connection: Existing DB Connection to use
    :param schema: New schema to upgrade to
    """
    # Log which database file is being checked before applying upgrades
    logger.log(u'Checking database structure...' + connection.filename, logger.DEBUG)
    _processUpgrade(connection, schema)
Exemple #21
0
def log_failed(release):
    """Log release as failed in failed.db.

    Looks up the release in the snatch history to recover its size and
    provider; when multiple conflicting snatches exist, the conflicting
    entries are deleted and the release is recorded without a size/provider.
    """
    log_str = u''
    size = -1
    provider = ''

    release = prepare_failed_name(release)

    failed_db_con = db.DBConnection('failed.db')
    sql_results = failed_db_con.select(
        'SELECT * '
        'FROM history '
        'WHERE release=?',
        [release]
    )

    if not sql_results:
        logger.log(u'Release not found in snatch history: {0}'.format(release), logger.WARNING)
    elif len(sql_results) == 1:
        # Exactly one snatch: trust its size and provider
        size = sql_results[0]['size']
        provider = sql_results[0]['provider']
    else:
        logger.log(u'Multiple logged snatches found for release',
                   logger.WARNING)
        # Count how many distinct sizes/providers the snatches report
        sizes = len(set(x['size'] for x in sql_results))
        providers = len(set(x['provider'] for x in sql_results))
        if sizes == 1:
            logger.log(u'However, they are all the same size. '
                       u'Continuing with found size.', logger.WARNING)
            size = sql_results[0]['size']
        else:
            # Conflicting sizes: drop the ambiguous snatch records and
            # record this failure without a size
            logger.log(u'They also vary in size. '
                       u'Deleting the logged snatches and recording this '
                       u'release with no size/provider', logger.WARNING)
            for result in sql_results:
                delete_logged_snatch(
                    result['release'],
                    result['size'],
                    result['provider']
                )

        if providers == 1:
            logger.log(u'They are also from the same provider. '
                       u'Using it as well.')
            provider = sql_results[0]['provider']

    # Only record the failure once per (release, size, provider) triple
    if not has_failed(release, size, provider):
        failed_db_con = db.DBConnection('failed.db')
        failed_db_con.action(
            'INSERT INTO failed (release, size, provider) '
            'VALUES (?, ?, ?)',
            [release, size, provider]
        )

    delete_logged_snatch(release, size, provider)

    return log_str
def update_network_dict():
    """Update timezone information from Medusa repositories.

    Fetches the remote ``network: timezone`` list and syncs the
    ``network_timezones`` table in cache.db with it: inserts new networks,
    updates changed timezones and deletes networks no longer present
    remotely. Falls back to the locally cached data when the fetch fails.
    """
    logger.log('Started updating network timezones', logger.DEBUG)
    url = '{base_url}/sb_network_timezones/network_timezones.txt'.format(base_url=BASE_PYMEDUSA_URL)
    response = session.get(url)
    if not response or not response.text:
        logger.log('Updating network timezones failed, this can happen from time to time. URL: %s' % url, logger.INFO)
        load_network_dict()
        return

    remote_networks = {}
    try:
        for line in response.text.splitlines():
            line = line.strip()
            # Skip blank/malformed lines: unpacking rsplit() on a line
            # without a ':' raises ValueError, which the old
            # `except (IOError, OSError)` clause never caught.
            if u':' not in line:
                continue
            (key, val) = line.rsplit(u':', 1)
            if not key or not val:
                continue
            remote_networks[key] = val
    except (IOError, OSError, ValueError) as error:
        logger.log('Unable to build the network dictionary. Aborting update. Error: {error}'.format
                   (error=error), logger.WARNING)
        return

    # Don't continue because if empty dict, var `existing` be false for all networks, thus deleting all
    if not remote_networks:
        logger.log(u'Unable to update network timezones as fetched network dict is empty', logger.WARNING)
        return

    cache_db_con = db.DBConnection('cache.db')
    sql_result = cache_db_con.select('SELECT network_name, timezone FROM network_timezones;')

    # Map of what is currently cached locally.
    network_list = {
        row['network_name']: row['timezone']
        for row in sql_result
    }

    queries = []
    for network, timezone in iteritems(remote_networks):
        existing = network in network_list
        if not existing:
            queries.append(['INSERT OR IGNORE INTO network_timezones VALUES (?,?);', [network, timezone]])
        elif network_list[network] != timezone:
            queries.append(['UPDATE OR IGNORE network_timezones SET timezone = ? WHERE network_name = ?;', [timezone, network]])

        if existing:
            # if the network from cache DB is in the remote network, remove from the `to remove` list
            del network_list[network]

    if network_list:
        # Delete all networks that are not in the remote network list
        purged = [x for x in network_list]
        queries.append(['DELETE FROM network_timezones WHERE network_name IN (%s);' % ','.join(['?'] * len(purged)), purged])

    if queries:
        cache_db_con.mass_action(queries)
        load_network_dict()

    logger.log(u'Finished updating network timezones', logger.DEBUG)
Exemple #23
0
def upgradeDatabase(connection, schema):
    """Run a database upgrade against the given connection, with logging.

    :param connection: Existing DB Connection to use
    :param schema: New schema to upgrade to
    """
    message = u'Checking database structure...' + connection.filename
    logger.log(message, logger.DEBUG)
    _processUpgrade(connection, schema)
Exemple #24
0
    def subtitleMissedPP(self):
        """Render the manage page listing post-processing releases that lack subtitles.

        Walks the TV download directory, collects snatched/downloaded media
        files whose show wants subtitles but that have no associated subtitle
        files yet, and renders them via the Mako template.
        """
        t = PageTemplate(rh=self, filename='manage_subtitleMissedPP.mako')
        app.RELEASES_IN_PP = []
        for root, _, files in os.walk(app.TV_DOWNLOAD_DIR, topdown=False):
            # Skip folders that are being used for unpacking
            if u'_UNPACK' in root.upper():
                continue
            for filename in sorted(files):
                if not is_media_file(filename):
                    continue

                video_path = os.path.join(root, filename)
                # Age is measured from the file's change time (st_ctime).
                video_date = datetime.datetime.fromtimestamp(os.stat(video_path).st_ctime)
                video_age = datetime.datetime.today() - video_date

                tv_episode = Episode.from_filepath(video_path)

                if not tv_episode:
                    logger.log(u"Filename '{0}' cannot be parsed to an episode".format(filename), logger.DEBUG)
                    continue

                # Only snatched or downloaded episodes are of interest here.
                ep_status = tv_episode.status
                if ep_status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST):
                    status = 'snatched'
                elif ep_status == DOWNLOADED:
                    status = 'downloaded'
                else:
                    continue

                # Skip shows that have subtitles disabled.
                if not tv_episode.series.subtitles:
                    continue

                # Skip releases that already have associated subtitle files.
                related_files = PostProcessor(video_path).list_associated_files(video_path, subtitles_only=True)
                if related_files:
                    continue

                # NOTE(review): hours/minutes derive from .seconds (excludes
                # whole days); days take priority below so this is consistent.
                age_hours = divmod(video_age.seconds, 3600)[0]
                age_minutes = divmod(video_age.seconds, 60)[0]
                if video_age.days > 0:
                    age_unit = 'd'
                    age_value = video_age.days
                elif age_hours > 0:
                    age_unit = 'h'
                    age_value = age_hours
                else:
                    age_unit = 'm'
                    age_value = age_minutes

                app.RELEASES_IN_PP.append({'release': video_path, 'seriesid': tv_episode.series.indexerid,
                                           'show_name': tv_episode.series.name, 'season': tv_episode.season,
                                           'episode': tv_episode.episode, 'status': status, 'age': age_value,
                                           'age_unit': age_unit, 'date': video_date,
                                           'indexername': tv_episode.series.indexer_name})

        return t.render(releases_in_pp=app.RELEASES_IN_PP,
                        controller='manage', action='subtitleMissedPP')
Exemple #25
0
    def run(self):
        """Execute the queue item: download subtitles for the show, then finish."""
        ShowQueueItem.run(self)

        show = self.show
        message = u'{id}: Downloading subtitles for {show}'.format(id=show.indexerid, show=show.name)
        logger.log(message, logger.INFO)

        show.download_subtitles()
        self.finish()
Exemple #26
0
 def _try_rollback(self):
     """Roll back the current transaction, tolerating the nothing-to-roll-back case."""
     if not self.connection:
         return
     try:
         self.connection.rollback()
     except sqlite3.OperationalError as error:
         # See https://github.com/pymedusa/Medusa/issues/3190
         # sqlite raises when no transaction is active; that is harmless.
         if 'no transaction is active' in error.args[0]:
             logger.log('Rollback not needed, skipping', logger.DEBUG)
         else:
             logger.log('Failed to perform rollback: {error!r}'.format(error=error), logger.ERROR)
Exemple #27
0
 def _try_rollback(self):
     """Roll back the current transaction, tolerating the nothing-to-roll-back case."""
     if not self.connection:
         return
     try:
         self.connection.rollback()
     except sqlite3.OperationalError as error:
         # See https://github.com/pymedusa/Medusa/issues/3190
         # sqlite raises when no transaction is active; that is harmless.
         if 'no transaction is active' in error.args[0]:
             logger.log('Rollback not needed, skipping', logger.DEBUG)
         else:
             logger.log('Failed to perform rollback: {error!r}'.format(error=error), logger.ERROR)
Exemple #28
0
 def clear_cache():
     """
     Remove the Mako cache directory

     Best-effort: any failure is logged as a warning and never raised,
     since a stale template cache is not fatal.
     """
     try:
         cache_folder = ek(os.path.join, app.CACHE_DIR, 'mako')
         if os.path.isdir(cache_folder):
             shutil.rmtree(cache_folder)
     except Exception:  # pylint: disable=broad-except
         logger.log('Unable to remove the cache/mako directory!',
                    logger.WARNING)
Exemple #29
0
 def is_rar_supported():
     """Return True when the UNRAR tool is available and runnable, else False."""
     try:
         # Probe the configured UNRAR binary; raises when it cannot run.
         rarfile.custom_check([rarfile.UNRAR_TOOL], True)
         return True
     except rarfile.RarExecError:
         logger.log('UNRAR tool not available.', logger.WARNING)
     except Exception as msg:
         logger.log('Rar Not Supported: {error}'.format(error=ex(msg)),
                    logger.ERROR)
     return False
Exemple #30
0
    def action(self, query, args=None, fetchall=False, fetchone=False):
        """
        Execute single query

        Retries up to 5 times when the database is locked or the file cannot
        be opened; any other sqlite error is logged and re-raised.

        :param query: Query string
        :param args: Arguments to query string
        :param fetchall: Boolean to indicate all results must be fetched
        :param fetchone: Boolean to indicate one result must be fetched (to walk results for instance)
        :return: query results
        """
        if query is None:
            return

        sql_results = None
        attempt = 0

        # Serialize access per database file.
        with db_locks[self.filename]:
            self._set_row_factory()
            while attempt < 5:
                try:
                    if args is None:
                        logger.log(self.filename + ': ' + query, logger.DB)
                    else:
                        logger.log(
                            self.filename + ': ' + query + ' with args ' +
                            str(args), logger.DB)

                    sql_results = self._execute(query,
                                                args,
                                                fetchall=fetchall,
                                                fetchone=fetchone)
                    self.connection.commit()

                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError as e:
                    # Transient lock/open failures: wait one second and retry.
                    if 'unable to open database file' in e.args[
                            0] or 'database is locked' in e.args[0]:
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    logger.log(u'Fatal error executing query: ' + ex(e),
                               logger.ERROR)
                    raise

            # time.sleep(0.02)

            return sql_results
Exemple #31
0
def check_valid_abd_naming(pattern=None):
    """
    Check whether a naming pattern round-trips for an air-by-date episode.

    :return: true if the naming is valid, false if not.
    """
    if pattern is None:
        pattern = app.NAMING_PATTERN

    message = u'Checking whether the pattern ' + pattern + ' is valid for an air-by-date episode'
    logger.log(message, logger.DEBUG)
    return validate_name(pattern, abd=True)
Exemple #32
0
    def post(self, route, *args, **kwargs):
        """Dispatch a POST request to the handler method named by *route*.

        The URL fragment is normalized ('.' and '-' become '_'); an empty
        route maps to ``index``. Any failure is logged at DEBUG level and
        answered with HTTP 404.
        """
        try:
            # route -> method obj
            route = route.strip('/').replace('.', '_').replace('-', '_') or 'index'
            method = getattr(self, route)

            results = yield self.async_call(method)
            self.finish(results)

        except Exception:
            logger.log(u'Failed doing web ui post request {route!r}: {error}'.format
                       (route=route, error=traceback.format_exc()), logger.DEBUG)
            raise HTTPError(404)
Exemple #33
0
def save_nzb(nzb_name, nzb_string):
    """
    Write an NZB file to disk as ``<nzb_name>.nzb``.

    :param nzb_name: Filename/path to write to (extension is appended)
    :param nzb_string: Content to write in file
    """
    target = nzb_name + ".nzb"
    try:
        with open(target, 'w') as nzb_fh:
            nzb_fh.write(nzb_string)
    except EnvironmentError as error:
        # Best effort: a failed save is logged, not raised.
        logger.log(u"Unable to save NZB: " + ex(error), logger.ERROR)  # pylint: disable=no-member
Exemple #34
0
def check_valid_abd_naming(pattern=None):
    """
    Validate that an air-by-date naming pattern can be parsed back.

    :return: true if the naming is valid, false if not.
    """
    pattern = app.NAMING_PATTERN if pattern is None else pattern

    logger.log(u'Checking whether the pattern ' + pattern + ' is valid for an air-by-date episode', logger.DEBUG)
    valid = validate_name(pattern, abd=True)

    return valid
Exemple #35
0
    def post(self, route, *args, **kwargs):
        """Dispatch a POST request to the handler method named by *route*.

        The URL fragment is normalized ('.' and '-' become '_'); an empty
        route maps to ``index``. Any failure is logged at DEBUG level and
        answered with HTTP 404.
        """
        try:
            # route -> method obj
            route = route.strip('/').replace('.', '_').replace('-', '_') or 'index'
            method = getattr(self, route)

            results = yield self.async_call(method)
            self.finish(results)

        except Exception:
            logger.log(u'Failed doing web ui post request {route!r}: {error}'.format
                       (route=route, error=traceback.format_exc()), logger.DEBUG)
            raise HTTPError(404)
Exemple #36
0
    def _execute(self, query, args=None, fetchall=False, fetchone=False):
        """
        Executes DB query

        :param query: Query to execute
        :param args: Arguments in query
        :param fetchall: Boolean to indicate all results must be fetched
        :param fetchone: Boolean to indicate one result must be fetched (to walk results for instance)
        :return: query results
        """
        try:
            cursor = self.connection.cursor()
            if not args:
                sql_results = cursor.execute(query)
            else:
                sql_results = cursor.execute(query, args)
            if fetchall:
                return sql_results.fetchall()
            elif fetchone:
                return sql_results.fetchone()
            return sql_results
        except sqlite3.OperationalError as e:
            # This errors user should be able to fix it.
            # Lock/open/full conditions are only logged as warnings; the
            # caller is expected to retry.
            if 'unable to open database file' in e.args[0] or \
               'database is locked' in e.args[0] or \
               'database or disk is full' in e.args[0]:
                logger.log(u'DB error: {0!r}'.format(e), logger.WARNING)
            else:
                logger.log(u"Query: '{0}'. Arguments: '{1}'".format(query, args))
                logger.log(u'DB error: {0!r}'.format(e), logger.ERROR)
                raise
        except Exception as e:
            logger.log(u'DB error: {0!r}'.format(e), logger.ERROR)
            raise
Exemple #37
0
    def move_torrent(info_hash, release_names):
        """Move torrent to a given seeding folder after PP."""
        if not release_names:
            s = ''
            release_names = 'N/A'
        else:
            # Pluralize 'release' when more than one name was given.
            s = 's' if len(release_names) > 1 else ''
            release_names = ', '.join(release_names)

        logger.log('Trying to move torrent after post-processing', logger.DEBUG)
        client = torrent.get_client_class(app.TORRENT_METHOD)()
        try:
            torrent_moved = client.move_torrent(info_hash)
        except AttributeError:
            logger.log("Your client doesn't support moving torrents to new location", logger.WARNING)
            return False

        if not torrent_moved:
            logger.log("Couldn't move torrent for release{s} '{release}' with hash: {hash} to: '{path}'. "
                       "Please check logs.".format(release=release_names, hash=info_hash, s=s,
                                                   path=app.TORRENT_SEED_LOCATION), logger.WARNING)
            return False

        logger.log("Moved torrent for release{s} '{release}' with hash: {hash} to: '{path}'".format
                   (release=release_names, hash=info_hash, path=app.TORRENT_SEED_LOCATION, s=s),
                   logger.DEBUG)
        return True
Exemple #38
0
    def move_torrent(info_hash, release_names):
        """Move torrent to a given seeding folder after PP."""
        if release_names:
            # Log 'release' or 'releases'
            s = 's' if len(release_names) > 1 else ''
            release_names = ', '.join(release_names)
        else:
            s = ''
            release_names = 'N/A'

        logger.log('Trying to move torrent after post-processing', logger.DEBUG)
        client = torrent.get_client_class(app.TORRENT_METHOD)()
        torrent_moved = False
        try:
            torrent_moved = client.move_torrent(info_hash)
        except AttributeError:
            # Not every client implementation supports move_torrent.
            logger.log("Your client doesn't support moving torrents to new location", logger.WARNING)
            return False

        if torrent_moved:
            logger.log("Moved torrent for release{s} '{release}' with hash: {hash} to: '{path}'".format
                       (release=release_names, hash=info_hash, path=app.TORRENT_SEED_LOCATION, s=s),
                       logger.DEBUG)
            return True
        else:
            logger.log("Couldn't move torrent for release{s} '{release}' with hash: {hash} to: '{path}'. "
                       'Please check logs.'.format(release=release_names, hash=info_hash, s=s,
                                                   path=app.TORRENT_SEED_LOCATION), logger.WARNING)
            return False
Exemple #39
0
def restoreDatabase(version):
    """
    Restore the database from the backup made for the given version.

    The backup file (suffix ``v<version>``) must still exist.

    :param version: Version to restore to
    :return: True if restore succeeds, False if it fails
    """
    from medusa import helpers
    logger.log(u"Restoring database before trying upgrade again")
    backup_name = dbFilename(suffix='v' + str(version))
    if helpers.restore_versioned_file(backup_name, version):
        return True
    logger.log_error_and_exit(u"Database restore failed, abort upgrading database")
    return False
Exemple #40
0
    def getDBcompare():
        """Compare the checkout branch's DB version and report it as JSON."""
        checkversion = CheckVersion()  # @TODO: replace with settings var
        db_status = checkversion.getDBcompare()

        # One log line per recognized comparison outcome.
        log_lines = {
            'upgrade': u'Checkout branch has a new DB version - Upgrade',
            'equal': u'Checkout branch has the same DB version - Equal',
            'downgrade': u'Checkout branch has an old DB version - Downgrade',
        }

        if db_status in log_lines:
            logger.log(log_lines[db_status], logger.DEBUG)
            return json.dumps({
                'status': 'success',
                'message': db_status,
            })

        logger.log(u"Checkout branch couldn't compare DB version.",
                   logger.WARNING)
        return json.dumps({
            'status': 'error',
            'message': 'General exception',
        })
Exemple #41
0
 def testNZBget(host=None, username=None, password=None, use_https=False):
     """Test connecting and authenticating against an NZBget server."""
     try:
         https = config.checkbox_to_value(use_https)
         connected_status, error_msg = nzbget.test_authentication(
             host, username, password, https)
     except Exception as error:
         logger.log(
             'Error while testing NZBget connection: {error}'.format(
                 error=error), logger.WARNING)
         return 'Error while testing connection. Check warning logs.'
     if not connected_status:
         return 'Unable to connect to host. Error: {msg}'.format(
             msg=error_msg)
     return 'Success. Connected and authenticated'
Exemple #42
0
def revert_episode(ep_obj):
    """Restore the episodes of a failed download to their original state.

    Looks up the episode's pre-snatch status in the failed.db history and
    restores it; falls back to WANTED when no history entry exists.

    :param ep_obj: Episode object to revert
    """
    failed_db_con = db.DBConnection('failed.db')
    sql_results = failed_db_con.select(
        'SELECT episode, old_status '
        'FROM history '
        'WHERE showid=?'
        ' AND indexer_id=?'
        ' AND season=?',
        [ep_obj.series.indexerid, ep_obj.series.indexer, ep_obj.season])

    # Use the str key: the old `res[b'episode']` bytes key was inconsistent
    # with the str 'old_status' lookup below and fails on Python 3, where
    # sqlite3.Row only accepts str keys.
    history_eps = {res['episode']: res for res in sql_results}

    try:
        logger.log(
            u'Reverting episode status for {show} {ep}. Checking if we have previous status'
            .format(show=ep_obj.series.name,
                    ep=episode_num(ep_obj.season, ep_obj.episode)))
        with ep_obj.lock:
            if ep_obj.episode in history_eps:
                ep_obj.status = history_eps[ep_obj.episode]['old_status']
                logger.log(
                    u'Episode have a previous status to revert. Setting it back to {0}'
                    .format(statusStrings[ep_obj.status]), logger.DEBUG)
            else:
                logger.log(
                    u'Episode does not have a previous snatched status '
                    u'to revert. Setting it back to WANTED', logger.DEBUG)
                ep_obj.status = WANTED
            ep_obj.save_to_db()

    except EpisodeNotFoundException as error:
        logger.log(
            u'Unable to create episode, please set its status '
            u'manually: {error}'.format(error=error), logger.WARNING)
Exemple #43
0
    def _execute(self, query, args=None, fetchall=False, fetchone=False):
        """
        Executes DB query

        :param query: Query to execute
        :param args: Arguments in query
        :param fetchall: Boolean to indicate all results must be fetched
        :param fetchone: Boolean to indicate one result must be fetched (to walk results for instance)
        :return: query results
        """
        try:
            cursor = self.connection.cursor()
            if not args:
                sql_results = cursor.execute(query)
            else:
                sql_results = cursor.execute(query, args)
            if fetchall:
                return sql_results.fetchall()
            elif fetchone:
                return sql_results.fetchone()
            return sql_results
        except sqlite3.OperationalError as e:
            # This errors user should be able to fix it.
            # Lock/open/full conditions are only logged as warnings; the
            # caller is expected to retry.
            if 'unable to open database file' in e.args[0] or \
               'database is locked' in e.args[0] or \
               'database or disk is full' in e.args[0]:
                logger.log(u'DB error: {0!r}'.format(e), logger.WARNING)
            else:
                logger.log(u"Query: '{0}'. Arguments: '{1}'".format(
                    query, args))
                logger.log(u'DB error: {0!r}'.format(e), logger.ERROR)
                raise
        except Exception as e:
            logger.log(u'DB error: {0!r}'.format(e), logger.ERROR)
            raise
Exemple #44
0
    def index(self):
        """Render the changelog page from the remote CHANGES.md.

        Falls back to a link-only message when the changelog cannot be
        fetched, then renders the markdown through the generic template.
        """
        # TODO: SESSION: Check if this needs some more explicit exception handling.
        changes = HomeChangeLog.session.get_text(app.CHANGES_URL)

        if not changes:
            logger.log('Could not load changes from repo, giving a link!', logger.DEBUG)
            changes = 'Could not load changes from the repo. [Click here for CHANGES.md]({url})'.format(url=app.CHANGES_URL)

        t = PageTemplate(rh=self, filename='markdown.mako')
        # `changes` is always non-empty here (the fallback above guarantees
        # it), so the old `changes if changes else ...` ternary was dead code.
        data = markdown2.markdown(changes, extras=['header-ids'])

        return t.render(title='Changelog', header='Changelog', topmenu='system', data=data, controller='changes', action='index')
Exemple #45
0
def revert_episode(ep_obj):
    """Restore the episodes of a failed download to their original state."""
    failed_db_con = db.DBConnection('failed.db')
    sql_results = failed_db_con.select(
        'SELECT episode, status, quality '
        'FROM history '
        'WHERE showid=?'
        ' AND indexer_id=?'
        ' AND season=?',
        [ep_obj.series.indexerid, ep_obj.series.indexer, ep_obj.season]
    )

    # Index the history rows by episode number.
    history_eps = {}
    for res in sql_results:
        history_eps[res['episode']] = res

    try:
        logger.log(u'Reverting episode status for {show} {ep}. Checking if we have previous status'.format
                   (show=ep_obj.series.name, ep=episode_num(ep_obj.season, ep_obj.episode)))
        with ep_obj.lock:
            previous = history_eps.get(ep_obj.episode)
            if previous is not None:
                ep_obj.status = previous['status']
                logger.log(u'Episode have a previous status to revert. Setting it back to {0}'.format
                           (statusStrings[ep_obj.status]), logger.DEBUG)
            else:
                logger.log(u'Episode does not have a previous snatched status '
                           u'to revert. Setting it back to WANTED',
                           logger.DEBUG)
                ep_obj.status = WANTED
            ep_obj.save_to_db()

    except EpisodeNotFoundException as error:
        logger.log(u'Unable to create episode, please set its status '
                   u'manually: {error}'.format(error=error),
                   logger.WARNING)
def get_network_timezone(network, _network_dict):
    """
    Get a timezone of a network from a given network dict.

    :param network: network to look up (needle)
    :param _network_dict: dict to look up in (haystack)
    :return: tzinfo for the network, or the application timezone when unknown
    """
    # Get the name of the networks timezone from _network_dict
    network_tz_name = _network_dict.get(network)

    if not network_tz_name and network and network not in missing_network_timezones:
        # Log each unknown network only once per run. The truthiness check
        # on `network` above already excludes None, so the old inner
        # `if network is not None` guard was redundant and has been removed.
        missing_network_timezones.add(network)
        logger.log('Missing time zone for network: %s' % network, logger.ERROR)

    return tz.gettz(network_tz_name) if network_tz_name else app_timezone
Exemple #47
0
    def render(self, *args, **kwargs):
        """Render the Page template.

        Missing keyword arguments are filled in from ``self.arguments``; on
        any rendering error the Mako traceback is logged and the 500 error
        template is rendered instead.
        """
        for key in self.arguments:
            if key not in kwargs:
                kwargs[key] = self.arguments[key]

        # Expose the render start time to templates (used for timing info).
        kwargs['makoStartTime'] = time.time()
        try:
            return self.template.render_unicode(*args, **kwargs)
        except Exception:
            kwargs['backtrace'] = RichTraceback()
            for (filename, lineno, function, _) in kwargs['backtrace'].traceback:
                logger.log(u'File {name}, line {line}, in {func}'.format
                           (name=filename, line=lineno, func=function), logger.DEBUG)
            logger.log(u'{name}: {error}'.format
                       (name=kwargs['backtrace'].error.__class__.__name__, error=kwargs['backtrace'].error))
            return get_lookup().get_template('500.mako').render_unicode(*args, **kwargs)
Exemple #48
0
def _processUpgrade(connection, upgradeClass):
    """Run one upgrade class when its test says it is needed, then recurse into its subclasses."""
    instance = upgradeClass(connection)
    name = upgradeClass.__name__
    logger.log(u'Checking ' + prettyName(name) + ' database upgrade', logger.DEBUG)

    if instance.test():
        logger.log(name + ' upgrade not required', logger.DEBUG)
    else:
        logger.log(u'Database upgrade required: ' + prettyName(name), logger.DEBUG)
        try:
            instance.execute()
        except Exception as e:
            logger.log('Error in ' + str(name) + ': ' + ex(e), logger.ERROR)
            raise
        logger.log(name + ' upgrade completed', logger.DEBUG)

    # Upgrades form a class hierarchy; apply every subclass in turn.
    for upgradeSubClass in upgradeClass.__subclasses__():
        _processUpgrade(connection, upgradeSubClass)
Exemple #49
0
    def action(self, query, args=None, fetchall=False, fetchone=False):
        """
        Execute single query

        Retries up to 5 times when the database is locked or the file cannot
        be opened; any other sqlite error is logged and re-raised.

        :param query: Query string
        :param args: Arguments to query string
        :param fetchall: Boolean to indicate all results must be fetched
        :param fetchone: Boolean to indicate one result must be fetched (to walk results for instance)
        :return: query results
        """
        if query is None:
            return

        sql_results = None
        attempt = 0

        # Serialize access per database file.
        with db_locks[self.filename]:
            self._set_row_factory()
            while attempt < 5:
                try:
                    if args is None:
                        logger.log(self.filename + ': ' + query, logger.DB)
                    else:
                        logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB)

                    sql_results = self._execute(query, args, fetchall=fetchall, fetchone=fetchone)
                    self.connection.commit()

                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError as e:
                    # Transient lock/open failures: wait one second and retry.
                    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                    raise

            # time.sleep(0.02)

            return sql_results
Exemple #50
0
    def get(self, *args, **kwargs):
        """
        Get api key as json response.

        Credentials may be supplied via the 'u' and 'p' query arguments; an
        empty configured username/password disables that half of the check.
        """
        api_key = None

        try:
            username = app.WEB_USERNAME
            password = app.WEB_PASSWORD

            if (self.get_argument('u', None) == username or not username) and \
                    (self.get_argument('p', None) == password or not password):
                api_key = app.API_KEY

            self.finish({'success': api_key is not None, 'api_key': api_key})
        except Exception:
            logger.log('Failed doing key request: {error}'.format(error=traceback.format_exc()), logger.ERROR)
            self.finish({'success': False, 'error': 'Failed returning results'})
Exemple #51
0
def mark_failed(ep_obj):
    """
    Mark an episode as failed.

    :param ep_obj: Episode object to mark as failed
    :return: empty string
    """
    try:
        with ep_obj.lock:
            ep_obj.status = FAILED
            ep_obj.save_to_db()
    except EpisodeNotFoundException as error:
        logger.log(u'Unable to get episode, please set its status '
                   u'manually: {error}'.format(error=error),
                   logger.WARNING)

    # Callers expect an (always empty) log string.
    return u''
Exemple #52
0
def check_valid_naming(pattern=None, multi=None, anime_type=None):
    """
    Check whether a naming pattern round-trips for single and multi episodes.

    :return: true if the naming is valid, false if not.
    """
    pattern = app.NAMING_PATTERN if pattern is None else pattern
    anime_type = app.NAMING_ANIME if anime_type is None else anime_type

    logger.log(u'Checking whether the pattern ' + pattern + ' is valid for a single episode', logger.DEBUG)
    valid = validate_name(pattern, None, anime_type)

    # A multi-episode style is validated on top of the single-episode check.
    if multi is not None:
        logger.log(u'Checking whether the pattern ' + pattern + ' is valid for a multi episode', logger.DEBUG)
        valid = valid and validate_name(pattern, multi, anime_type)

    return valid
Exemple #53
0
    def delete_folder(folder, check_empty=True):
        """
        Remove a folder from the filesystem.

        :param folder: Path to folder to remove
        :param check_empty: Boolean, check if the folder is empty before removing it, defaults to True
        :return: True on success, False on failure
        """
        # Only operate on real directories.
        if not os.path.isdir(folder):
            return False

        # Never delete the TV download directory itself.
        if app.TV_DOWNLOAD_DIR and helpers.real_path(folder) == helpers.real_path(app.TV_DOWNLOAD_DIR):
            return False

        if not check_empty:
            try:
                logger.log('Deleting folder: {0}'.format(folder))
                shutil.rmtree(folder)
            except (OSError, IOError) as error:
                logger.log('Unable to delete folder: {0}: {1}'.format(folder, ex(error)), logger.WARNING)
                return False
            return True

        contents = os.listdir(folder)
        if contents:
            logger.log('Not deleting folder {0} found the following files: {1}'.format
                       (folder, contents), logger.INFO)
            return False

        try:
            logger.log("Deleting folder (if it's empty): {0}".format(folder))
            os.rmdir(folder)
        except (OSError, IOError) as error:
            logger.log('Unable to delete folder: {0}: {1}'.format(folder, ex(error)), logger.WARNING)
            return False

        return True
Exemple #54
0
    def subtitles_enabled(*args):
        """Try to parse names to a show and check whether the show has subtitles enabled.

        :param args: one or more release/file names to try parsing
        :return: whether the first parsable name's show wants subtitles
        :rtype: bool
        """
        for name in args:
            if not name:
                continue

            try:
                parse_result = NameParser().parse(name)
                if parse_result.series.indexerid:
                    main_db_con = db.DBConnection()
                    sql_results = main_db_con.select('SELECT subtitles FROM tv_shows WHERE indexer = ? AND indexer_id = ? LIMIT 1',
                                                     [parse_result.series.indexer, parse_result.series.indexerid])
                    # First name that parses to an indexer id decides the
                    # result; any remaining args are ignored.
                    return bool(sql_results[0]['subtitles']) if sql_results else False

                logger.log('Empty indexer ID for: {name}'.format(name=name), logger.WARNING)
            except (InvalidNameException, InvalidShowException):
                logger.log('Not enough information to parse filename into a valid show. Consider adding scene '
                           'exceptions or improve naming for: {name}'.format(name=name), logger.WARNING)
        return False
Exemple #55
0
def find_release(ep_obj):
    """
    Find releases in history by show ID and season.

    Return None for release if multiple found or no release found.

    :param ep_obj: episode object whose snatch history is searched
    :return: tuple of (release, provider); both None when nothing was found
    """
    release = None
    provider = None

    # Clear old snatches for this release if any exist.
    # Use bound parameters (like the SELECT/DELETE below) rather than
    # str.format interpolation, which is injection-prone and inconsistent.
    failed_db_con = db.DBConnection('failed.db')
    failed_db_con.action(
        'DELETE FROM history '
        'WHERE showid = ?'
        ' AND season = ?'
        ' AND episode = ?'
        ' AND indexer_id = ?'
        ' AND date < ( SELECT max(date)'
        '              FROM history'
        '              WHERE showid = ?'
        '               AND season = ?'
        '               AND episode = ?'
        '               AND indexer_id = ?'
        '             )',
        [ep_obj.series.indexerid, ep_obj.season, ep_obj.episode, ep_obj.series.indexer,
         ep_obj.series.indexerid, ep_obj.season, ep_obj.episode, ep_obj.series.indexer]
    )

    # Search for release in snatch history
    results = failed_db_con.select(
        'SELECT release, provider, date '
        'FROM history '
        'WHERE showid=?'
        ' AND season=?'
        ' AND episode=?'
        ' AND indexer_id=?',
        [ep_obj.series.indexerid, ep_obj.season, ep_obj.episode, ep_obj.series.indexer]
    )

    for result in results:
        release = str(result['release'])
        provider = str(result['provider'])
        date = result['date']

        # Clear any incomplete snatch records for this release if any exist
        failed_db_con.action(
            'DELETE FROM history '
            'WHERE release=?'
            ' AND date!=?',
            [release, date]
        )

        # Found a previously failed release
        logger.log(u'Failed release found for {show} {ep}: {release}'.format
                   (show=ep_obj.series.name, ep=episode_num(ep_obj.season, ep_obj.episode),
                    release=result['release']), logger.DEBUG)
        return release, provider

    # Release was not found
    logger.log(u'No releases found for {show} {ep}'.format
               (show=ep_obj.series.name, ep=episode_num(ep_obj.season, ep_obj.episode)), logger.DEBUG)
    return release, provider
Exemple #56
0
    def massEditSubmit(self, paused=None, default_ep_status=None, dvd_order=None,
                       anime=None, sports=None, scene=None, season_folders=None, quality_preset=None,
                       subtitles=None, air_by_date=None, allowed_qualities=None, preferred_qualities=None, toEdit=None, *args,
                       **kwargs):
        """
        Apply the mass-edit form settings to every selected show.

        :param toEdit: '|'-separated list of series slugs to edit
        :param kwargs: includes paired ``orig_root_dir_N`` / ``new_root_dir_N``
            fields describing root-directory moves
        :return: redirect to the /manage/ page; accumulated per-show errors are
            reported via a UI notification
        """
        allowed_qualities = allowed_qualities or []
        preferred_qualities = preferred_qualities or []

        def tri_state(new_value, current):
            # Form fields post 'keep' / 'enable' / 'disable'; editShow expects
            # checkbox-style 'on' / 'off' strings.
            enabled = current if new_value == 'keep' else new_value == 'enable'
            return 'on' if enabled else 'off'

        # Map each original root directory to its (possibly new) replacement.
        dir_map = {}
        for cur_arg in kwargs:
            if not cur_arg.startswith('orig_root_dir_'):
                continue
            which_index = cur_arg.replace('orig_root_dir_', '')
            end_dir = kwargs['new_root_dir_{index}'.format(index=which_index)]
            dir_map[kwargs[cur_arg]] = end_dir

        series_slugs = toEdit.split('|') if toEdit else []
        errors = 0
        for series_slug in series_slugs:
            identifier = SeriesIdentifier.from_slug(series_slug)
            series_obj = Series.find_by_identifier(identifier)

            if not series_obj:
                continue

            cur_root_dir = os.path.dirname(series_obj._location)
            cur_show_dir = os.path.basename(series_obj._location)
            if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]:
                new_show_dir = os.path.join(dir_map[cur_root_dir], cur_show_dir)
                logger.log(u'For show {show.name} changing dir from {show._location} to {location}'.format
                           (show=series_obj, location=new_show_dir))
            else:
                new_show_dir = series_obj._location

            # Resolve every tri-state checkbox against the show's current value.
            new_paused = tri_state(paused, series_obj.paused)
            new_anime = tri_state(anime, series_obj.anime)
            new_sports = tri_state(sports, series_obj.sports)
            new_scene = tri_state(scene, series_obj.is_scene)
            new_air_by_date = tri_state(air_by_date, series_obj.air_by_date)
            new_dvd_order = tri_state(dvd_order, series_obj.dvd_order)
            new_season_folders = tri_state(season_folders, series_obj.season_folders)
            new_subtitles = tri_state(subtitles, series_obj.subtitles)

            # Status is a plain value, not a checkbox: pass through unless kept.
            if default_ep_status == 'keep':
                new_default_ep_status = series_obj.default_ep_status
            else:
                new_default_ep_status = default_ep_status

            if quality_preset == 'keep':
                allowed_qualities, preferred_qualities = series_obj.current_qualities
            elif try_int(quality_preset, None):
                # A numeric preset overrides any preferred-quality selection.
                preferred_qualities = []

            exceptions_list = []

            errors += self.editShow(identifier.indexer.slug, identifier.id, new_show_dir, allowed_qualities,
                                    preferred_qualities, exceptions_list,
                                    defaultEpStatus=new_default_ep_status,
                                    season_folders=new_season_folders,
                                    paused=new_paused, sports=new_sports, dvd_order=new_dvd_order,
                                    subtitles=new_subtitles, anime=new_anime,
                                    scene=new_scene, air_by_date=new_air_by_date,
                                    directCall=True)

        if errors:
            ui.notifications.error('Errors', '{num} error{s} while saving changes. Please check logs'.format
                                   (num=errors, s='s' if errors > 1 else ''))

        return self.redirect('/manage/')
Exemple #57
0
 def log(self, message, level=logger.INFO):
     """Log *message* at *level* and keep a copy in this object's output buffer."""
     logger.log(message, level)
     # Retain the message so callers can inspect everything that was logged.
     self._output.append(message)
Exemple #58
0
    def mass_action(self, querylist=None, logTransaction=False, fetchall=False):
        """
        Execute multiple queries

        :param querylist: list of queries
        :param logTransaction: Boolean to wrap all in one transaction
        :param fetchall: Boolean, when using a select query force returning all results
        :return: list of results
        """
        # Remove Falsey types. Materialize as a list (not a generator): a
        # generator would be partially exhausted after a failed attempt, so a
        # retry of the while-loop below would silently skip the query that
        # failed and every query consumed before it.
        querylist = [q for q in querylist or [] if q]

        sql_results = []
        attempt = 0

        with db_locks[self.filename]:
            self._set_row_factory()
            # Retry up to 5 times on transient errors (locked/unopenable db).
            while attempt < 5:
                try:
                    for qu in querylist:
                        if len(qu) == 1:
                            # Query without bound arguments.
                            if logTransaction:
                                logger.log(qu[0], logger.DEBUG)
                            sql_results.append(self._execute(qu[0], fetchall=fetchall))
                        elif len(qu) > 1:
                            # Query with a parameter list as the second item.
                            if logTransaction:
                                logger.log(qu[0] + ' with args ' + str(qu[1]), logger.DEBUG)
                            sql_results.append(self._execute(qu[0], qu[1], fetchall=fetchall))
                    self.connection.commit()
                    logger.log(u'Transaction with ' + str(len(sql_results)) + u' queries executed', logger.DEBUG)

                    # finished
                    break
                except sqlite3.OperationalError as e:
                    # Roll back and retry only for transient conditions;
                    # anything else is re-raised to the caller.
                    sql_results = []
                    self._try_rollback()
                    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    sql_results = []
                    self._try_rollback()
                    logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                    raise

            return sql_results
Exemple #59
0
    def saveGeneral(self, log_dir=None, log_nr=5, log_size=1, web_port=None, notify_on_login=None, web_log=None, encryption_version=None, web_ipv6=None,
                    trash_remove_show=None, trash_rotate_logs=None, update_frequency=None, skip_removed_files=None,
                    indexerDefaultLang='en', ep_default_deleted_status=None, launch_browser=None, showupdate_hour=3, web_username=None,
                    api_key=None, indexer_default=None, timezone_display=None, cpu_preset='NORMAL', layout_wide=None,
                    web_password=None, version_notify=None, enable_https=None, https_cert=None, https_key=None,
                    handle_reverse_proxy=None, sort_article=None, auto_update=None, notify_on_update=None,
                    proxy_setting=None, proxy_indexers=None, anon_redirect=None, git_path=None, git_remote=None,
                    calendar_unprotected=None, calendar_icons=None, debug=None, ssl_verify=None, no_restart=None, coming_eps_missed_range=None,
                    fuzzy_dating=None, trim_zero=None, date_preset=None, date_preset_na=None, time_preset=None,
                    indexer_timeout=None, download_url=None, rootDir=None, theme_name=None, default_page=None,
                    git_reset=None, git_reset_branches=None, git_auth_type=0, git_username=None, git_password=None, git_token=None,
                    display_all_seasons=None, subliminal_log=None, privacy_level='normal', fanart_background=None, fanart_background_opacity=None,
                    dbdebug=None, fallback_plex_enable=1, fallback_plex_notifications=1, fallback_plex_timeout=3, web_root=None, ssl_ca_bundle=None):
        """
        Save General configuration settings.

        Form values arrive as raw strings/checkbox values and are normalized
        into the ``app`` globals. Failures (e.g. uncreatable directories) are
        collected in ``results`` and shown as one error notification;
        otherwise a "Configuration Saved" message is displayed. Redirects to
        the general config page when done.
        """
        results = []

        # Misc
        app.DOWNLOAD_URL = download_url
        app.INDEXER_DEFAULT_LANGUAGE = indexerDefaultLang
        app.EP_DEFAULT_DELETED_STATUS = int(ep_default_deleted_status)
        app.SKIP_REMOVED_FILES = config.checkbox_to_value(skip_removed_files)
        app.LAUNCH_BROWSER = config.checkbox_to_value(launch_browser)
        config.change_SHOWUPDATE_HOUR(showupdate_hour)
        config.change_VERSION_NOTIFY(config.checkbox_to_value(version_notify))
        app.AUTO_UPDATE = config.checkbox_to_value(auto_update)
        app.NOTIFY_ON_UPDATE = config.checkbox_to_value(notify_on_update)
        # app.LOG_DIR is set in config.change_LOG_DIR()
        app.LOG_NR = log_nr
        app.LOG_SIZE = float(log_size)

        app.TRASH_REMOVE_SHOW = config.checkbox_to_value(trash_remove_show)
        app.TRASH_ROTATE_LOGS = config.checkbox_to_value(trash_rotate_logs)
        config.change_UPDATE_FREQUENCY(update_frequency)
        app.SORT_ARTICLE = config.checkbox_to_value(sort_article)
        app.CPU_PRESET = cpu_preset
        app.ANON_REDIRECT = anon_redirect
        app.PROXY_SETTING = proxy_setting
        app.PROXY_INDEXERS = config.checkbox_to_value(proxy_indexers)
        app.GIT_AUTH_TYPE = int(git_auth_type)
        app.GIT_USERNAME = git_username
        app.GIT_PASSWORD = git_password
        app.GIT_TOKEN = git_token
        app.GIT_RESET = config.checkbox_to_value(git_reset)
        app.GIT_RESET_BRANCHES = [helpers.to_text(branch) for branch in
                                  helpers.ensure_list(git_reset_branches)]
        # Only re-validate the git path when it actually changed.
        if app.GIT_PATH != git_path:
            app.GIT_PATH = git_path
            config.change_GIT_PATH()
        app.GIT_REMOTE = git_remote
        app.CALENDAR_UNPROTECTED = config.checkbox_to_value(calendar_unprotected)
        app.CALENDAR_ICONS = config.checkbox_to_value(calendar_icons)
        app.NO_RESTART = config.checkbox_to_value(no_restart)

        app.SSL_VERIFY = config.checkbox_to_value(ssl_verify)
        app.SSL_CA_BUNDLE = ssl_ca_bundle
        # app.LOG_DIR is set in config.change_LOG_DIR()
        app.COMING_EPS_MISSED_RANGE = int(coming_eps_missed_range)
        app.DISPLAY_ALL_SEASONS = config.checkbox_to_value(display_all_seasons)
        app.NOTIFY_ON_LOGIN = config.checkbox_to_value(notify_on_login)
        app.WEB_PORT = int(web_port)
        app.WEB_IPV6 = config.checkbox_to_value(web_ipv6)
        # Checkbox on means encryption version 2; otherwise disabled.
        if config.checkbox_to_value(encryption_version) == 1:
            app.ENCRYPTION_VERSION = 2
        else:
            app.ENCRYPTION_VERSION = 0
        app.WEB_USERNAME = web_username
        app.WEB_PASSWORD = web_password
        app.WEB_ROOT = web_root

        app.DEBUG = config.checkbox_to_value(debug)
        app.DBDEBUG = config.checkbox_to_value(dbdebug)
        app.WEB_LOG = config.checkbox_to_value(web_log)
        app.SUBLIMINAL_LOG = config.checkbox_to_value(subliminal_log)

        # Added for tvdb / plex fallback
        app.FALLBACK_PLEX_ENABLE = config.checkbox_to_value(fallback_plex_enable)
        app.FALLBACK_PLEX_NOTIFICATIONS = config.checkbox_to_value(fallback_plex_notifications)
        app.FALLBACK_PLEX_TIMEOUT = try_int(fallback_plex_timeout)

        if not config.change_LOG_DIR(log_dir):
            results += ['Unable to create directory {dir}, '
                        'log directory not changed.'.format(dir=os.path.normpath(log_dir))]

        # Reconfigure the logger
        logger.reconfigure()

        # Validate github credentials
        try:
            if app.GIT_AUTH_TYPE == 0:
                github_client.authenticate(app.GIT_USERNAME, app.GIT_PASSWORD)
            else:
                github = github_client.token_authenticate(app.GIT_TOKEN)
                if app.GIT_USERNAME and app.GIT_USERNAME != github_client.get_user(gh=github):
                    app.GIT_USERNAME = github_client.get_user(gh=github)
        except (GithubException, IOError):
            logger.log('Error while validating your Github credentials.', logger.WARNING)

        app.PRIVACY_LEVEL = privacy_level.lower()

        app.FUZZY_DATING = config.checkbox_to_value(fuzzy_dating)
        app.TRIM_ZERO = config.checkbox_to_value(trim_zero)

        if date_preset:
            app.DATE_PRESET = date_preset

        if indexer_default:
            app.INDEXER_DEFAULT = try_int(indexer_default)

        if indexer_timeout:
            app.INDEXER_TIMEOUT = try_int(indexer_timeout)

        if time_preset:
            app.TIME_PRESET_W_SECONDS = time_preset
            app.TIME_PRESET = app.TIME_PRESET_W_SECONDS.replace(u':%S', u'')

        app.TIMEZONE_DISPLAY = timezone_display

        app.API_KEY = api_key

        app.ENABLE_HTTPS = config.checkbox_to_value(enable_https)

        if not config.change_HTTPS_CERT(https_cert):
            results += ['Unable to create directory {dir}, '
                        'https cert directory not changed.'.format(dir=os.path.normpath(https_cert))]

        if not config.change_HTTPS_KEY(https_key):
            results += ['Unable to create directory {dir}, '
                        'https key directory not changed.'.format(dir=os.path.normpath(https_key))]

        app.HANDLE_REVERSE_PROXY = config.checkbox_to_value(handle_reverse_proxy)

        config.change_theme(theme_name)

        app.LAYOUT_WIDE = config.checkbox_to_value(layout_wide)
        app.FANART_BACKGROUND = config.checkbox_to_value(fanart_background)
        app.FANART_BACKGROUND_OPACITY = fanart_background_opacity

        app.DEFAULT_PAGE = default_page

        app.instance.save_config()

        if results:
            for x in results:
                logger.log(x, logger.ERROR)
            ui.notifications.error('Error(s) Saving Configuration',
                                   '<br>\n'.join(results))
        else:
            # os.path.join() with a single argument was a no-op; use the
            # config file path directly.
            ui.notifications.message('Configuration Saved', app.CONFIG_FILE)

        return self.redirect('/config/general/')