Example #1
    def _run_extra_scripts(self, ep_obj):
        """
        Executes any extra scripts defined in the config.

        ep_obj: The object to use when calling the extra script
        """
        for curScriptName in sickbeard.EXTRA_SCRIPTS:

            # generate a safe command line string to execute the script and provide all the parameters
            script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", curScriptName) if piece.strip()]
            script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
            self._log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)

            script_cmd = script_cmd + [ep_obj.location, self.file_path, str(ep_obj.show.indexerid), str(ep_obj.season),
                                       str(ep_obj.episode), str(ep_obj.airdate)]

            # use subprocess to run the command and capture output
            self._log(u"Executing command " + str(script_cmd))
            try:
                p = subprocess.Popen(script_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
                out, err = p.communicate()  # @UnusedVariable
                self._log(u"Script result: " + str(out), logger.DEBUG)

            except OSError, e:
                self._log(u"Unable to run extra_script: " + ex(e))

            except Exception, e:
                self._log(u"Unable to run extra_script: " + ex(e))
Example #2
    def _makeURL(self, result):
        urls = []
        filename = u''
        if result.url.startswith('magnet'):
            try:
                torrent_hash = re.findall('urn:btih:([\w]{32,40})', result.url)[0].upper()
                torrent_name = re.findall('dn=([^&]+)', result.url)[0]

                if len(torrent_hash) == 32:
                    torrent_hash = b16encode(b32decode(torrent_hash)).upper()

                if not torrent_hash:
                    logger.log("Unable to extract torrent hash from magnet: " + ex(result.url), logger.ERROR)
                    return (urls, filename)

                urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.btCacheURLS]
            except:
                logger.log("Unable to extract torrent hash or name from magnet: " + ex(result.url), logger.ERROR)
                return (urls, filename)
        else:
            urls = [result.url]

        if self.providerType == GenericProvider.TORRENT:
            filename = ek.ek(os.path.join, sickbeard.TORRENT_DIR,
                             helpers.sanitizeFileName(result.name) + '.' + self.providerType)

        elif self.providerType == GenericProvider.NZB:
            filename = ek.ek(os.path.join, sickbeard.NZB_DIR,
                             helpers.sanitizeFileName(result.name) + '.' + self.providerType)

        return (urls, filename)
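
The magnet-link handling above reduces to a small standard-library helper. A minimal sketch, assuming only re and base64; the helper name and the magnet URI below are made up for illustration.

import re
from base64 import b16encode, b32decode


def magnet_to_hash(magnet):
    # Pull the btih info-hash out of a magnet URI and normalise it to 40 hex characters,
    # converting 32-character base32 hashes the same way _makeURL() does above.
    match = re.search(r'urn:btih:(\w{32,40})', magnet)
    if not match:
        return None
    torrent_hash = match.group(1).upper()
    if len(torrent_hash) == 32:
        torrent_hash = b16encode(b32decode(torrent_hash)).decode('ascii').upper()
    return torrent_hash


# Illustrative magnet link; the hash is arbitrary, not a real torrent.
print(magnet_to_hash('magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&dn=Example.S01E01'))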
Example #3
    def test_notify(self, username, disable_ssl, blacklist_name=None):
        """
        Sends a test notification to Trakt with the given authentication info.

        username: The Trakt username to use
        disable_ssl: Whether to disable SSL verification when talking to Trakt
        blacklist_name: slug of the Trakt list used to hide shows we are not interested in

        Returns: a message string describing the result of the test
        """
        try:
            trakt_api = TraktAPI(disable_ssl, sickbeard.TRAKT_TIMEOUT)
            trakt_api.validateAccount()
            if blacklist_name and blacklist_name is not None:
                trakt_lists = trakt_api.traktRequest("users/" + username + "/lists")
                found = False
                for trakt_list in trakt_lists:
                    if (trakt_list['ids']['slug'] == blacklist_name):
                        return "Test notice sent successfully to Trakt"
                if not found:
                    return "Trakt blacklist doesn't exists"
            else:
                return "Test notice sent successfully to Trakt"
        except (traktException, traktAuthException, traktServerBusy) as e:
            logger.log(u"Could not connect to Trakt service: %s" % ex(e), logger.WARNING)
            return "Test notice failed to Trakt: %s" % ex(e)
Example #4
def findSeason(show, season):

    logger.log(u"Searching for stuff we need from "+show.name+" season "+str(season))

    foundResults = {}

    didSearch = False

    for curProvider in providers.sortedProviderList():

        if not curProvider.isActive():
            continue

        try:
            curResults = curProvider.findSeasonResults(show, season)

            # make a list of all the results for this provider
            for curEp in curResults:

                # skip non-tv crap
                curResults[curEp] = filter(lambda x:  show_name_helpers.filterBadReleases(x.name, show) and show_name_helpers.isGoodResult(x.name, show), curResults[curEp])

                if curEp in foundResults:
                    foundResults[curEp] += curResults[curEp]
                else:
                    foundResults[curEp] = curResults[curEp]

        except exceptions.AuthException, e:
            logger.log(u"Authentication error: "+ex(e), logger.ERROR)
            continue
        except Exception, e:
            logger.log(u"Error while searching "+curProvider.name+", skipping: "+ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
Example #5
def searchForNeededEpisodes():

    logger.log(u"Searching all providers for any needed episodes")

    foundResults = {}

    didSearch = False

    # ask all providers for any episodes it finds
    for curProvider in providers.sortedProviderList():

        if not curProvider.isActive():
            continue

        curFoundResults = {}

        try:
            curFoundResults = curProvider.searchRSS()
        except exceptions.AuthException, e:
            logger.log(u"Authentication error: "+ex(e), logger.ERROR)
            continue
        except Exception, e:
            logger.log(u"Error while searching "+curProvider.name+", skipping: "+ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            continue
Example #6
File: db.py Project: 3ne/SickRage
    def action(self, query, args=None):

        with db_lock:

            if query == None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                time.sleep(0.01)
                try:
                    if args == None:
                        logger.log(self.filename + ": " + query, logger.DB)
                        sqlResult = self.connection.execute(query)
                    else:
                        logger.log(self.filename + ": " + query + " with args " + str(args), logger.DB)
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError, e:
                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                        logger.log(u"DB error: " + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u"DB error: " + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError, e:
                    logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
                    raise
Example #7
    def action(self, query, args=None):

        with db_lock:

            if query is None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                try:
                    if args is None:
                        logger.log(self.filename + ': ' + query, logger.DB)
                        sqlResult = self.connection.execute(query)
                    else:
                        logger.log(self.filename + ': ' + query + ' with args ' + str(args), logger.DB)
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError as e:
                    if 'unable to open database file' in e.args[0] or 'database is locked' in e.args[0]:
                        logger.log(u'DB error: ' + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u'DB error: ' + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError as e:
                    logger.log(u'Fatal error executing query: ' + ex(e), logger.ERROR)
                    raise

            return sqlResult
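
The retry-on-lock idea behind action() can be shown in isolation. A minimal sketch against an in-memory SQLite database; the helper name and the demo table are illustrative, not SickRage's actual API.

import sqlite3
import time


def execute_with_retry(connection, query, args=None, retries=5):
    # Run one query, retrying briefly when SQLite reports the database as locked,
    # in the same spirit as the action() method above.
    for _ in range(retries):
        try:
            cursor = connection.execute(query) if args is None else connection.execute(query, args)
            connection.commit()
            return cursor
        except sqlite3.OperationalError as e:
            if 'database is locked' in str(e) or 'unable to open database file' in str(e):
                time.sleep(1)  # transient condition: wait and try again
                continue
            raise              # anything else is a real error
    raise sqlite3.OperationalError('giving up after %d attempts' % retries)


# Quick demonstration against an in-memory database.
conn = sqlite3.connect(':memory:')
execute_with_retry(conn, 'CREATE TABLE tv_episodes (episode INTEGER, status INTEGER)')
execute_with_retry(conn, 'INSERT INTO tv_episodes VALUES (?, ?)', (1, 3))
print(execute_with_retry(conn, 'SELECT episode, status FROM tv_episodes').fetchall())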
Example #8
    def _request(self, method='get', params=None, data=None, files=None, **kwargs):

        params = params or {}

        if time.time() > self.last_time + 1800 or not self.auth:
            self.last_time = time.time()
            self._get_auth()

        logger.log('%s: sending %s request to %s with ...' % (self.name, method.upper(), self.url), logger.DEBUG)
        lines = [('params', (str(params), '')[not params]),
                 ('data', (str(data), '')[not data]),
                 ('files', (str(files), '')[not files]),
                 ('json', (str(kwargs.get('json')), '')[not kwargs.get('json')])]
        m, c = 300, 100
        type_chunks = [(linetype, [ln[i:i + c] for i in range(0, min(len(ln), m), c)]) for linetype, ln in lines if ln]
        for (arg, chunks) in type_chunks:
            output = []
            nch = len(chunks) - 1
            for i, seg in enumerate(chunks):
                if nch == i and 'files' == arg:
                    sample = ' ..excerpt(%s/%s)' % (m, len(lines[2][1]))
                    seg = seg[0:c - (len(sample) - 2)] + sample
                output += ['%s: request %s= %s%s%s' % (self.name, arg, ('', '..')[bool(i)], seg, ('', '..')[i != nch])]
            for out in output:
                logger.log(out, logger.DEBUG)

        if not self.auth:
            logger.log('%s: Authentication Failed' % self.name, logger.ERROR)
            return False
        try:
            response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
                                                             timeout=kwargs.pop('timeout', 120), verify=False, **kwargs)
        except requests.exceptions.ConnectionError as e:
            logger.log('%s: Unable to connect %s' % (self.name, ex(e)), logger.ERROR)
            return False
        except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
            logger.log('%s: Invalid Host' % self.name, logger.ERROR)
            return False
        except requests.exceptions.HTTPError as e:
            logger.log('%s: Invalid HTTP Request %s' % (self.name, ex(e)), logger.ERROR)
            return False
        except requests.exceptions.Timeout as e:
            logger.log('%s: Connection Timeout %s' % (self.name, ex(e)), logger.ERROR)
            return False
        except Exception as e:
            logger.log('%s: Unknown exception raised when sending torrent to %s: %s' % (self.name, self.name, ex(e)),
                       logger.ERROR)
            return False

        if 401 == response.status_code:
            logger.log('%s: Invalid Username or Password, check your config' % self.name, logger.ERROR)
            return False

        if response.status_code in http_error_code.keys():
            logger.log('%s: %s' % (self.name, http_error_code[response.status_code]), logger.DEBUG)
            return False

        logger.log('%s: Response to %s request is %s' % (self.name, method.upper(), response.text), logger.DEBUG)

        return response
Example #9
def findSeason(show, season):

    myDB = db.DBConnection()
    allEps = [int(x["episode"]) for x in myDB.select("SELECT episode FROM tv_episodes WHERE showid = ? AND season = ?", [show.tvdbid, season])]
    logger.log(u"Episode list: "+str(allEps), logger.DEBUG)

    
    reallywanted = []
    notwanted = []
    finalResults = []
    for curEpNum in allEps:
        sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?", [show.tvdbid, season, curEpNum])
        epStatus = int(sqlResults[0]["status"])
        if epStatus == 3:
            reallywanted.append(curEpNum)
        else:
            notwanted.append(curEpNum)
    if notwanted != []:
        for EpNum in reallywanted:
            showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, show.tvdbid)
            episode = showObj.getEpisode(season, EpNum)
            res = findEpisode(episode, manualSearch=True)
            snatchEpisode(res)
        return
    else:
        logger.log(u"Searching for stuff we need from "+show.name+" season "+str(season))
    
        foundResults = {}
    
        didSearch = False
    
        for curProvider in providers.sortedProviderList():
    
            if not curProvider.isActive():
                continue
    
            try:
                curResults = curProvider.findSeasonResults(show, season)
    
                # make a list of all the results for this provider
                for curEp in curResults:
    
                    # skip non-tv crap
                    curResults[curEp] = filter(lambda x:  show_name_helpers.filterBadReleases(x.name) and show_name_helpers.isGoodResult(x.name, show), curResults[curEp])
    
                    if curEp in foundResults:
                        foundResults[curEp] += curResults[curEp]
                    else:
                        foundResults[curEp] = curResults[curEp]
    
            except exceptions.AuthException, e:
                logger.log(u"Authentication error: "+ex(e), logger.ERROR)
                continue
            except Exception, e:
                logger.log(u"Error while searching "+curProvider.name+", skipping: "+ex(e), logger.DEBUG)
                logger.log(traceback.format_exc(), logger.DEBUG)
                continue
    
            didSearch = True
Example #10
    def _request(self, method='get', params={}, data=None, files=None):
        response = None
        if time.time() > self.last_time + 1800 or not self.auth:
            self.last_time = time.time()
            self._get_auth()

        logger.log(
            self.name + u': Requested a ' + method.upper() +
            ' connection to url ' + self.url + ' with Params= ' + str(params) +
            ((' Data=' + str(data)[0:100] + ('...' if len(data) > 100 else ''))
             if data is not None else ''),
            logger.DEBUG
        )

        if not self.auth:
            logger.log(self.name + u': Authentication Failed', logger.ERROR)
            return False
        try:
            response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
                                                                  timeout=120, verify=False)
        except requests.exceptions.ConnectionError as e:
            logger.log(self.name + u': Unable to connect ' + ex(e), logger.ERROR)
            return False
        except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
            logger.log(self.name + u': Invalid Host', logger.ERROR)
            return False
        except requests.exceptions.HTTPError as e:
            logger.log(self.name + u': Invalid HTTP Request ' + ex(e), logger.ERROR)
            return False
        except requests.exceptions.Timeout as e:
            logger.log(self.name + u': Connection Timeout ' + ex(e), logger.ERROR)
            return False
        except Exception as e:
            logger.log(self.name + u': Unknown exception raised when sending torrent to ' + self.name + ': ' + ex(e),
                       logger.ERROR)
            return False

        if response.status_code == 401:
            logger.log(self.name + u': Invalid Username or Password, check your config', logger.ERROR)
            return False

        if response.status_code in http_error_code.keys():
            logger.log(self.name + u': ' + http_error_code[response.status_code], logger.DEBUG)
            return False

        logger.log(self.name + u': Response to ' + method.upper() + ' request is ' + response.text, logger.DEBUG)

        return response
Example #11
    def mass_action(self, querylist, logTransaction=False):

        with db_lock:
            # remove None types
            querylist = [i for i in querylist if i != None]

            if querylist == None:
                return

            sqlResult = []
            attempt = 0

            # Transaction
            self.connection.isolation_level = None
            self.connection.execute('BEGIN')

            while attempt < 5:
                try:

                    for qu in querylist:
                        if len(qu) == 1:
                            if logTransaction:
                                logger.log(qu[0], logger.DEBUG)
                            sqlResult.append(self.connection.execute(qu[0]))
                        elif len(qu) > 1:
                            if logTransaction:
                                logger.log(qu[0] + " with args " + str(qu[1]),
                                           logger.DEBUG)
                            sqlResult.append(
                                self.connection.execute(qu[0], qu[1]))

                    self.connection.commit()

                    logger.log(
                        u"Transaction with " + str(len(querylist)) +
                        u" queries executed", logger.DEBUG)
                    return sqlResult
                except sqlite3.OperationalError, e:
                    sqlResult = []
                    if self.connection:
                        self.connection.rollback()
                    if "unable to open database file" in e.args[
                            0] or "database is locked" in e.args[0]:
                        logger.log(u"DB error: " + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(0.02)
                    else:
                        logger.log(u"DB error: " + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError, e:
                    sqlResult = []
                    if self.connection:
                        self.connection.rollback()
                    logger.log(u"Fatal error executing query: " + ex(e),
                               logger.ERROR)
                    raise
Example #12
    def _api_call(self, apikey, params={}, results_per_page=1000, offset=0):
        server = jsonrpclib.Server('http://api.btnapps.net')

        search_results = {}
        try:
            search_results = server.getTorrentsSearch(apikey, params, int(results_per_page), int(offset))
        except jsonrpclib.jsonrpc.ProtocolError, error:
            logger.log(u"JSON-RPC protocol error while accessing BTN API: " + ex(error), logger.ERROR)
            search_results = {'api-error': ex(error)}
            return search_results
Example #13
    def send_torrent(self, result):

        r_code = False

        logger.log('Calling %s Client' % self.name, logger.DEBUG)

        if not self._get_auth():
            logger.log('%s: Authentication Failed' % self.name, logger.ERROR)
            return r_code

        try:
            # Sets per provider seed ratio
            result.ratio = result.provider.seed_ratio()

            result = self._get_torrent_hash(result)
        except Exception as e:
            logger.log('Bad torrent data: hash is %s for [%s]' % (result.hash, result.name), logger.ERROR)
            logger.log('Exception raised when checking torrent data: %s' % (ex(e)), logger.DEBUG)
            return r_code

        try:
            if result.url.startswith('magnet'):
                r_code = self._add_torrent_uri(result)
            else:
                r_code = self._add_torrent_file(result)

            if not r_code:
                logger.log('%s: Unable to send torrent to client' % self.name, logger.ERROR)
                return False

            if not self._set_torrent_pause(result):
                logger.log('%s: Unable to set the pause for torrent' % self.name, logger.ERROR)

            if not self._set_torrent_label(result):
                logger.log('%s: Unable to set the label for torrent' % self.name, logger.ERROR)

            if not self._set_torrent_ratio(result):
                logger.log('%s: Unable to set the ratio for torrent' % self.name, logger.ERROR)

            if not self._set_torrent_seed_time(result):
                logger.log('%s: Unable to set the seed time for torrent' % self.name, logger.ERROR)

            if not self._set_torrent_path(result):
                logger.log('%s: Unable to set the path for torrent' % self.name, logger.ERROR)

            if 0 != result.priority and not self._set_torrent_priority(result):
                logger.log('%s: Unable to set priority for torrent' % self.name, logger.ERROR)

        except Exception as e:
            logger.log('%s: Failed sending torrent: %s - %s' % (self.name, result.name, result.hash), logger.ERROR)
            logger.log('%s: Exception raised when sending torrent: %s' % (self.name, ex(e)), logger.DEBUG)
            return r_code

        return r_code
Example #14
    def _parse_string(self, name):

        if not name:
            return None

        for (cur_regex_name, cur_regex) in self.compiled_regexes:
            match = cur_regex.match(name)

            if not match:
                continue

            result = ParseResult(name)
            result.which_regex = [cur_regex_name]

            named_groups = match.groupdict().keys()

            if 'series_name' in named_groups:
                result.series_name = match.group('series_name')
                if result.series_name:
                    result.series_name = self.clean_series_name(result.series_name)

            if 'season_num' in named_groups:
                tmp_season = int(match.group('season_num'))
                if cur_regex_name == 'bare' and tmp_season in (19, 20):
                    continue
                result.season_number = tmp_season

            if 'ep_num' in named_groups:
                try:
                    ep_num = self._convert_number(match.group('ep_num'))
                    if 'extra_ep_num' in named_groups and match.group('extra_ep_num'):
                        result.episode_numbers = range(ep_num, self._convert_number(match.group('extra_ep_num')) + 1)
                    else:
                        result.episode_numbers = [ep_num]

                except ValueError, e:
                    raise InvalidNameException(ex(e))

            if 'air_year' in named_groups and 'air_month' in named_groups and 'air_day' in named_groups:
                year = int(match.group('air_year'))
                month = int(match.group('air_month'))
                day = int(match.group('air_day'))

                # make an attempt to detect YYYY-DD-MM formats
                if month > 12:
                    tmp_month = month
                    month = day
                    day = tmp_month

                try:
                    result.air_date = datetime.date(year, month, day)
                except ValueError, e:
                    raise InvalidNameException(ex(e))
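
The parser above works by trying a list of named-group regexes. A much-simplified sketch with a single invented pattern; the real compiled_regexes list is far more extensive, so treat this as an assumption-laden stand-in rather than the parser's actual patterns.

import re


# One much-simplified pattern in the style of the (name, regex) pairs held in
# compiled_regexes above.
EPISODE_RE = re.compile(
    r'(?P<series_name>.+?)[. _-]+'
    r'[Ss](?P<season_num>\d+)[Ee](?P<ep_num>\d+)'
    r'(?:[Ee](?P<extra_ep_num>\d+))?'
)


def parse_release(name):
    match = EPISODE_RE.match(name)
    if not match:
        return None
    groups = match.groupdict()
    ep_num = int(groups['ep_num'])
    last_ep = int(groups['extra_ep_num']) if groups['extra_ep_num'] else ep_num
    return {
        'series_name': groups['series_name'].replace('.', ' '),
        'season_number': int(groups['season_num']),
        'episode_numbers': list(range(ep_num, last_ep + 1)),
    }


print(parse_release('Some.Show.S01E02E03.720p.HDTV'))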
Example #15
def _sabURLOpenSimple(url):
    try:
        f = urllib.urlopen(url)
    except (EOFError, IOError) as e:
        logger.log(u"Unable to connect to SABnzbd: " + ex(e), logger.ERROR)
        return False, "Unable to connect"
    except moves.http_client.InvalidURL as e:
        logger.log(u"Invalid SABnzbd host, check your config: " + ex(e), logger.ERROR)
        return False, "Invalid SABnzbd host"
    if f is None:
        logger.log(u"No data returned from SABnzbd", logger.ERROR)
        return False, "No data returned from SABnzbd"
    else:
        return True, f
Example #16
    def mass_action(self, querylist=[], logTransaction=False, fetchall=False):
        """
        Execute multiple queries

        :param querylist: list of queries
        :param logTransaction: Boolean to wrap all in one transaction
        :param fetchall: Boolean, when using a select query force returning all results
        :return: list of results
        """
        # remove None types
        querylist = [i for i in querylist if i is not None and len(i)]

        sqlResult = []
        attempt = 0

        with db_locks[self.filename]:
            while attempt < 5:
                try:
                    for qu in querylist:
                        if len(qu) == 1:
                            if logTransaction:
                                logger.log(qu[0], logger.DEBUG)
                            sqlResult.append(self.execute(qu[0], fetchall=fetchall))
                        elif len(qu) > 1:
                            if logTransaction:
                                logger.log(qu[0] + " with args " + str(qu[1]), logger.DEBUG)
                            sqlResult.append(self.execute(qu[0], qu[1], fetchall=fetchall))
                    self.connection.commit()
                    logger.log(u"Transaction with " + str(len(querylist)) + u" queries executed", logger.DEBUG)

                    # finished
                    break
                except sqlite3.OperationalError, e:
                    sqlResult = []
                    if self.connection:
                        self.connection.rollback()
                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                        logger.log(u"DB error: " + ex(e), logger.WARNING)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.log(u"DB error: " + ex(e), logger.ERROR)
                        raise
                except sqlite3.DatabaseError, e:
                    sqlResult = []
                    if self.connection:
                        self.connection.rollback()
                    logger.log(u"Fatal error executing query: " + ex(e), logger.ERROR)
                    raise
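
The same "list of (sql,) or (sql, args) tuples executed as one transaction" idea can be sketched with plain sqlite3; the helper and table names below are invented for the demonstration.

import sqlite3


def run_in_transaction(connection, querylist):
    # Execute a list of (sql,) or (sql, args) tuples as one unit of work and roll
    # everything back if any statement fails -- the same input shape mass_action() accepts.
    results = []
    try:
        for qu in querylist:
            if len(qu) == 1:
                results.append(connection.execute(qu[0]))
            else:
                results.append(connection.execute(qu[0], qu[1]))
        connection.commit()
    except sqlite3.DatabaseError:
        connection.rollback()
        raise
    return results


conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE history (action TEXT)')
run_in_transaction(conn, [
    ('INSERT INTO history VALUES (?)', ('snatched',)),
    ('INSERT INTO history VALUES (?)', ('downloaded',)),
])
print(conn.execute('SELECT action FROM history').fetchall())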
Example #17
    def _season_thumb_dict(self, show_obj):
        """
        Should return a dict like:

        result = {<season number>:
                    {1: '<url 1>', 2: '<url 2>', ...},}
        """

        # This holds our resulting dictionary of season art
        result = {}
    
        tvdb_lang = show_obj.lang

        try:
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if tvdb_lang and not tvdb_lang == 'en':
                ltvdb_api_parms['language'] = tvdb_lang

            t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
            tvdb_show_obj = t[show_obj.tvdbid]
        except (tvdb_exceptions.tvdb_error, IOError), e:
            logger.log(u"Unable to look up show on TVDB, not downloading images: "+ex(e), logger.ERROR)
            return result
Example #18
    def _getProperList(self):

        propers = {}

        # for each provider get a list of the propers
        for curProvider in providers.sortedProviderList():

            if not curProvider.isActive():
                continue

            search_date = datetime.datetime.today() - datetime.timedelta(days=2)

            logger.log(u"Searching for any new PROPER releases from " + curProvider.name)
            try:
                curPropers = curProvider.findPropers(search_date)
            except exceptions.AuthException, e:
                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                continue

            # if they haven't been added by a different provider then add the proper to the list
            for x in curPropers:
                name = self._genericName(x.name)

                if not name in propers:
                    logger.log(u"Found new proper: " + x.name, logger.DEBUG)
                    x.provider = curProvider
                    propers[name] = x
Example #19
def rename_ep_file(cur_path, new_path):
    """
    Creates all folders needed to move a file to its new location, renames it, then cleans up any folders
    left that are now empty.

    cur_path: The absolute path to the file you want to move/rename
    new_path: The absolute path to the destination for the file WITHOUT THE EXTENSION
    """

    new_dest_dir, new_dest_name = os.path.split(new_path) #@UnusedVariable
    cur_file_name, cur_file_ext = os.path.splitext(cur_path) #@UnusedVariable

    if cur_file_ext[1:] in subtitleExtensions:
        #Extract subtitle language from filename
        sublang = os.path.splitext(cur_file_name)[1][1:]
        
        #Check if the language extracted from filename is a valid language
        try:
            language = subliminal.language.Language(sublang, strict=True)
            cur_file_ext = '.'+sublang+cur_file_ext 
        except ValueError:
            pass
        
    # put the extension on the incoming file
    new_path += cur_file_ext

    make_dirs(os.path.dirname(new_path))

    # move the file
    try:
        logger.log(u"Renaming file from " + cur_path + " to " + new_path)
        ek.ek(os.rename, cur_path, new_path)
    except (OSError, IOError), e:
        logger.log(u"Failed renaming " + cur_path + " to " + new_path + ": " + ex(e), logger.ERROR)
        return False
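
The core of rename_ep_file() (create missing folders, keep the extension, move the file) can be sketched with the standard library alone; rename_with_dirs and the commented-out example paths are hypothetical, and the subtitle and empty-folder handling is intentionally left out.

import os
import shutil


def rename_with_dirs(cur_path, new_path_without_ext):
    # Move a file, creating any missing destination directories and carrying the
    # original extension over, roughly what rename_ep_file() does above.
    ext = os.path.splitext(cur_path)[1]
    new_path = new_path_without_ext + ext
    new_dir = os.path.dirname(new_path)
    if new_dir and not os.path.isdir(new_dir):
        os.makedirs(new_dir)
    shutil.move(cur_path, new_path)  # unlike os.rename, this also works across filesystems
    return new_path


# Hypothetical paths, shown here rather than executed:
# rename_with_dirs('/tv/downloads/show.s01e01.mkv', '/tv/Show/Season 1/Show - S01E01')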
Example #20
    def _doSearch(self, curString, quotes=False, show=None):

        term = re.sub('[\.\-\:]', ' ', curString).encode('utf-8')
        self.searchString = term
        if quotes:
            term = "\"" + term + "\""

        params = {"q": term,
                  "rpp": 50,  # max 50
                  "ns": 1,  # nospam
                  "szs": 16,  # min 100MB
                  "sp": 1  # nopass
                  }

        searchURL = NZBCLUB_RSS_URL + "?" + urllib.urlencode(params)

        logger.log(u"Search string: " + searchURL)

        logger.log(u"Sleeping 10 seconds to respect NZBClub's rules")
        time.sleep(10)

        searchResult = self.getURL(searchURL,[("User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:5.0) Gecko/20100101 Firefox/5.0"),("Accept","text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"),("Accept-Language","de-de,de;q=0.8,en-us;q=0.5,en;q=0.3"),("Accept-Charset","ISO-8859-1,utf-8;q=0.7,*;q=0.7"),("Connection","keep-alive"),("Cache-Control","max-age=0")])

        if not searchResult:
            return []

        try:
            parsedXML = etree.fromstring(searchResult)
            items = parsedXML.iter('item')
        except Exception, e:
            logger.log(u"Error trying to load NZBClub RSS feed: "+ex(e), logger.ERROR)
            return []
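
A minimal sketch of the fetch-then-parse step, using xml.etree.ElementTree against an inline sample feed instead of a live NZBClub response; the sample items are fabricated for illustration.

import xml.etree.ElementTree as etree

SAMPLE_RSS = """<?xml version="1.0"?>
<rss version="2.0"><channel>
  <item><title>Some.Show.S01E01.720p</title><link>http://example.com/1.nzb</link></item>
  <item><title>Some.Show.S01E02.720p</title><link>http://example.com/2.nzb</link></item>
</channel></rss>"""


def parse_rss_items(xml_text):
    # Parse the feed and return (title, link) pairs, failing softly the way the
    # provider above does when the XML cannot be loaded.
    try:
        root = etree.fromstring(xml_text)
    except etree.ParseError as e:
        print('Error trying to load the RSS feed: %s' % e)
        return []
    return [(item.findtext('title'), item.findtext('link')) for item in root.iter('item')]


print(parse_rss_items(SAMPLE_RSS))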
Example #21
    def _sendNMJ(self, host, database, mount=None):
        """
        Sends an NMJ update command to the specified machine

        host: The hostname/IP to send the request to (no port)
        database: The database to send the request to
        mount: The mount URL to use (optional)
        
        Returns: True if the request succeeded, False otherwise
        """

        # if a mount URL is provided then attempt to open a handle to that URL
        if mount:
            try:
                req = urllib2.Request(mount)
                logger.log(u"Try to mount network drive via url: %s" % (mount), logger.DEBUG)
                handle = urllib2.urlopen(req)
            except IOError, e:
                if hasattr(e, 'reason'):
                    logger.log(u"NMJ: Could not contact Popcorn Hour on host %s: %s" % (host, e.reason), logger.WARNING)
                elif hasattr(e, 'code'):
                    logger.log(u"NMJ: Problem with Popcorn Hour on host %s: %s" % (host, e.code), logger.WARNING)
                return False
            except Exception, e:
                logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR)
                return False
Example #22
    def _doLogin(self):

        if any(requests.utils.dict_from_cookiejar(self.session.cookies).values()):
            return True

        if sickbeard.HDTORRENTS_UID and sickbeard.HDTORRENTS_HASH:

            requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)

        else:

            login_params = {'uid': sickbeard.HDTORRENTS_USERNAME,
                            'pwd': sickbeard.HDTORRENTS_PASSWORD,
                            'submit': 'Confirm',
            }

            try:
                response = self.session.post(self.urls['login'], data=login_params, timeout=30)
            except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
                logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
                return False

            if re.search('You need cookies enabled to log in.', response.text) \
                    or response.status_code == 401:
                logger.log(u'Invalid username or password for ' + self.name + ', check your settings', logger.ERROR)
                return False

            sickbeard.HDTORRENTS_UID = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
            sickbeard.HDTORRENTS_HASH = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']

            self.cookies = {'uid': sickbeard.HDTORRENTS_UID,
                            'pass': sickbeard.HDTORRENTS_HASH
            }
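
The session-based login pattern used here can be sketched generically with requests; the login URL and form field names below are placeholders rather than the provider's real endpoint, and the demo call is left commented out.

import requests


def provider_login(session, login_url, username, password):
    # Log in once over a shared requests.Session so later calls reuse the cookies
    # it sets -- the same pattern as the _doLogin() method above.
    if requests.utils.dict_from_cookiejar(session.cookies):
        return True  # already authenticated
    try:
        response = session.post(login_url, data={'uid': username, 'pwd': password}, timeout=30)
    except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError) as e:
        print('Unable to connect to the provider: %s' % e)
        return False
    return response.status_code != 401


# session = requests.Session()
# provider_login(session, 'https://tracker.example/login.php', 'myuser', 'mypassword')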
Example #23
def process_failed(dirName, nzbName):
    """Process a download that did not complete correctly"""

    global returnStr

    if sickbeard.USE_FAILED_DOWNLOADS:
        try:
            processor = failedProcessor.FailedProcessor(dirName, nzbName)
            process_result = processor.process()
            process_fail_message = ""
        except exceptions.FailedProcessingFailed, e:
            process_result = False
            process_fail_message = ex(e)

        returnStr += processor.log

        if sickbeard.DELETE_FAILED and process_result:
            delete_dir(dirName)

        if process_result:
            returnStr += logHelper(u"Failed Download Processing succeeded: (" + str(nzbName) + ", " + dirName + ")")
        else:
            returnStr += logHelper(
                u"Failed Download Processing failed: (" + str(nzbName) + ", " + dirName + "): " + process_fail_message,
                logger.WARNING)
Example #24
def run_subs_extra_scripts(epObj, foundSubs):

    for curScriptName in sickbeard.SUBTITLES_EXTRA_SCRIPTS:
        script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", curScriptName) if piece.strip()]
        script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
        logger.log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)

        for video, subs in foundSubs.iteritems():
            subpaths = []
            for sub in subs:
                subpath = subliminal.subtitle.get_subtitle_path(video.name, sub.language)
                if sickbeard.SUBTITLES_DIR and ek.ek(os.path.exists, sickbeard.SUBTITLES_DIR):
                    subpath = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, subpath))
                elif sickbeard.SUBTITLES_DIR:
                    subpath = ek.ek(os.path.join, ek.ek(os.path.dirname, subpath), sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, subpath))

                inner_cmd = script_cmd + [video.name, subpath, sub.language.opensubtitles, epObj.show.name,
                                         str(epObj.season), str(epObj.episode), epObj.name, str(epObj.show.indexerid)]

                # use subprocess to run the command and capture output
                logger.log(u"Executing command: %s" % inner_cmd)
                try:
                    p = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
                    out, err = p.communicate()  # @UnusedVariable
                    logger.log(u"Script result: %s" % out, logger.DEBUG)

                except Exception as e:
                    logger.log(u"Unable to run subs_extra_script: " + ex(e))
Example #25
    def downloadResult(self, result):
        """
        Save the result to disk.
        """

        logger.log(u"Downloading a result from " + self.name + " at " + result.url)

        data = self.getURL(result.url)

        if data is None:
            return False

        # use the appropriate watch folder
        if self.providerType == GenericProvider.NZB:
            saveDir = sickbeard.NZB_DIR
            writeMode = 'w'
        elif self.providerType == GenericProvider.TORRENT:
            saveDir = sickbeard.TORRENT_DIR
            writeMode = 'wb'
        else:
            return False

        # use the result name as the filename
        file_name = ek.ek(os.path.join, saveDir, helpers.sanitizeFileName(result.name) + '.' + self.providerType)

        logger.log(u"Saving to " + file_name, logger.DEBUG)

        try:
            with open(file_name, writeMode) as fileOut:
                fileOut.write(data)
            helpers.chmodAsParent(file_name)
        except EnvironmentError, e:
            logger.log("Unable to save the file: " + ex(e), logger.ERROR)
            return False
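
A minimal sketch of the "pick a watch folder and write mode, then save" step from downloadResult(); a plain string stands in for the GenericProvider constants and filename sanitising is skipped, so this is illustrative only.

import os


def save_result(data, save_dir, result_name, provider_type):
    # Write fetched data into the watch folder, using text mode for NZBs and binary
    # mode for torrents, as downloadResult() above does.
    write_mode = 'wb' if provider_type == 'torrent' else 'w'
    file_name = os.path.join(save_dir, '%s.%s' % (result_name, provider_type))
    try:
        with open(file_name, write_mode) as file_out:
            file_out.write(data)
    except EnvironmentError as e:
        print('Unable to save the file: %s' % e)
        return None
    return file_name


# Hypothetical call, with '/tmp' standing in for sickbeard.TORRENT_DIR:
# save_result(b'...torrent bytes...', '/tmp', 'Some.Show.S01E01', 'torrent')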
Example #26
def process_media(processPath, videoFiles, nzbName, process_method, force, is_priority):
    global process_result, returnStr

    for cur_video_file in videoFiles:

        if already_postprocessed(processPath, cur_video_file, force):
            continue

        cur_video_file_path = ek.ek(os.path.join, processPath, cur_video_file)

        try:
            processor = postProcessor.PostProcessor(cur_video_file_path, nzbName, process_method, is_priority)
            process_result = processor.process()
            process_fail_message = ""
        except exceptions.PostProcessingFailed, e:
            process_result = False
            process_fail_message = ex(e)

        returnStr += processor.log

        if process_result:
            returnStr += logHelper(u"Processing succeeded for " + cur_video_file_path)
        else:
            returnStr += logHelper(u"Processing failed for " + cur_video_file_path + ": " + process_fail_message,
                                   logger.WARNING)

        # If something fails, abort processing of this dir
        if not process_result:
            break
Example #27
    def validateRSS(self):

        try:
            if self.cookies:
                cookie_validator = re.compile("^(\w+=\w+)(;\w+=\w+)*$")
                if not cookie_validator.match(self.cookies):
                    return (False, 'Cookie is not correctly formatted: ' + self.cookies)

            data = self.cache._getRSSData()['entries']
            if not data:
                return (False, 'No items found in the RSS feed ' + self.url)

            (title, url) = self._get_title_and_url(data[0])

            if not title:
                return (False, 'Unable to get title from first item')

            if not url:
                return (False, 'Unable to get torrent url from first item')

            if url.startswith('magnet:') and re.search('urn:btih:([\w]{32,40})', url):
                return (True, 'RSS feed Parsed correctly')
            else:
                if self.cookies:
                    requests.utils.add_dict_to_cookiejar(self.session.cookies,
                                                         dict(x.rsplit('=', 1) for x in (self.cookies.split(';'))))
                torrent_file = self.getURL(url)
                try:
                    bdecode(torrent_file)
                except Exception, e:
                    self.dumpHTML(torrent_file)
                    return (False, 'Torrent link is not a valid torrent file: ' + ex(e))

            return (True, 'RSS feed Parsed correctly')
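
The cookie-string check in validateRSS() can be isolated into a tiny helper; a minimal sketch assuming the same "key=value;key2=value2" format, with an invented helper name.

import re


def cookies_to_dict(cookie_string):
    # Validate a "key=value;key2=value2" cookie setting and turn it into a dict,
    # using the same format check that validateRSS() applies above.
    if not re.match(r'^(\w+=\w+)(;\w+=\w+)*$', cookie_string):
        raise ValueError('Cookie is not correctly formatted: ' + cookie_string)
    return dict(part.split('=', 1) for part in cookie_string.split(';'))


print(cookies_to_dict('uid=12345;pass=abcdef'))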
Example #28
    def _request(self, method='get', params={}, data=None, files=None):

        if time.time() > self.last_time + 1800 or not self.auth:
            self.last_time = time.time()
            self._get_auth()

        logger.log(
            self.name + u': Requested a ' + method.upper() + ' connection to url ' + self.url + ' with Params= ' + str(
                params) + (
                (' Data=' + str(data)[0:100] + ('...' if len(data) > 100 else '')) if data is not None else ""),
            logger.DEBUG)

        if not self.auth:
            logger.log(self.name + u': Authentication Failed', logger.ERROR)
            return False
        try:
            self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files,
                                                                  timeout=20, verify=False)
        except requests.exceptions.ConnectionError, e:
            logger.log(self.name + u': Unable to connect ' + ex(e), logger.ERROR)
            return False
Example #29
    def _retrieve_show_image(self, image_type, show_obj, which=None):
        """
        Gets an image URL from theTVDB.com, downloads it and returns the data.
        
        image_type: type of image to retrieve (currently supported: poster, fanart)
        show_obj: a TVShow object to use when searching for the image
        which: optional, a specific numbered poster to look for
        
        Returns: the binary image data if available, or else None
        """

        tvdb_lang = show_obj.lang

        try:
            # There's gotta be a better way of doing this but we don't wanna
            # change the language value elsewhere
            ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

            if tvdb_lang and not tvdb_lang == 'en':
                ltvdb_api_parms['language'] = tvdb_lang

            t = tvdb_api.Tvdb(banners=True, **ltvdb_api_parms)
            tvdb_show_obj = t[show_obj.tvdbid]
        except (tvdb_exceptions.tvdb_error, IOError), e:
            logger.log(u"Unable to look up show on TVDB, not downloading images: "+ex(e), logger.ERROR)
            return None
Example #30
    def updateCache(self):

        if not self.shouldUpdate():
            return

        data = self._getRSSData()

        # as long as the http request worked we count this as an update
        if data:
            self.setLastUpdate()
        else:
            return []

        # now that we've loaded the current RSS feed lets delete the old cache
        logger.log(u"Clearing "+self.provider.name+" cache and updating with new information")
        self._clearCache()

        if not self._checkAuth(data):
            raise exceptions.AuthException("Your authentication info for "+self.provider.name+" is incorrect, check your config")

        try:
            responseSoup = etree.ElementTree(etree.XML(data))
            items = responseSoup.getiterator('item')
        except Exception, e:
            logger.log(u"Error trying to load "+self.provider.name+" RSS feed: "+ex(e), logger.ERROR)
            logger.log(u"Feed contents: "+repr(data), logger.DEBUG)
            return []
Example #31
    def _addCacheEntry(self,
                       name,
                       url,
                       season=None,
                       episodes=None,
                       tvdb_id=0,
                       tvrage_id=0,
                       quality=None,
                       extraNames=[]):

        myDB = self._getDB()

        parse_result = None

        # if we don't have complete info then parse the filename to get it
        for curName in [name] + extraNames:
            try:
                myParser = NameParser()
                parse_result = myParser.parse(curName)
            except InvalidNameException:
                logger.log(
                    u"Unable to parse the filename " + curName +
                    " into a valid episode", logger.DEBUG)
                continue

        if not parse_result:
            logger.log(
                u"Giving up because I'm unable to parse this name: " + name,
                logger.DEBUG)
            return False

        if not parse_result.series_name:
            logger.log(
                u"No series name retrieved from " + name +
                ", unable to cache it", logger.DEBUG)
            return False

        tvdb_lang = None

        # if we need tvdb_id or tvrage_id then search the DB for them
        if not tvdb_id or not tvrage_id:

            # if we have only the tvdb_id, use the database
            if tvdb_id:
                showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
                if showObj:
                    tvrage_id = showObj.tvrid
                    tvdb_lang = showObj.lang
                else:
                    logger.log(
                        u"We were given a TVDB id " + str(tvdb_id) +
                        " but it doesn't match a show we have in our list, so leaving tvrage_id empty",
                        logger.DEBUG)
                    tvrage_id = 0

            # if we have only a tvrage_id then use the database
            elif tvrage_id:
                showObj = helpers.findCertainTVRageShow(
                    sickbeard.showList, tvrage_id)
                if showObj:
                    tvdb_id = showObj.tvdbid
                    tvdb_lang = showObj.lang
                else:
                    logger.log(
                        u"We were given a TVRage id " + str(tvrage_id) +
                        " but it doesn't match a show we have in our list, so leaving tvdb_id empty",
                        logger.DEBUG)
                    tvdb_id = 0

            # if they're both empty then fill out as much info as possible by searching the show name
            else:

                # check the name cache and see if we already know what show this is
                logger.log(
                    u"Checking the cache to see if we already know the tvdb id of "
                    + parse_result.series_name, logger.DEBUG)
                tvdb_id = name_cache.retrieveNameFromCache(
                    parse_result.series_name)

                # remember if the cache lookup worked or not so we know whether we should bother updating it later
                if tvdb_id == None:
                    logger.log(
                        u"No cache results returned, continuing on with the search",
                        logger.DEBUG)
                    from_cache = False
                else:
                    logger.log(
                        u"Cache lookup found " + repr(tvdb_id) +
                        ", using that", logger.DEBUG)
                    from_cache = True

                # if the cache failed, try looking up the show name in the database
                if tvdb_id == None:
                    logger.log(
                        u"Trying to look the show up in the show database",
                        logger.DEBUG)
                    showResult = helpers.searchDBForShow(
                        parse_result.series_name)
                    if showResult:
                        logger.log(
                            u"" + parse_result.series_name +
                            " was found to be show " + showResult[1] + " (" +
                            str(showResult[0]) + ") in our DB.", logger.DEBUG)
                        tvdb_id = showResult[0]

                # if the DB lookup fails then do a comprehensive regex search
                if tvdb_id == None:
                    logger.log(
                        u"Couldn't figure out a show name straight from the DB, trying a regex search instead",
                        logger.DEBUG)
                    for curShow in sickbeard.showList:
                        if show_name_helpers.isGoodResult(
                                name, curShow, False):
                            logger.log(
                                u"Successfully matched " + name + " to " +
                                curShow.name + " with regex", logger.DEBUG)
                            tvdb_id = curShow.tvdbid
                            tvdb_lang = curShow.lang
                            break

                # if tvdb_id was anything but None (0 or a number) then
                if not from_cache:
                    name_cache.addNameToCache(parse_result.series_name,
                                              tvdb_id)

                # if we came out with tvdb_id = None it means we couldn't figure it out at all, just use 0 for that
                if tvdb_id == None:
                    tvdb_id = 0

                # if we found the show then retrieve the show object
                if tvdb_id:
                    showObj = helpers.findCertainShow(sickbeard.showList,
                                                      tvdb_id)
                    if showObj:
                        tvrage_id = showObj.tvrid
                        tvdb_lang = showObj.lang

        # if we weren't provided with season/episode information then get it from the name that we parsed
        if not season:
            season = parse_result.season_number if parse_result.season_number != None else 1
        if not episodes:
            episodes = parse_result.episode_numbers

        # if we have an air-by-date show then get the real season/episode numbers
        if parse_result.air_by_date and tvdb_id:
            try:
                # There's gotta be a better way of doing this but we don't wanna
                # change the language value elsewhere
                ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()

                if not (tvdb_lang == "" or tvdb_lang == "en"
                        or tvdb_lang == None):
                    ltvdb_api_parms['language'] = tvdb_lang

                t = tvdb_api.Tvdb(**ltvdb_api_parms)
                epObj = t[tvdb_id].airedOn(parse_result.air_date)[0]
                season = int(epObj["seasonnumber"])
                episodes = [int(epObj["episodenumber"])]
            except tvdb_exceptions.tvdb_episodenotfound:
                logger.log(
                    u"Unable to find episode with date " +
                    str(parse_result.air_date) + " for show " +
                    parse_result.series_name + ", skipping", logger.WARNING)
                return False
            except tvdb_exceptions.tvdb_error, e:
                logger.log(u"Unable to contact TVDB: " + ex(e), logger.WARNING)
                return False
Example #32
    def _doLogin(self):

        if any(
                requests.utils.dict_from_cookiejar(
                    self.session.cookies).values()):
            return True

        if self._uid and self._hash:
            requests.utils.add_dict_to_cookiejar(self.session.cookies,
                                                 self.cookies)
        else:

            login_params = {
                'username': self.username,
                'password': self.password,
                'submit.x': 0,
                'submit.y': 0
            }

            if not self.session:
                self.session = requests.Session()

            try:
                response = self.session.post(self.urls['login'],
                                             data=login_params,
                                             timeout=30,
                                             verify=False)
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.HTTPError), e:
                logger.log(
                    u'Unable to connect to ' + self.name + ' provider: ' +
                    ex(e), logger.ERROR)
                return False

            if re.search('You tried too often', response.text):
                logger.log(
                    u'Too many login attempts for ' + self.name +
                    ", can't retrieve any data", logger.ERROR)
                return False

            if response.status_code == 401:
                logger.log(
                    u'Invalid username or password for ' + self.name +
                    ', Check your settings!', logger.ERROR)
                return False

            try:
                if requests.utils.dict_from_cookiejar(
                        self.session.cookies
                )['uid'] and requests.utils.dict_from_cookiejar(
                        self.session.cookies)['pass']:
                    self._uid = requests.utils.dict_from_cookiejar(
                        self.session.cookies)['uid']
                    self._hash = requests.utils.dict_from_cookiejar(
                        self.session.cookies)['pass']

                    self.cookies = {'uid': self._uid, 'pass': self._hash}
                    return True
            except:
                pass

            logger.log(u'Unable to obtain cookie for TorrentDay', logger.ERROR)
            return False
Example #33
        def _int_copy(cur_file_path, new_file_path):

            self._log(u"Copying file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
            try:
                helpers.copyFile(cur_file_path, new_file_path)
                helpers.chmodAsParent(new_file_path)
            except (IOError, OSError), e:
                logger.log("Unable to copy file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                raise e
Example #34
    def retrieveShowMetadata(self, folder):
        """
        Used only when mass adding Existing Shows, using previously generated Show metadata to reduce the need to query TVDB.
        """

        empty_return = (None, None, None)

        metadata_path = ek.ek(os.path.join, folder,
                              self._show_metadata_filename)

        if not ek.ek(os.path.isdir, folder) or not ek.ek(
                os.path.isfile, metadata_path):
            logger.log(
                u"Can't load the metadata file from " + repr(metadata_path) +
                ", it doesn't exist", logger.DEBUG)
            return empty_return

        logger.log(u"Loading show info from metadata file in " + folder,
                   logger.DEBUG)

        try:
            with ek.ek(open, metadata_path, 'r') as xmlFileObj:
                showXML = etree.ElementTree(file=xmlFileObj)

            if showXML.findtext('title') == None \
                    or (showXML.findtext('tvdbid') == None
                        and showXML.findtext('id') == None) \
                            and showXML.findtext('indexer') == None:
                logger.log(u"Invalid info in tvshow.nfo (missing name or id):" \
                           + str(showXML.findtext('title')) + " " \
                           + str(showXML.findtext('indexer')) + " " \
                           + str(showXML.findtext('tvdbid')) + " " \
                           + str(showXML.findtext('id')))
                return empty_return

            name = showXML.findtext('title')

            try:
                indexer = int(showXML.findtext('indexer'))
            except:
                indexer = None

            if showXML.findtext('tvdbid') != None:
                indexer_id = int(showXML.findtext('tvdbid'))
            elif showXML.findtext('id') != None:
                indexer_id = int(showXML.findtext('id'))
            else:
                logger.log(
                    u"Empty <id> or <tvdbid> field in NFO, unable to find a ID",
                    logger.WARNING)
                return empty_return

            if indexer_id is None:
                logger.log(
                    u"Invalid Indexer ID (" + str(indexer_id) +
                    "), not using metadata file", logger.WARNING)
                return empty_return

        except Exception, e:
            logger.log(
                u"There was an error parsing your existing metadata file: '" +
                metadata_path + "' error: " + ex(e), logger.WARNING)
            return empty_return
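
Outside of SickBeard the same lookup can be done with the standard library alone; this sketch reads the same elements the method above checks (title, tvdbid, id, indexer), and everything else about it is illustrative.

import os
import xml.etree.ElementTree as etree

def read_show_nfo(folder, filename='tvshow.nfo'):
    # returns (indexer, indexer_id, name) or (None, None, None) on any problem
    nfo_path = os.path.join(folder, filename)
    if not os.path.isfile(nfo_path):
        return (None, None, None)

    tree = etree.parse(nfo_path)

    name = tree.findtext('title')
    raw_id = tree.findtext('tvdbid') or tree.findtext('id')  # prefer <tvdbid>
    raw_indexer = tree.findtext('indexer')

    indexer_id = int(raw_id) if raw_id and raw_id.isdigit() else None
    indexer = int(raw_indexer) if raw_indexer and raw_indexer.isdigit() else None

    if name is None or indexer_id is None:
        return (None, None, None)
    return (indexer, indexer_id, name)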
Example #35
0
    def update_watchlist(self,
                         show_obj=None,
                         s=None,
                         e=None,
                         data_show=None,
                         data_episode=None,
                         update="add"):
        """
        Sends a request to trakt indicating that the given episode is part of our library.

        show_obj: The TVShow object to add to trakt
        s: season number
        e: episode number
        data_show: structured object of shows in trakt.tv format
        data_episode: structured object of episodes in trakt.tv format
        update: type of action, "add" or "remove"
        """

        trakt_api = TraktAPI(sickbeard.SSL_VERIFY, sickbeard.TRAKT_TIMEOUT)

        if sickbeard.USE_TRAKT:

            data = {}
            try:
                # URL parameters
                if show_obj is not None:
                    trakt_id = sickbeard.indexerApi(
                        show_obj.indexer).config['trakt_id']
                    data = {
                        'shows': [{
                            'title': show_obj.name,
                            'year': show_obj.startyear,
                            'ids': {},
                        }]
                    }

                    if trakt_id == 'tvdb_id':
                        data['shows'][0]['ids']['tvdb'] = show_obj.indexerid
                    else:
                        data['shows'][0]['ids']['tvrage'] = show_obj.indexerid
                elif data_show is not None:
                    data.update(data_show)
                else:
                    logger.log(
                        u"there's a coding problem contact developer. It's needed to be provided at lest one of the two: data_show or show_obj",
                        logger.WARNING)
                    return False

                if data_episode is not None:
                    data['shows'][0].update(data_episode)

                elif s is not None:
                    # traktv URL parameters
                    season = {
                        'season': [{
                            'number': s,
                        }]
                    }

                    if e is not None:
                        # traktv URL parameters
                        episode = {'episodes': [{'number': e}]}

                        season['season'][0].update(episode)

                    data['shows'][0].update(season)

                trakt_url = "sync/watchlist"
                if update == "remove":
                    trakt_url += "/remove"

                trakt_api.traktRequest(trakt_url, data, method='POST')

            except (traktException, traktAuthException, traktServerBusy) as e:
                logger.log(u"Could not connect to Trakt service: %s" % ex(e),
                           logger.WARNING)
                return False

        return True
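
Stripped of the Trakt client plumbing, the payload assembled above has this shape; the sketch only rebuilds the dict structure visible in the method and is not meant as an authoritative description of the Trakt API.

def build_watchlist_payload(title, year, indexer_id, trakt_id='tvdb_id',
                            season=None, episode=None):
    show = {'title': title, 'year': year, 'ids': {}}

    # same branching as above: a tvdb id or, failing that, a tvrage id
    if trakt_id == 'tvdb_id':
        show['ids']['tvdb'] = indexer_id
    else:
        show['ids']['tvrage'] = indexer_id

    if season is not None:
        season_block = {'season': [{'number': season}]}
        if episode is not None:
            season_block['season'][0]['episodes'] = [{'number': episode}]
        show.update(season_block)

    return {'shows': [show]}

# build_watchlist_payload('Some Show', 2010, 12345, season=1, episode=2) would be
# POSTed to "sync/watchlist" (or "sync/watchlist/remove" to take it off again).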
Example #36
0
class XBMC_12PlusMetadata(generic.GenericMetadata):
    """
    Metadata generation class for XBMC 12+.

    The following file structure is used:

    show_root/tvshow.nfo                    (show metadata)
    show_root/fanart.jpg                    (fanart)
    show_root/poster.jpg                    (poster)
    show_root/banner.jpg                    (banner)
    show_root/Season ##/filename.ext        (*)
    show_root/Season ##/filename.nfo        (episode metadata)
    show_root/Season ##/filename-thumb.jpg  (episode thumb)
    show_root/season##-poster.jpg           (season posters)
    show_root/season##-banner.jpg           (season banners)
    show_root/season-all-poster.jpg         (season all poster)
    show_root/season-all-banner.jpg         (season all banner)
    """
    def __init__(self,
                 show_metadata=False,
                 episode_metadata=False,
                 fanart=False,
                 poster=False,
                 banner=False,
                 episode_thumbnails=False,
                 season_posters=False,
                 season_banners=False,
                 season_all_poster=False,
                 season_all_banner=False):

        generic.GenericMetadata.__init__(self, show_metadata, episode_metadata,
                                         fanart, poster, banner,
                                         episode_thumbnails, season_posters,
                                         season_banners, season_all_poster,
                                         season_all_banner)

        self.name = 'XBMC 12+'

        self.poster_name = "poster.jpg"
        self.season_all_poster_name = "season-all-poster.jpg"

        # web-ui metadata template
        self.eg_show_metadata = "tvshow.nfo"
        self.eg_episode_metadata = "Season##\\<i>filename</i>.nfo"
        self.eg_fanart = "fanart.jpg"
        self.eg_poster = "poster.jpg"
        self.eg_banner = "banner.jpg"
        self.eg_episode_thumbnails = "Season##\\<i>filename</i>-thumb.jpg"
        self.eg_season_posters = "season##-poster.jpg"
        self.eg_season_banners = "season##-banner.jpg"
        self.eg_season_all_poster = "season-all-poster.jpg"
        self.eg_season_all_banner = "season-all-banner.jpg"

    def _show_data(self, show_obj):
        """
        Creates an elementTree XML structure for an XBMC-style tvshow.nfo and
        returns the resulting data object.

        show_obj: a TVShow instance to create the NFO for
        """

        show_ID = show_obj.indexerid

        indexer_lang = show_obj.lang
        lINDEXER_API_PARMS = sickbeard.indexerApi(
            show_obj.indexer).api_params.copy()

        lINDEXER_API_PARMS['actors'] = True

        if indexer_lang and not indexer_lang == 'en':
            lINDEXER_API_PARMS['language'] = indexer_lang

        if show_obj.dvdorder != 0:
            lINDEXER_API_PARMS['dvdorder'] = True

        t = sickbeard.indexerApi(
            show_obj.indexer).indexer(**lINDEXER_API_PARMS)

        tv_node = etree.Element("tvshow")

        try:
            myShow = t[int(show_ID)]
        except sickbeard.indexer_shownotfound:
            logger.log(
                u"Unable to find show with id " + str(show_ID) + " on " +
                sickbeard.indexerApi(show_obj.indexer).name + ", skipping it",
                logger.ERROR)
            raise

        except sickbeard.indexer_error:
            logger.log(
                u"" + sickbeard.indexerApi(show_obj.indexer).name +
                " is down, can't use its data to add this show", logger.ERROR)
            raise

        # check for title and id
        if getattr(myShow, 'seriesname', None) is None or getattr(
                myShow, 'id', None) is None:
            logger.log(
                u"Incomplete info for show with id " + str(show_ID) + " on " +
                sickbeard.indexerApi(show_obj.indexer).name + ", skipping it",
                logger.ERROR)
            return False

        title = etree.SubElement(tv_node, "title")
        if getattr(myShow, 'seriesname', None) is not None:
            title.text = myShow["seriesname"]

        rating = etree.SubElement(tv_node, "rating")
        if getattr(myShow, 'rating', None) is not None:
            rating.text = myShow["rating"]

        year = etree.SubElement(tv_node, "year")
        if getattr(myShow, 'firstaired', None) is not None:
            try:
                year_text = str(
                    datetime.datetime.strptime(myShow["firstaired"],
                                               '%Y-%m-%d').year)
                if year_text:
                    year.text = year_text
            except:
                pass

        plot = etree.SubElement(tv_node, "plot")
        if getattr(myShow, 'overview', None) is not None:
            plot.text = myShow["overview"]

        episodeguide = etree.SubElement(tv_node, "episodeguide")
        episodeguideurl = etree.SubElement(episodeguide, "url")
        episodeguideurl2 = etree.SubElement(tv_node, "episodeguideurl")
        if getattr(myShow, 'id', None) is not None:
            showurl = sickbeard.indexerApi(
                show_obj.indexer).config['base_url'] + str(
                    myShow["id"]) + '/all/en.zip'
            episodeguideurl.text = showurl
            episodeguideurl2.text = showurl

        mpaa = etree.SubElement(tv_node, "mpaa")
        if getattr(myShow, 'contentrating', None) is not None:
            mpaa.text = myShow["contentrating"]

        indexerid = etree.SubElement(tv_node, "id")
        if getattr(myShow, 'id', None) is not None:
            indexerid.text = str(myShow["id"])

        indexer = etree.SubElement(tv_node, "indexer")
        if show_obj.indexer is not None:
            indexer.text = str(show_obj.indexer)

        genre = etree.SubElement(tv_node, "genre")
        if getattr(myShow, 'genre', None) is not None:
            if isinstance(myShow["genre"], basestring):
                genre.text = " / ".join(x.strip()
                                        for x in myShow["genre"].split('|')
                                        if x.strip())

        premiered = etree.SubElement(tv_node, "premiered")
        if getattr(myShow, 'firstaired', None) is not None:
            premiered.text = myShow["firstaired"]

        studio = etree.SubElement(tv_node, "studio")
        if getattr(myShow, 'network', None) is not None:
            studio.text = myShow["network"]

        if getattr(myShow, '_actors', None) is not None:
            for actor in myShow['_actors']:
                cur_actor = etree.SubElement(tv_node, "actor")

                cur_actor_name = etree.SubElement(cur_actor, "name")
                cur_actor_name_text = actor['name']
                if isinstance(cur_actor_name_text, basestring):
                    cur_actor_name.text = cur_actor_name_text.strip()

                cur_actor_role = etree.SubElement(cur_actor, "role")
                cur_actor_role_text = actor['role']
                if cur_actor_role_text is not None:
                    cur_actor_role.text = cur_actor_role_text

                cur_actor_thumb = etree.SubElement(cur_actor, "thumb")
                cur_actor_thumb_text = actor['image']
                if cur_actor_thumb_text is not None:
                    cur_actor_thumb.text = cur_actor_thumb_text

        # Make it purdy
        helpers.indentXML(tv_node)

        data = etree.ElementTree(tv_node)

        return data

    def _ep_data(self, ep_obj):
        """
        Creates an elementTree XML structure for an XBMC-style episode.nfo and
        returns the resulting data object.
            ep_obj: a TVEpisode instance to create the NFO for
        """

        eps_to_write = [ep_obj] + ep_obj.relatedEps

        indexer_lang = ep_obj.show.lang

        lINDEXER_API_PARMS = sickbeard.indexerApi(
            ep_obj.show.indexer).api_params.copy()

        lINDEXER_API_PARMS['actors'] = True

        if indexer_lang and not indexer_lang == 'en':
            lINDEXER_API_PARMS['language'] = indexer_lang

        if ep_obj.show.dvdorder != 0:
            lINDEXER_API_PARMS['dvdorder'] = True

        try:
            t = sickbeard.indexerApi(
                ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
            myShow = t[ep_obj.show.indexerid]
        except sickbeard.indexer_shownotfound, e:
            raise exceptions.ShowNotFoundException(e.message)
        except sickbeard.indexer_error, e:
            logger.log(
                u"Unable to connect to " +
                sickbeard.indexerApi(ep_obj.show.indexer).name +
                " while creating meta files - skipping - " + ex(e),
                logger.ERROR)
            return
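
As a toy illustration of the ElementTree pattern used throughout _show_data, this builds a stripped-down tvshow.nfo with just a few of the elements written above; the field values are placeholders.

import xml.etree.ElementTree as etree

def minimal_tvshow_nfo(title, plot=None, indexer_id=None):
    tv_node = etree.Element('tvshow')

    etree.SubElement(tv_node, 'title').text = title
    if plot is not None:
        etree.SubElement(tv_node, 'plot').text = plot
    if indexer_id is not None:
        etree.SubElement(tv_node, 'id').text = str(indexer_id)

    return etree.ElementTree(tv_node)

# minimal_tvshow_nfo('Some Show', plot='...', indexer_id=12345).write('tvshow.nfo', encoding='utf-8')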
Example #37
0
    def _find_info(self):
        """
        For a given file try to find the showid, season, and episode.
        """

        indexer_id = season = None
        episodes = []

        # try to look up the nzb in history
        attempt_list = [
            self._history_lookup,

            # try to analyze the nzb name
            lambda: self._analyze_name(self.nzb_name),

            # try to analyze the file name
            lambda: self._analyze_name(self.file_name),

            # try to analyze the dir name
            lambda: self._analyze_name(self.folder_name),

            # try to analyze the file + dir names together
            lambda: self._analyze_name(self.file_path),

            # try to analyze the dir + file name together as one name
            lambda: self._analyze_name(self.folder_name + u' ' + self.file_name
                                       )
        ]

        # attempt every possible method to get our info
        for cur_attempt in attempt_list:

            try:
                (cur_indexer_id, cur_season, cur_episodes) = cur_attempt()
            except InvalidNameException, e:
                logger.log(u"Unable to parse, skipping: " + ex(e),
                           logger.DEBUG)
                continue

            # if we already did a successful history lookup then keep that indexer_id value
            if cur_indexer_id and not (self.in_history and indexer_id):
                indexer_id = cur_indexer_id
            if cur_season is not None:
                season = cur_season
            if cur_episodes:
                episodes = cur_episodes

            # for air-by-date shows we need to look up the season/episode from tvdb
            if season == -1 and indexer_id and episodes:
                self._log(
                    u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
                    logger.DEBUG)

                # try to get language set for this show
                indexer_lang = None
                try:
                    showObj = helpers.findCertainShow(sickbeard.showList,
                                                      indexer_id)
                    if showObj:
                        indexer_lang = showObj.lang
                except exceptions.MultipleShowObjectsException:
                    continue

                for indexer in sickbeard.indexerApi().indexers:
                    self.indexer = int(indexer)
                    self._log(u"Searching " +
                              sickbeard.indexerApi(self.indexer).name +
                              ", trying to auto-detect Indexer for "
                              "show")
                    try:
                        lINDEXER_API_PARMS = sickbeard.indexerApi(
                            self.indexer).api_params.copy()
                        if indexer_lang and not indexer_lang == 'en':
                            lINDEXER_API_PARMS['language'] = indexer_lang

                        t = sickbeard.indexerApi(
                            self.indexer).indexer(**lINDEXER_API_PARMS)

                        epObj = t[indexer_id].airedOn(episodes[0])[0]

                        season = int(epObj["seasonnumber"])
                        episodes = [int(epObj["episodenumber"])]

                        self._log(
                            u"Got season " + str(season) + " episodes " +
                            str(episodes), logger.DEBUG)
                    except (KeyError, sickbeard.indexer_episodenotfound), e:
                        self._log(
                            u"Unable to find episode with date " +
                            str(episodes[0]) + u" for show " +
                            str(indexer_id) + u", skipping", logger.DEBUG)
                        # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                        continue
                    except sickbeard.indexer_error, e:
                        logger.log(
                            u"Unable to contact " +
                            sickbeard.indexerApi(self.indexer).name + ": " +
                            ex(e), logger.WARNING)
                        continue

                    # if we have everything we need, stop trying other indexers
                    if indexer_id and season and episodes:
                        break

                    episodes = []
                    self._log(
                        u"Can't find thhe show on " +
                        sickbeard.indexerApi(self.indexer).name +
                        ", trying next "
                        "indexer", logger.WARNING)
Example #38
0
        if not r.status_code == 200:
            return False

        magnetFileName = ek.ek(
            os.path.join, sickbeard.TORRENT_DIR,
            helpers.sanitizeFileName(result.name) + '.' + self.providerType)
        magnetFileContent = r.content

        try:
            with open(magnetFileName, 'wb') as fileOut:
                fileOut.write(magnetFileContent)

            helpers.chmodAsParent(magnetFileName)

        except EnvironmentError, e:
            logger.log("Unable to save the file: " + ex(e), logger.ERROR)
            return False

        logger.log(u"Saved magnet link to " + magnetFileName + " ",
                   logger.MESSAGE)
        return True

    def findPropers(self, search_date=datetime.datetime.today()):

        results = []

        sqlResults = db.DBConnection().select(
            'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e'
            + ' INNER JOIN tv_shows AS s ON (e.showid = s.tvdb_id)' +
            ' WHERE e.airdate >= ' + str(search_date.toordinal()) +
            ' AND (e.status IN (' +
Example #39
0
def xem_refresh(indexer_id, indexer, force=False):
    """
    Refresh data from xem for a tv show
    
    @param indexer_id: int
    """
    if indexer_id is None:
        return

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    # XEM API URL
    url = "http://thexem.de/map/all?id=%s&origin=%s&destination=scene" % (
        indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'])

    MAX_REFRESH_AGE_SECS = 86400  # 1 day

    myDB = db.DBConnection()
    rows = myDB.select(
        "SELECT last_refreshed FROM xem_refresh WHERE indexer = ? and indexer_id = ?",
        [indexer, indexer_id])
    if rows:
        lastRefresh = int(rows[0]['last_refreshed'])
        refresh = int(time.mktime(datetime.datetime.today().timetuple())
                      ) > lastRefresh + MAX_REFRESH_AGE_SECS
    else:
        refresh = True

    if refresh or force:
        logger.log(
            u'Looking up XEM scene mapping for show %s on %s' % (
                indexer_id,
                sickbeard.indexerApi(indexer).name,
            ), logger.DEBUG)

        # mark refreshed
        myDB.upsert(
            "xem_refresh", {
                'indexer':
                indexer,
                'last_refreshed':
                int(time.mktime(datetime.datetime.today().timetuple()))
            }, {'indexer_id': indexer_id})

        try:
            parsedJSON = sickbeard.helpers.getURL(url, json=True)
            if not parsedJSON or parsedJSON == '':
                logger.log(
                    u'No XEM data for show %s on %s' % (
                        indexer_id,
                        sickbeard.indexerApi(indexer).name,
                    ), logger.INFO)
                return

            if 'success' in parsedJSON['result']:
                cl = []
                for entry in parsedJSON['data']:
                    if 'scene' in entry:
                        cl.append([
                            "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
                            [
                                entry['scene']['season'],
                                entry['scene']['episode'],
                                entry['scene']['absolute'], indexer_id,
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['season'],
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['episode']
                            ]
                        ])
                    if 'scene_2' in entry:  # for doubles
                        cl.append([
                            "UPDATE tv_episodes SET scene_season = ?, scene_episode = ?, scene_absolute_number = ? WHERE showid = ? AND season = ? AND episode = ?",
                            [
                                entry['scene_2']['season'],
                                entry['scene_2']['episode'],
                                entry['scene_2']['absolute'], indexer_id,
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['season'],
                                entry[sickbeard.indexerApi(
                                    indexer).config['xem_origin']]['episode']
                            ]
                        ])

                if len(cl) > 0:
                    myDB = db.DBConnection()
                    myDB.mass_action(cl)
            else:
                logger.log(
                    u"Empty lookup result - no XEM data for show %s on %s" % (
                        indexer_id,
                        sickbeard.indexerApi(indexer).name,
                    ), logger.DEBUG)
        except Exception, e:
            logger.log(
                u"Exception while refreshing XEM data for show " +
                str(indexer_id) + " on " + sickbeard.indexerApi(indexer).name +
                ": " + ex(e), logger.WARNING)
            logger.log(traceback.format_exc(), logger.DEBUG)
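
Without the database bookkeeping, the XEM call itself is one JSON GET; this sketch uses requests and keys only on the fields read above (result, data, scene, season/episode/absolute), and the 'tvdb' origin value is an assumption for illustration.

import requests

XEM_URL = 'http://thexem.de/map/all?id=%s&origin=%s&destination=scene'

def fetch_xem_mapping(indexer_id, origin='tvdb'):
    # returns a list of (scene_numbering, origin_numbering) dict pairs
    parsed = requests.get(XEM_URL % (indexer_id, origin), timeout=90).json()

    if 'success' not in (parsed.get('result') or ''):
        return []

    mappings = []
    for entry in parsed.get('data', []):
        if 'scene' in entry:
            mappings.append((entry['scene'], entry[origin]))
    return mappings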
Example #40
0
                u"Sending NMJ scan update command via url: %s" % (updateUrl),
                logger.DEBUG)
            handle = urllib2.urlopen(req)
            response = handle.read()
        except IOError, e:
            if hasattr(e, 'reason'):
                logger.log(
                    u"NMJ: Could not contact Popcorn Hour on host %s: %s" %
                    (host, e.reason), logger.WARNING)
            elif hasattr(e, 'code'):
                logger.log(
                    u"NMJ: Problem with Popcorn Hour on host %s: %s" %
                    (host, e.code), logger.WARNING)
            return False
        except Exception, e:
            logger.log(u"NMJ: Unknown exception: " + ex(e), logger.ERROR)
            return False

        # try to parse the resulting XML
        try:
            et = etree.fromstring(response)
            result = et.findtext("returnValue")
        except SyntaxError, e:
            logger.log(
                u"Unable to parse XML returned from the Popcorn Hour: %s" %
                (e), logger.ERROR)
            return False

        # if the result was a number then consider that an error
        if int(result) > 0:
            logger.log(u"Popcorn Hour returned an errorcode: %s" % (result),
Example #41
0
class pyTivoNotifier:
    def notify_snatch(self, ep_name):
        pass

    def notify_download(self, ep_name):
        pass

    def notify_downloadable(self, ep_name):
        pass

    def notify_subtitle_download(self, ep_name, lang):
        pass

    def notify_git_update(self, new_version):
        pass

    def update_library(self, ep_obj):

        # Values from config

        if not sickbeard.USE_PYTIVO:
            return False

        host = sickbeard.PYTIVO_HOST
        shareName = sickbeard.PYTIVO_SHARE_NAME
        tsn = sickbeard.PYTIVO_TIVO_NAME

        # There are two more values required, the container and file.
        #
        # container: The share name, show name and season
        #
        # file: The file name
        #
        # Some slicing and dicing of variables is required to get at these values.
        #
        # There might be better ways to arrive at the values, but this is the best I have been able to
        # come up with.
        #

        # Calculated values

        showPath = ep_obj.show.location
        showName = ep_obj.show.name
        rootShowAndSeason = ek.ek(os.path.dirname, ep_obj.location)
        absPath = ep_obj.location

        # Some show names have colons in them which are illegal in a path location, so strip them out.
        # (Are there other characters?)
        showName = showName.replace(":", "")

        root = showPath.replace(showName, "")
        showAndSeason = rootShowAndSeason.replace(root, "")

        container = shareName + "/" + showAndSeason
        file = "/" + absPath.replace(root, "")

        # Finally create the url and make request
        requestUrl = "http://" + host + "/TiVoConnect?" + urlencode(
            {
                'Command': 'Push',
                'Container': container,
                'File': file,
                'tsn': tsn
            })

        logger.log(u"pyTivo notification: Requesting " + requestUrl,
                   logger.DEBUG)

        request = Request(requestUrl)

        try:
            response = urlopen(request)  #@UnusedVariable
        except HTTPError, e:
            if hasattr(e, 'reason'):
                logger.log(
                    u"pyTivo notification: Error, failed to reach a server - "
                    + str(e.reason), logger.ERROR)
                return False
            elif hasattr(e, 'code'):
                logger.log(
                    u"pyTivo notification: Error, the server couldn't fulfill the request - "
                    + str(e.code), logger.ERROR)
            return False
        except Exception, e:
            logger.log(u"PYTIVO: Unknown exception: " + ex(e), logger.ERROR)
            return False
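
The container/file slicing described in the comments above condenses to one URL; a standalone sketch with a two-way urlencode import so it runs on either Python line, and with every path argument purely illustrative.

import os

try:
    from urllib import urlencode          # Python 2
except ImportError:
    from urllib.parse import urlencode    # Python 3

def build_pytivo_push_url(host, share_name, tsn, show_path, show_name, episode_path):
    # colons are illegal in a path location, so strip them as above
    show_name = show_name.replace(':', '')

    root = show_path.replace(show_name, '')
    show_and_season = os.path.dirname(episode_path).replace(root, '')

    container = share_name + '/' + show_and_season
    file_part = '/' + episode_path.replace(root, '')

    return 'http://' + host + '/TiVoConnect?' + urlencode({
        'Command': 'Push',
        'Container': container,
        'File': file_part,
        'tsn': tsn,
    })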
Example #42
0
class ShowUpdater():
    def __init__(self):
        self.updateInterval = datetime.timedelta(hours=1)

    def run(self, force=False):

        # update at 3 AM
        run_updater_time = datetime.time(hour=3)

        update_datetime = datetime.datetime.now()
        update_date = update_datetime.date()

        logger.log(u"Checking update interval", logger.DEBUG)

        hour_diff = update_datetime.time().hour - run_updater_time.hour

        # if it's less than an interval after the update time then do an update (or if we're forcing it)
        if (0 <= hour_diff < self.updateInterval.seconds / 3600) or force:
            logger.log(u"Doing full update on all shows")
        else:
            return

        # clean out cache directory, remove everything > 12 hours old
        if sickbeard.CACHE_DIR:
            for indexer in sickbeard.indexerApi().indexers:
                cache_dir = sickbeard.indexerApi(indexer).cache
                logger.log(u"Trying to clean cache folder " + cache_dir)

                # Does our cache_dir exist?
                if not ek.ek(os.path.isdir, cache_dir):
                    logger.log(
                        u"Can't clean " + cache_dir + " if it doesn't exist",
                        logger.WARNING)
                else:
                    max_age = datetime.timedelta(hours=12)
                    # Get all our cache files
                    cache_files = ek.ek(os.listdir, cache_dir)

                    for cache_file in cache_files:
                        cache_file_path = ek.ek(os.path.join, cache_dir,
                                                cache_file)

                        if ek.ek(os.path.isfile, cache_file_path):
                            cache_file_modified = datetime.datetime.fromtimestamp(
                                ek.ek(os.path.getmtime, cache_file_path))

                            if update_datetime - cache_file_modified > max_age:
                                try:
                                    ek.ek(os.remove, cache_file_path)
                                except OSError, e:
                                    logger.log(
                                        u"Unable to clean " + cache_dir +
                                        ": " + repr(e) + " / " + str(e),
                                        logger.WARNING)
                                    break

        # select 10 'Ended' tv_shows updated more than 90 days ago to include in this update
        stale_should_update = []
        stale_update_date = (update_date -
                             datetime.timedelta(days=90)).toordinal()

        myDB = db.DBConnection()
        # last_update_date <= 90 days, sorted ASC because dates are ordinal
        sql_result = myDB.select(
            "SELECT indexer_id FROM tv_shows WHERE status = 'Ended' AND last_update_indexer <= ? ORDER BY last_update_indexer ASC LIMIT 10;",
            [stale_update_date])

        for cur_result in sql_result:
            stale_should_update.append(int(cur_result['indexer_id']))

        # start update process
        piList = []
        for curShow in sickbeard.showList:

            try:
                # if should_update returns True (not 'Ended') or show is selected stale 'Ended' then update, otherwise just refresh
                if curShow.should_update(
                        update_date=update_date
                ) or curShow.indexerid in stale_should_update:
                    curQueueItem = sickbeard.showQueueScheduler.action.updateShow(
                        curShow, True)  # @UndefinedVariable
                else:
                    logger.log(
                        u"Not updating episodes for show " + curShow.name +
                        " because it's marked as ended and last/next episode is not within the grace period.",
                        logger.DEBUG)
                    curQueueItem = sickbeard.showQueueScheduler.action.refreshShow(
                        curShow, True)  # @UndefinedVariable

                piList.append(curQueueItem)

            except (exceptions.CantUpdateException,
                    exceptions.CantRefreshException), e:
                logger.log(u"Automatic update failed: " + ex(e), logger.ERROR)
Example #43
0
    def _sendPushover(self, msg, title, userKey=None, apiKey=None):
        """
        Sends a pushover notification to the address provided
        
        msg: The message to send (unicode)
        title: The title of the message
        userKey: The pushover user id to send the message to (or to subscribe with)
        
        returns: True if the message succeeded, False otherwise
        """

        if userKey == None:
            userKey = sickbeard.PUSHOVER_USERKEY

        if apiKey == None:
            apiKey = sickbeard.PUSHOVER_APIKEY

        logger.log("Pushover API KEY in use: " + apiKey, logger.DEBUG)

        # build up the URL and parameters
        msg = msg.strip()

        # send the request to pushover
        try:
            conn = httplib.HTTPSConnection("api.pushover.net:443")
            conn.request(
                "POST", "/1/messages.json",
                urllib.urlencode({
                    "token": apiKey,
                    "user": userKey,
                    "title": title.encode('utf-8'),
                    "message": msg.encode('utf-8'),
                    'timestamp': int(time.time()),
                    "retry": 60,
                    "expire": 3600,
                }), {"Content-type": "application/x-www-form-urlencoded"})

        except urllib2.HTTPError, e:
            # if we get an error back that doesn't have an error code then who knows what's really happening
            if not hasattr(e, 'code'):
                logger.log("Pushover notification failed." + ex(e),
                           logger.ERROR)
                return False
            else:
                logger.log(
                    "Pushover notification failed. Error code: " + str(e.code),
                    logger.ERROR)

            # HTTP status 404 if the provided email address isn't a Pushover user.
            if e.code == 404:
                logger.log(
                    "Username is wrong/not a pushover email. Pushover will send an email to it",
                    logger.WARNING)
                return False

            # For HTTP status code 401's, it is because you are passing in either an invalid token, or the user has not added your service.
            elif e.code == 401:

                #HTTP status 401 if the user doesn't have the service added
                subscribeNote = self._sendPushover(msg, title, userKey, apiKey)
                if subscribeNote:
                    logger.log("Subscription send", logger.DEBUG)
                    return True
                else:
                    logger.log("Subscription could not be send", logger.ERROR)
                    return False

            # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
            elif e.code == 400:
                logger.log("Wrong data sent to pushover", logger.ERROR)
                return False

            # If you receive a HTTP status code of 429, it is because the message limit has been reached (free limit is 7,500)
            elif e.code == 429:
                logger.log(
                    "Pushover API message limit reached - try a different API key",
                    logger.ERROR)
                return False
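
The same POST can be written against Pushover's messages endpoint with requests; the token and user values are placeholders, and the retry/expire numbers are simply copied from the call above rather than tuned.

import time
import requests

PUSHOVER_URL = 'https://api.pushover.net/1/messages.json'

def send_pushover(api_key, user_key, title, message):
    payload = {
        'token': api_key,
        'user': user_key,
        'title': title,
        'message': message.strip(),
        'timestamp': int(time.time()),
        'retry': 60,
        'expire': 3600,
    }

    try:
        response = requests.post(PUSHOVER_URL, data=payload, timeout=30)
    except requests.RequestException:
        return False

    # Pushover answers 200 on success; 4xx responses carry an error description
    return response.status_code == 200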
Example #44
0
class TorrentRssProvider(generic.TorrentProvider):
    def __init__(self,
                 name,
                 url,
                 cookies='',
                 search_mode='eponly',
                 search_fallback=False,
                 backlog_only=False):
        generic.TorrentProvider.__init__(self, name)
        self.cache = TorrentRssCache(self)
        self.url = re.sub('\/$', '', url)  # strip any trailing slash
        self.enabled = True
        self.ratio = None
        self.supportsBacklog = False

        self.search_mode = search_mode
        self.search_fallback = search_fallback
        self.backlog_only = backlog_only
        self.cookies = cookies

    def configStr(self):
        return self.name + '|' + self.url + '|' + self.cookies + '|' + str(
            int(self.enabled)) + '|' + self.search_mode + '|' + str(
                int(self.search_fallback)) + '|' + str(int(self.backlog_only))

    def imageName(self):
        if ek.ek(
                os.path.isfile,
                ek.ek(os.path.join, sickbeard.PROG_DIR, 'gui',
                      sickbeard.GUI_NAME, 'images', 'providers',
                      self.getID() + '.png')):
            return self.getID() + '.png'
        return 'torrentrss.png'

    def isEnabled(self):
        return self.enabled

    def _get_title_and_url(self, item):

        title, url = None, None

        title = item.title

        if title:
            title = u'' + title
            title = title.replace(' ', '.')

        attempt_list = [
            lambda: item.torrent_magneturi, lambda: item.enclosures[0].href,
            lambda: item.link
        ]

        for cur_attempt in attempt_list:
            try:
                url = cur_attempt()
            except:
                continue

            if title and url:
                return (title, url)

        return (title, url)

    def validateRSS(self):

        try:
            if self.cookies:
                cookie_validator = re.compile("^(\w+=\w+)(;\w+=\w+)*$")
                if not cookie_validator.match(self.cookies):
                    return (False, 'Cookie is not correctly formatted: ' +
                            self.cookies)

            data = self.cache._getRSSData()
            if not data:
                return (False, 'No data returned from url: ' + self.url)

            items = data.entries
            if not items:
                return (False, 'No items found in the RSS feed ' + self.url)

            (title, url) = self._get_title_and_url(items[0])

            if not title:
                return (False, 'Unable to get title from first item')

            if not url:
                return (False, 'Unable to get torrent url from first item')

            if url.startswith('magnet:') and re.search(
                    'urn:btih:([\w]{32,40})', url):
                return (True, 'RSS feed parsed correctly')
            else:
                if self.cookies:
                    requests.utils.add_dict_to_cookiejar(
                        self.session.cookies,
                        dict(
                            x.rsplit('=', 1)
                            for x in (self.cookies.split(';'))))
                torrent_file = self.getURL(url)
                try:
                    bdecode(torrent_file)
                except Exception, e:
                    self.dumpHTML(torrent_file)
                    return (False,
                            'Torrent link is not a valid torrent file: ' +
                            ex(e))

            return (True, 'RSS feed parsed correctly')

        except Exception, e:
            return (False, 'Error when trying to load RSS: ' + ex(e))
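
The cookie check in validateRSS accepts only name=value pairs joined by semicolons; a tiny sketch of that validation plus the conversion into the dict handed to add_dict_to_cookiejar, reusing the same regular expression.

import re

COOKIE_RE = re.compile(r'^(\w+=\w+)(;\w+=\w+)*$')

def parse_cookie_string(cookies):
    if not COOKIE_RE.match(cookies):
        raise ValueError('Cookie is not correctly formatted: ' + cookies)
    # each ';'-separated piece is split once on '=' into a (name, value) pair
    return dict(part.split('=', 1) for part in cookies.split(';'))

# parse_cookie_string('uid=1234;pass=abcd') -> {'uid': '1234', 'pass': 'abcd'}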
Example #45
0
    def _request(self,
                 method='get',
                 params=None,
                 data=None,
                 files=None,
                 **kwargs):

        params = params or {}

        if time.time() > self.last_time + 1800 or not self.auth:
            self.last_time = time.time()
            self._get_auth()

        logger.log(
            '%s: sending %s request to %s with ...' %
            (self.name, method.upper(), self.url), logger.DEBUG)
        lines = [('params', (str(params), '')[not params]),
                 ('data', (str(data), '')[not data]),
                 ('files', (str(files), '')[not files]),
                 ('json', (str(kwargs.get('json')),
                           '')[not kwargs.get('json')])]
        m, c = 300, 100
        type_chunks = [(linetype,
                        [ln[i:i + c] for i in range(0, min(len(ln), m), c)])
                       for linetype, ln in lines if ln]
        for (arg, chunks) in type_chunks:
            output = []
            nch = len(chunks) - 1
            for i, seg in enumerate(chunks):
                if nch == i and 'files' == arg:
                    sample = ' ..excerpt(%s/%s)' % (m, len(lines[2][1]))
                    seg = seg[0:c - (len(sample) - 2)] + sample
                output += [
                    '%s: request %s= %s%s%s' % (self.name, arg,
                                                ('', '..')[bool(i)], seg,
                                                ('', '..')[i != nch])
                ]
            for out in output:
                logger.log(out, logger.DEBUG)

        if not self.auth:
            logger.log('%s: Authentication Failed' % self.name, logger.ERROR)
            return False
        try:
            response = self.session.__getattribute__(method)(self.url,
                                                             params=params,
                                                             data=data,
                                                             files=files,
                                                             timeout=120,
                                                             verify=False,
                                                             **kwargs)
        except requests.exceptions.ConnectionError as e:
            logger.log('%s: Unable to connect %s' % (self.name, ex(e)),
                       logger.ERROR)
            return False
        except (requests.exceptions.MissingSchema,
                requests.exceptions.InvalidURL):
            logger.log('%s: Invalid Host' % self.name, logger.ERROR)
            return False
        except requests.exceptions.HTTPError as e:
            logger.log('%s: Invalid HTTP Request %s' % (self.name, ex(e)),
                       logger.ERROR)
            return False
        except requests.exceptions.Timeout as e:
            logger.log('%s: Connection Timeout %s' % (self.name, ex(e)),
                       logger.ERROR)
            return False
        except Exception as e:
            logger.log(
                '%s: Unknown exception raised when sending torrent to %s: %s' %
                (self.name, self.name, ex(e)), logger.ERROR)
            return False

        if 401 == response.status_code:
            logger.log(
                '%s: Invalid Username or Password, check your config' %
                self.name, logger.ERROR)
            return False

        if response.status_code in http_error_code.keys():
            logger.log(
                '%s: %s' % (self.name, http_error_code[response.status_code]),
                logger.DEBUG)
            return False

        logger.log(
            '%s: Response to %s request is %s' %
            (self.name, method.upper(), response.text), logger.DEBUG)

        return response
Example #46
0
        logger.log(self.name + u': Requested a ' + method.upper() + ' connection to url '+ self.url + ' with Params= ' + str(params) + ' Data=' + str(data if data else 'None')[0:99] + ('...' if len(data if data else 'None') > 100 else ''), logger.DEBUG)
        
        if not self.auth:
            logger.log(self.name + u': Authentication Failed', logger.ERROR)
            return False
        
        try:
            self.response = self.session.__getattribute__(method)(self.url, params=params, data=data, files=files)
        except requests.exceptions.ConnectionError, e:
            logger.log(self.name + u': Unable to connect ' +ex(e), logger.ERROR)
            return False
        except (requests.exceptions.MissingSchema, requests.exceptions.InvalidURL):
            logger.log(self.name + u': Invalid Host', logger.ERROR)
            return False
        except requests.exceptions.HTTPError, e:
            logger.log(self.name + u': Invalid HTTP Request ' + ex(e), logger.ERROR)
            return False
        except Exception, e:
            logger.log(self.name + u': Unknown exception raised when sending torrent to ' + self.name + ': ' + ex(e), logger.ERROR)
            return False

        if self.response.status_code == 401:
            logger.log(self.name + u': Invalid Username or Password, check your config', logger.ERROR)    
            return False
        
        if self.response.status_code in http_error_code.keys():
            logger.log(self.name + u': ' + http_error_code[self.response.status_code], logger.DEBUG)
            return False
        
        logger.log(self.name + u': Response to '+ method.upper() + ' request is ' + self.response.text, logger.DEBUG)
        
Example #47
0
    def update(self):
        """
        Downloads the latest source tarball from github and installs it over the existing version.
        """
        base_url = 'https://github.com/' + self.github_repo_user + '/' + self.github_repo
        tar_download_url = base_url + '/tarball/' + self.branch
        version_path = ek.ek(os.path.join, sickbeard.PROG_DIR, u'version.txt')

        try:
            # prepare the update dir
            sb_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR,
                                  u'sb-update')

            if os.path.isdir(sb_update_dir):
                logger.log(u"Clearing out update folder " + sb_update_dir +
                           " before extracting")
                shutil.rmtree(sb_update_dir)

            logger.log(u"Creating update folder " + sb_update_dir +
                       " before extracting")
            os.makedirs(sb_update_dir)

            # retrieve file
            logger.log(u"Downloading update from " + repr(tar_download_url))
            tar_download_path = os.path.join(sb_update_dir, u'sb-update.tar')
            urllib.urlretrieve(tar_download_url, tar_download_path)

            if not ek.ek(os.path.isfile, tar_download_path):
                logger.log(
                    u"Unable to retrieve new version from " +
                    tar_download_url + ", can't update", logger.ERROR)
                return False

            if not ek.ek(tarfile.is_tarfile, tar_download_path):
                logger.log(
                    u"Retrieved version from " + tar_download_url +
                    " is corrupt, can't update", logger.ERROR)
                return False

            # extract to sb-update dir
            logger.log(u"Extracting file " + tar_download_path)
            tar = tarfile.open(tar_download_path)
            tar.extractall(sb_update_dir)
            tar.close()

            # delete the downloaded tar file
            logger.log(u"Deleting file " + tar_download_path)
            os.remove(tar_download_path)

            # find update dir name
            update_dir_contents = [
                x for x in os.listdir(sb_update_dir)
                if os.path.isdir(os.path.join(sb_update_dir, x))
            ]
            if len(update_dir_contents) != 1:
                logger.log(
                    u"Invalid update data, update failed: " +
                    str(update_dir_contents), logger.ERROR)
                return False
            content_dir = os.path.join(sb_update_dir, update_dir_contents[0])

            # walk temp folder and move files to main folder
            logger.log(u"Moving files from " + content_dir + " to " +
                       sickbeard.PROG_DIR)
            for dirname, dirnames, filenames in os.walk(
                    content_dir):  # @UnusedVariable
                dirname = dirname[len(content_dir) + 1:]
                for curfile in filenames:
                    old_path = os.path.join(content_dir, dirname, curfile)
                    new_path = os.path.join(sickbeard.PROG_DIR, dirname,
                                            curfile)

                    #Avoid DLL access problem on WIN32/64
                    #These files needing to be updated manually
                    #or find a way to kill the access from memory
                    if curfile in ('unrar.dll', 'unrar64.dll'):
                        try:
                            os.chmod(new_path, stat.S_IWRITE)
                            os.remove(new_path)
                            os.renames(old_path, new_path)
                        except Exception, e:
                            logger.log(
                                u"Unable to update " + new_path + ': ' + ex(e),
                                logger.DEBUG)
                            os.remove(
                                old_path
                            )  # Trash the updated file without moving in new path
                        continue

                    if os.path.isfile(new_path):
                        os.remove(new_path)
                    os.renames(old_path, new_path)

            # update version.txt with commit hash
            try:
                with open(version_path, 'w') as ver_file:
                    ver_file.write(self._newest_commit_hash)
            except EnvironmentError, e:
                logger.log(
                    u"Unable to write version file, update not complete: " +
                    ex(e), logger.ERROR)
                return False
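
The download-and-extract half of the updater maps onto plain library calls; in this sketch requests stands in for urllib.urlretrieve so it runs on either Python line, and the work directory is whatever scratch path you pass in.

import os
import shutil
import tarfile
import requests

def fetch_and_extract_tarball(tar_url, work_dir):
    # start from a clean scratch folder
    if os.path.isdir(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)

    tar_path = os.path.join(work_dir, 'update.tar')
    resp = requests.get(tar_url, stream=True, timeout=120)
    with open(tar_path, 'wb') as out:
        for chunk in resp.iter_content(chunk_size=64 * 1024):
            out.write(chunk)

    if not tarfile.is_tarfile(tar_path):
        raise ValueError('Downloaded file is not a tar archive: ' + tar_path)

    tar = tarfile.open(tar_path)
    try:
        tar.extractall(work_dir)
    finally:
        tar.close()
    os.remove(tar_path)

    # the tarball unpacks into a single top-level folder; return its path
    contents = [d for d in os.listdir(work_dir)
                if os.path.isdir(os.path.join(work_dir, d))]
    return os.path.join(work_dir, contents[0]) if len(contents) == 1 else None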
Example #48
0
    def downloadResult(self, result):
        """
        Save the result to disk.
        """

        # check for auth
        if not self._doLogin():
            return False

        if self.providerType == GenericProvider.TORRENT:
            try:
                torrent_hash = re.findall('urn:btih:([\w]{32,40})',
                                          result.url)[0].upper()

                if len(torrent_hash) == 32:
                    torrent_hash = b16encode(b32decode(torrent_hash)).lower()

                if not torrent_hash:
                    logger.log(
                        "Unable to extract torrent hash from link: " +
                        ex(result.url), logger.ERROR)
                    return False

                urls = [
                    'http://torcache.net/torrent/' + torrent_hash + '.torrent',
                    'http://torrage.com/torrent/' + torrent_hash + '.torrent',
                    'http://zoink.it/torrent/' + torrent_hash + '.torrent',
                ]
            except:
                urls = [result.url]

            filename = ek.ek(
                os.path.join, sickbeard.TORRENT_DIR,
                helpers.sanitizeFileName(result.name) + '.' +
                self.providerType)
        elif self.providerType == GenericProvider.NZB:
            urls = [result.url]

            filename = ek.ek(
                os.path.join, sickbeard.NZB_DIR,
                helpers.sanitizeFileName(result.name) + '.' +
                self.providerType)
        else:
            return

        for url in urls:
            if helpers.download_file(url, filename, session=self.session):
                logger.log(u"Downloading a result from " + self.name + " at " +
                           url)

                if self.providerType == GenericProvider.TORRENT:
                    logger.log(u"Saved magnet link to " + filename,
                               logger.INFO)
                else:
                    logger.log(u"Saved result to " + filename, logger.INFO)

                if self._verify_download(filename):
                    return True

        logger.log(u"Failed to download result", logger.ERROR)
        return False
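
The base32-to-hex conversion applied to 32-character magnet hashes is pure standard library; this sketch isolates that step and expands the hash into the same cache URLs listed above (kept purely as examples, those services are long gone).

import re
from base64 import b16encode, b32decode

CACHE_URL_TEMPLATES = [
    'http://torcache.net/torrent/%s.torrent',
    'http://torrage.com/torrent/%s.torrent',
    'http://zoink.it/torrent/%s.torrent',
]

def cache_urls_from_magnet(magnet_url):
    match = re.search(r'urn:btih:(\w{32,40})', magnet_url)
    if not match:
        return []

    torrent_hash = match.group(1).upper()
    if len(torrent_hash) == 32:
        # 32 characters means base32; re-encode as the 40-character hex form
        torrent_hash = b16encode(b32decode(torrent_hash)).upper()
    if isinstance(torrent_hash, bytes):
        torrent_hash = torrent_hash.decode('ascii')  # b16encode returns bytes on Python 3

    return [template % torrent_hash for template in CACHE_URL_TEMPLATES]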
Example #49
0
def xem_refresh(indexer_id, indexer, force=False):
    """
    Refresh data from xem for a tv show

    @param indexer_id: int
    @param indexer: int
    @param force: bool
    """
    if None is indexer_id:
        return

    indexer_id = int(indexer_id)
    indexer = int(indexer)

    if 'xem_origin' not in sickbeard.indexerApi(
            indexer).config or indexer_id not in xem_ids_list.get(indexer, []):
        return

    # XEM API URL
    url = 'http://thexem.de/map/all?id=%s&origin=%s&destination=scene' % (
        indexer_id, sickbeard.indexerApi(indexer).config['xem_origin'])

    max_refresh_age_secs = 86400  # 1 day

    my_db = db.DBConnection()
    rows = my_db.select(
        'SELECT last_refreshed'
        ' FROM xem_refresh'
        ' WHERE indexer = ? AND indexer_id = ?', [indexer, indexer_id])
    if rows:
        last_refresh = int(rows[0]['last_refreshed'])
        refresh = int(time.mktime(datetime.datetime.today().timetuple())
                      ) > last_refresh + max_refresh_age_secs
    else:
        refresh = True

    if refresh or force:
        logger.log(
            u'Looking up XEM scene mapping for show %s on %s' %
            (indexer_id, sickbeard.indexerApi(indexer).name), logger.DEBUG)

        # mark refreshed
        my_db.upsert(
            'xem_refresh', {
                'indexer':
                indexer,
                'last_refreshed':
                int(time.mktime(datetime.datetime.today().timetuple()))
            }, {'indexer_id': indexer_id})

        try:
            parsed_json = sickbeard.helpers.getURL(url, json=True, timeout=90)
            if not parsed_json or '' == parsed_json:
                logger.log(
                    u'No XEM data for show %s on %s' %
                    (indexer_id, sickbeard.indexerApi(indexer).name),
                    logger.MESSAGE)
                return

            if 'success' in parsed_json['result']:
                cl = []
                for entry in filter(lambda x: 'scene' in x,
                                    parsed_json['data']):
                    # use scene2 for doubles
                    scene = 'scene%s' % ('', '_2')['scene_2' in entry]
                    cl.append([
                        'UPDATE tv_episodes'
                        ' SET scene_season = ?, scene_episode = ?, scene_absolute_number = ?'
                        ' WHERE showid = ? AND season = ? AND episode = ?',
                        [
                            entry[scene]['season'], entry[scene]['episode'],
                            entry[scene]['absolute'], indexer_id,
                            entry[sickbeard.indexerApi(
                                indexer).config['xem_origin']]['season'],
                            entry[sickbeard.indexerApi(
                                indexer).config['xem_origin']]['episode']
                        ]
                    ])

                if 0 < len(cl):
                    my_db = db.DBConnection()
                    my_db.mass_action(cl)
            else:
                logger.log(
                    u'Empty lookup result - no XEM data for show %s on %s' %
                    (indexer_id, sickbeard.indexerApi(indexer).name),
                    logger.DEBUG)
        except Exception as e:
            logger.log(
                u'Exception while refreshing XEM data for show ' +
                str(indexer_id) + ' on ' + sickbeard.indexerApi(indexer).name +
                ': ' + ex(e), logger.WARNING)
            logger.log(traceback.format_exc(), logger.ERROR)
Example #50
0
    def send_torrent(self, result):

        r_code = False

        logger.log('Calling %s Client' % self.name, logger.DEBUG)

        if not self._get_auth():
            logger.log('%s: Authentication Failed' % self.name, logger.ERROR)
            return r_code

        try:
            # Sets per provider seed ratio
            result.ratio = result.provider.seed_ratio()

            result = self._get_torrent_hash(result)

            if result.url.startswith('magnet'):
                r_code = self._add_torrent_uri(result)
            else:
                r_code = self._add_torrent_file(result)

            if not r_code:
                logger.log(
                    '%s: Unable to send Torrent: Return code undefined (already exists in client?)'
                    % self.name, logger.ERROR)
                return False

            if not self._set_torrent_pause(result):
                logger.log(
                    '%s: Unable to set the pause for Torrent' % self.name,
                    logger.ERROR)

            if not self._set_torrent_label(result):
                logger.log(
                    '%s: Unable to set the label for Torrent' % self.name,
                    logger.ERROR)

            if not self._set_torrent_ratio(result):
                logger.log(
                    '%s: Unable to set the ratio for Torrent' % self.name,
                    logger.ERROR)

            if not self._set_torrent_seed_time(result):
                logger.log(
                    '%s: Unable to set the seed time for Torrent' % self.name,
                    logger.ERROR)

            if not self._set_torrent_path(result):
                logger.log(
                    '%s: Unable to set the path for Torrent' % self.name,
                    logger.ERROR)

            if 0 != result.priority and not self._set_torrent_priority(result):
                logger.log(
                    '%s: Unable to set priority for Torrent' % self.name,
                    logger.ERROR)

        except Exception as e:
            logger.log(
                '%s: Failed sending torrent: %s - %s' %
                (self.name, result.name, result.hash), logger.ERROR)
            logger.log(
                '%s: Exception raised when sending torrent: %s' %
                (self.name, ex(e)), logger.DEBUG)
            return r_code

        return r_code
Example #51
0
class QueueItemAdd(ShowQueueItem):
    def __init__(self, indexer, indexer_id, showDir, default_status, quality,
                 flatten_folders, lang, subtitles, anime, scene, paused,
                 blacklist, whitelist, default_status_after):

        self.indexer = indexer
        self.indexer_id = indexer_id
        self.showDir = showDir
        self.default_status = default_status
        self.quality = quality
        self.flatten_folders = flatten_folders
        self.lang = lang
        self.subtitles = subtitles
        self.anime = anime
        self.scene = scene
        self.paused = paused
        self.blacklist = blacklist
        self.whitelist = whitelist
        self.default_status_after = default_status_after

        self.show = None

        # this will initialize self.show to None
        ShowQueueItem.__init__(self, ShowQueueActions.ADD, self.show)

        # Process add show in priority
        self.priority = generic_queue.QueuePriorities.HIGH

    def _getName(self):
        """
        Returns the show name if there is a show object created, if not returns
        the dir that the show is being added to.
        """
        if self.show is None:
            return self.showDir
        return self.show.name

    show_name = property(_getName)

    def _isLoading(self):
        """
        Returns True while we still only know the folder name (no show object
        has been created yet), or False once the show object exists.
        """
        return self.show is None

    isLoading = property(_isLoading)

    def run(self):

        ShowQueueItem.run(self)

        logger.log(u"Starting to add show " + self.showDir)
        # make sure the Indexer IDs are valid
        try:

            lINDEXER_API_PARMS = sickbeard.indexerApi(
                self.indexer).api_params.copy()
            if self.lang:
                lINDEXER_API_PARMS['language'] = self.lang

            logger.log(u"" + str(sickbeard.indexerApi(self.indexer).name) +
                       ": " + repr(lINDEXER_API_PARMS))

            t = sickbeard.indexerApi(
                self.indexer).indexer(**lINDEXER_API_PARMS)
            s = t[self.indexer_id]

            # this usually only happens if they have an NFO in their show dir which gave us an Indexer ID that has no proper English version of the show
            if getattr(s, 'seriesname', None) is None:
                logger.log(
                    u"Show in " + self.showDir + " has no name on " +
                    str(sickbeard.indexerApi(self.indexer).name) +
                    ", probably the wrong language used to search with.",
                    logger.ERROR)
                ui.notifications.error(
                    "Unable to add show",
                    "Show in " + self.showDir + " has no name on " +
                    str(sickbeard.indexerApi(self.indexer).name) +
                    ", probably the wrong language. Delete .nfo and add manually in the correct language."
                )
                self._finishEarly()
                return
            # if the show has no episodes/seasons
            if not s:
                logger.log(
                    u"Show " + str(s['seriesname']) + " is on " +
                    str(sickbeard.indexerApi(self.indexer).name) +
                    " but contains no season/episode data.", logger.ERROR)
                ui.notifications.error(
                    "Unable to add show", "Show " + str(s['seriesname']) +
                    " is on " + str(sickbeard.indexerApi(self.indexer).name) +
                    " but contains no season/episode data.")
                self._finishEarly()
                return
        except Exception, e:
            logger.log(
                u"Show name with ID %s don't exist in %s anymore. Please change/delete your local .nfo file or remove it from your TRAKT watchlist"
                % (self.indexer_id, sickbeard.indexerApi(self.indexer).name),
                logger.ERROR)
            ui.notifications.error(
                "Unable to add show",
                "Unable to look up the show in " + self.showDir + " on " +
                str(sickbeard.indexerApi(self.indexer).name) + " using ID " +
                str(self.indexer_id) +
                ", not using the NFO. Delete .nfo and try adding manually again."
            )
            self._finishEarly()
            return

        try:
            newShow = TVShow(self.indexer, self.indexer_id, self.lang)
            newShow.loadFromIndexer()

            self.show = newShow

            # set up initial values
            self.show.location = self.showDir
            self.show.subtitles = self.subtitles if self.subtitles is not None else sickbeard.SUBTITLES_DEFAULT
            self.show.quality = self.quality if self.quality else sickbeard.QUALITY_DEFAULT
            self.show.flatten_folders = self.flatten_folders if self.flatten_folders is not None else sickbeard.FLATTEN_FOLDERS_DEFAULT
            self.show.anime = self.anime if self.anime is not None else sickbeard.ANIME_DEFAULT
            self.show.scene = self.scene if self.scene is not None else sickbeard.SCENE_DEFAULT
            self.show.paused = self.paused if self.paused is not None else False

            # set up default new/missing episode status
            self.show.default_ep_status = self.default_status
            logger.log(
                u"Setting all episodes to the specified default status: " +
                str(self.show.default_ep_status))

            if self.show.anime:
                self.show.release_groups = BlackAndWhiteList(
                    self.show.indexerid)
                if self.blacklist:
                    self.show.release_groups.set_black_keywords(self.blacklist)
                if self.whitelist:
                    self.show.release_groups.set_white_keywords(self.whitelist)

            # be smartish about this
            #if self.show.genre and "talk show" in self.show.genre.lower():
            #    self.show.air_by_date = 1
            #if self.show.genre and "documentary" in self.show.genre.lower():
            #    self.show.air_by_date = 0
            #if self.show.classification and "sports" in self.show.classification.lower():
            #    self.show.sports = 1

        except sickbeard.indexer_exception, e:
            logger.log(
                u"Unable to add show due to an error with " +
                sickbeard.indexerApi(self.indexer).name + ": " + ex(e),
                logger.ERROR)
            if self.show:
                ui.notifications.error(
                    "Unable to add " + str(self.show.name) +
                    " due to an error with " +
                    sickbeard.indexerApi(self.indexer).name + "")
            else:
                ui.notifications.error(
                    "Unable to add show due to an error with " +
                    sickbeard.indexerApi(self.indexer).name + "")
            self._finishEarly()
            return
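A minimal usage sketch (all concrete values and the add_item call are assumptions, not taken from this snippet): the item is normally built by the web interface handler and pushed onto the show queue, which later calls run() on it.

# Hypothetical wiring; INDEXER_TVDB, SKIPPED, Quality.SDTV and WANTED are
# assumed to come from sickbeard's constants.
queue_item = QueueItemAdd(INDEXER_TVDB, 12345, '/tv/Example Show',
                          default_status=SKIPPED, quality=Quality.SDTV,
                          flatten_folders=False, lang='en', subtitles=False,
                          anime=False, scene=False, paused=False,
                          blacklist=None, whitelist=None,
                          default_status_after=WANTED)
sickbeard.showQueueScheduler.action.add_item(queue_item)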
Example #52
0
    def getURL(self, url, headers=None):

        if not headers:
            headers = {}

        # Glype proxies do not support direct linking.
        # We have to fake a search on the proxy site to get the data.
        if self.proxy.isEnabled():
            headers.update({'referer': self.proxy.getProxyURL()})
            
        result = None

        try:
            r = requests.get(url, headers=headers)
        except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
            logger.log(u"Error loading "+self.name+" URL: " + str(sys.exc_info()) + " - " + ex(e), logger.ERROR)
            return None
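The snippet ends inside the try block. A hedged sketch of a plausible continuation (the status-code handling and return value are assumptions, not the original code): check the response status and hand back the body, mirroring the connection-error handling above.

        # Hypothetical continuation of getURL, assumed rather than original.
        if r.status_code not in (200, 302):
            logger.log(u"Requested URL " + url + " returned status code " +
                       str(r.status_code), logger.WARNING)
            return None

        result = r.content
        return result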
Example #53
0
        if not ek.ek(os.path.isfile, restore_file):
            logger.log(u"Not restoring, " + restore_file + " doesn't exist",
                       logger.DEBUG)
            break

        try:
            logger.log(
                u"Trying to restore " + restore_file + " to " + new_file,
                logger.DEBUG)
            shutil.copy(restore_file, new_file)
            logger.log(u"Restore done", logger.DEBUG)
            break
        except Exception, e:
            logger.log(
                u"Error while trying to restore " + restore_file + ": " +
                ex(e), logger.WARNING)
            numTries += 1
            time.sleep(1)
            logger.log(u"Trying again.", logger.DEBUG)

        if numTries >= 10:
            logger.log(
                u"Unable to restore " + restore_file + " to " + new_file +
                " please do it manually.", logger.ERROR)
            return False

    return True


# try to convert to int, if it fails the default will be returned
def tryInt(s, s_default=0):
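The tryInt body is cut off by the snippet boundary. Given the comment above it, a likely implementation is along these lines (a sketch, not the verbatim original):

def tryInt(s, s_default=0):
    # Convert to int where possible, otherwise fall back to the default.
    try:
        return int(s)
    except (ValueError, TypeError):
        return s_default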
Example #54
0
                    sickbeard.indexerApi(self.indexer).name + "")
            self._finishEarly()
            return

        except exceptions.MultipleShowObjectsException:
            logger.log(
                u"The show in " + self.showDir +
                " is already in your show list, skipping", logger.WARNING)
            ui.notifications.error(
                'Show skipped', "The show in " + self.showDir +
                " is already in your show list")
            self._finishEarly()
            return

        except Exception, e:
            logger.log(u"Error trying to add show: " + ex(e), logger.ERROR)
            logger.log(traceback.format_exc(), logger.DEBUG)
            self._finishEarly()
            raise

        logger.log(u"Retrieving show info from IMDb", logger.DEBUG)
        try:
            self.show.loadIMDbInfo()
        except imdb_exceptions.IMDbError, e:
            logger.log(u" Something wrong on IMDb api: " + ex(e),
                       logger.WARNING)
        except Exception, e:
            logger.log(u"Error loading IMDb info: " + ex(e), logger.ERROR)

        try:
            self.show.saveToDB()
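_finishEarly is called throughout these QueueItemAdd snippets but never shown. A hedged guess at its shape, based only on how it is used here; the deleteShow and finish calls are assumptions:

    def _finishEarly(self):
        # Hypothetical sketch: roll back a partially added show and mark the
        # queue item as done so the queue can move on.
        if self.show is not None:
            self.show.deleteShow()
        self.finish()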
Example #55
0
    def update(self):

        zip_download_url = self._find_newest_version(True)
        logger.log(u"new_link: " + repr(zip_download_url), logger.DEBUG)

        if not zip_download_url:
            logger.log(
                u"Unable to find a new version link on google code, not updating"
            )
            return False

        try:
            # prepare the update dir
            sb_update_dir = ek.ek(os.path.join, sickbeard.PROG_DIR,
                                  u'sb-update')

            if os.path.isdir(sb_update_dir):
                logger.log(u"Clearing out update folder " + sb_update_dir +
                           " before extracting")
                shutil.rmtree(sb_update_dir)

            logger.log(u"Creating update folder " + sb_update_dir +
                       " before extracting")
            os.makedirs(sb_update_dir)

            # retrieve file
            logger.log(u"Downloading update from " + zip_download_url)
            zip_download_path = os.path.join(sb_update_dir, u'sb-update.zip')
            urllib.urlretrieve(zip_download_url, zip_download_path)

            if not ek.ek(os.path.isfile, zip_download_path):
                logger.log(
                    u"Unable to retrieve new version from " +
                    zip_download_url + ", can't update", logger.ERROR)
                return False

            if not ek.ek(zipfile.is_zipfile, zip_download_path):
                logger.log(
                    u"Retrieved version from " + zip_download_url +
                    " is corrupt, can't update", logger.ERROR)
                return False

            # extract to sb-update dir
            logger.log(u"Unzipping from " + str(zip_download_path) + " to " +
                       sb_update_dir)
            update_zip = zipfile.ZipFile(zip_download_path, 'r')
            update_zip.extractall(sb_update_dir)
            update_zip.close()

            # delete the zip
            logger.log(u"Deleting zip file from " + str(zip_download_path))
            os.remove(zip_download_path)

            # find update dir name
            update_dir_contents = [
                x for x in os.listdir(sb_update_dir)
                if os.path.isdir(os.path.join(sb_update_dir, x))
            ]

            if len(update_dir_contents) != 1:
                logger.log(
                    u"Invalid update data, update failed. Maybe try deleting your sb-update folder?",
                    logger.ERROR)
                return False

            content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
            old_update_path = os.path.join(content_dir, u'updater.exe')
            new_update_path = os.path.join(sickbeard.PROG_DIR, u'updater.exe')
            logger.log(u"Copying new update.exe file from " + old_update_path +
                       " to " + new_update_path)
            shutil.move(old_update_path, new_update_path)

        except Exception, e:
            logger.log(u"Error while trying to update: " + ex(e), logger.ERROR)
            return False
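zipfile.is_zipfile above only checks the file's magic bytes. If a stronger integrity check were wanted before extracting, zipfile's built-in CRC test could be used; a small self-contained sketch (not part of the original updater):

import zipfile

def zip_is_intact(path):
    # testzip() returns the name of the first corrupt member, or None when
    # every member passes its CRC check.
    try:
        zf = zipfile.ZipFile(path, 'r')
        try:
            return zf.testzip() is None
        finally:
            zf.close()
    except zipfile.BadZipfile:
        return False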
Example #56
0
    def _find_info(self):
        """
        For a given file, try to find the show, season, episode(s), quality and version.
        """

        show = season = quality = version = None
        episodes = []

        # try to look up the nzb in history
        attempt_list = [self._history_lookup,

                        # try to analyze the nzb name
                        lambda: self._analyze_name(self.nzb_name),

                        # try to analyze the file name
                        lambda: self._analyze_name(self.file_name),

                        # try to analyze the dir name
                        lambda: self._analyze_name(self.folder_name),

                        # try to analyze the file + dir names together
                        lambda: self._analyze_name(self.file_path),

                        # try to analyze the dir + file name together as one name
                        lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)
        ]

        # attempt every possible method to get our info
        for cur_attempt in attempt_list:

            try:
                (cur_show, cur_season, cur_episodes, cur_quality, cur_version) = cur_attempt()
            except (InvalidNameException, InvalidShowException), e:
                logger.log(u"Unable to parse, skipping: " + ex(e), logger.DEBUG)
                continue

            if not cur_show:
                continue
            else:
                show = cur_show

            if cur_quality and not (self.in_history and quality):
                quality = cur_quality

            # we only get the current version for anime from history, to prevent issues with old database entries
            if cur_version is not None:
                version = cur_version

            if cur_season is not None:
                season = cur_season
            if cur_episodes:
                episodes = cur_episodes

            # for air-by-date shows we need to look up the season/episode from database
            if season == -1 and show and episodes:
                self._log(
                    u"Looks like this is an air-by-date or sports show, attempting to convert the date to season/episode",
                    logger.DEBUG)
                airdate = episodes[0].toordinal()
                myDB = db.DBConnection()
                sql_result = myDB.select(
                    "SELECT season, episode FROM tv_episodes WHERE showid = ? and indexer = ? and airdate = ?",
                    [show.indexerid, show.indexer, airdate])

                if sql_result:
                    season = int(sql_result[0][0])
                    episodes = [int(sql_result[0][1])]
                else:
                    self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(
                        show.indexerid) + u", skipping", logger.DEBUG)
                    # we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
                    episodes = []
                    continue

            # if there's no season then we can hopefully just use 1 automatically
            elif season is None and show:
                myDB = db.DBConnection()
                numseasonsSQlResult = myDB.select(
                    "SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and indexer = ? and season != 0",
                    [show.indexerid, show.indexer])
                if int(numseasonsSQlResult[0][0]) == 1 and season is None:
                    self._log(
                        u"Don't have a season number, but this show appears to only have 1 season, setting season number to 1...",
                        logger.DEBUG)
                    season = 1

            if show and season and episodes:
                return (show, season, episodes, quality, version)
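The attempt_list loop above is a general "first parser that succeeds wins" pattern; stripped of the SickBeard specifics it reduces to roughly the following sketch (the exception types are kept from the snippet and would need to be imported from the name parser):

def first_successful(attempts):
    # Run each zero-argument callable in order and return the first truthy
    # result; parsing failures simply move on to the next attempt.
    for attempt in attempts:
        try:
            result = attempt()
        except (InvalidNameException, InvalidShowException):
            continue
        if result:
            return result
    return None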
Example #57
0
        def _int_move_and_sym_link(cur_file_path, new_file_path):

            self._log(u"Moving then symbolic linking file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
            try:
                helpers.moveAndSymlinkFile(cur_file_path, new_file_path)
                helpers.chmodAsParent(new_file_path)
            except (IOError, OSError), e:
                self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                raise e
Example #58
0
        def _int_hard_link(cur_file_path, new_file_path):

            self._log(u"Hard linking file from " + cur_file_path + " to " + new_file_path, logger.DEBUG)
            try:
                helpers.hardlinkFile(cur_file_path, new_file_path)
                helpers.chmodAsParent(new_file_path)
            except (IOError, OSError), e:
                self._log("Unable to link file " + cur_file_path + " to " + new_file_path + ": " + ex(e), logger.ERROR)
                raise e
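helpers.hardlinkFile and helpers.moveAndSymlinkFile themselves are not shown in these snippets. A hedged sketch of what a hard-link helper plausibly does (the copy fallback is an assumption): try os.link first and fall back to a plain copy when linking is not possible.

import os
import shutil

def hardlink_file(src, dst):
    # Hypothetical helper sketch, not the original helpers.hardlinkFile.
    try:
        os.link(src, dst)
    except (AttributeError, OSError):
        # os.link is unavailable on this platform or the link failed
        # (cross-device, permissions); fall back to copying the file.
        shutil.copyfile(src, dst)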
Example #59
0
def findEpisode(episode, manualSearch=False):

    logger.log(u"Searching for " + episode.prettyName())

    foundResults = []

    didSearch = False

    for curProvider in providers.sortedProviderList():

        if not curProvider.isActive():
            continue

        # we check our results after every search string
        # this is done because in the future we will have an ordered list of all show aliases and release_group aliases
        # ordered by success rate ...

        # let's get all search strings
        # we use the method from the curProvider to accommodate the internal join functions
        # this way we do not break the special abilities of the providers e.g. nzbmatrix
        searchStrings = curProvider.get_episode_search_strings(episode)
        logger.log("All search string permutations (" + curProvider.name +
                   "):" + str(searchStrings))
        """
        try:
            searchStrings = list(set(searchStrings))
        except TypeError:
            pass
        """
        done_searching = False
        for searchString in searchStrings:
            try:
                curFoundResults = curProvider.findEpisode(
                    episode,
                    manualSearch=manualSearch,
                    searchString=searchString)
            except exceptions.AuthException, e:
                logger.log(u"Authentication error: " + ex(e), logger.ERROR)
                break  # break the search-string loop
            except Exception, e:
                logger.log(
                    u"Error while searching " + curProvider.name +
                    ", skipping: " + ex(e), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)
                break  # break the search-string loop

            didSearch = True

            # skip non-tv crap
            curFoundResults = filter(
                lambda x: show_name_helpers.filterBadReleases(
                    x.name) and show_name_helpers.isGoodResult(
                        x.name, episode.show, season=episode.season),
                curFoundResults)

            # loop all results and see if any of them are good enough that we can stop searching
            for cur_result in curFoundResults:
                done_searching = isFinalResult(cur_result)
                logger.log(
                    u"Should we stop searching after finding " +
                    cur_result.name + ": " + str(done_searching), logger.DEBUG)
                if done_searching:
                    break

            # if we are searching for an anime we are a little more loose:
            # we check every iteration for a usable result
            # in contrast, the isFinalResult function looks for a perfect result (best quality)
            # but this will accept any result that would have been picked in the end -> pickBestResult
            # and then stop and use that
            if episode.show.is_anime:
                logger.log(
                    u"We are searching an anime. i am checking if we got a good result with search provider "
                    + curProvider.name, logger.DEBUG)
                bestResult = pickBestResult(curFoundResults, show=episode.show)
                if bestResult:
                    return bestResult

            foundResults += curFoundResults
            # if we did find a result that's good enough to stop then don't continue
            # this breaks the search-string loop
            if done_searching:
                break
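The snippet stops inside the provider loop. A hedged sketch of the likely tail of findEpisode, based on the variables it maintains (didSearch, foundResults) and the pickBestResult call already used for anime above; the exact log message is an assumption:

    # Hypothetical tail, inferred from the surrounding code rather than verbatim.
    if not didSearch:
        logger.log(u"No NZB/Torrent providers found or enabled, check your settings",
                   logger.ERROR)

    return pickBestResult(foundResults, show=episode.show)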
Example #60
0
class WDTVMetadata(generic.GenericMetadata):
    """
    Metadata generation class for WDTV

    The following file structure is used:

    show_root/folder.jpg                    (poster)
    show_root/Season ##/folder.jpg          (season thumb)
    show_root/Season ##/filename.ext        (*)
    show_root/Season ##/filename.metathumb  (episode thumb)
    show_root/Season ##/filename.xml        (episode metadata)
    """
    def __init__(self,
                 show_metadata=False,
                 episode_metadata=False,
                 fanart=False,
                 poster=False,
                 banner=False,
                 episode_thumbnails=False,
                 season_posters=False,
                 season_banners=False,
                 season_all_poster=False,
                 season_all_banner=False):

        generic.GenericMetadata.__init__(self, show_metadata, episode_metadata,
                                         fanart, poster, banner,
                                         episode_thumbnails, season_posters,
                                         season_banners, season_all_poster,
                                         season_all_banner)

        self.name = 'WDTV'

        self._ep_nfo_extension = 'xml'

        self.poster_name = "folder.jpg"

        # web-ui metadata template
        self.eg_show_metadata = "<i>not supported</i>"
        self.eg_episode_metadata = "Season##\\<i>filename</i>.xml"
        self.eg_fanart = "<i>not supported</i>"
        self.eg_poster = "folder.jpg"
        self.eg_banner = "<i>not supported</i>"
        self.eg_episode_thumbnails = "Season##\\<i>filename</i>.metathumb"
        self.eg_season_posters = "Season##\\folder.jpg"
        self.eg_season_banners = "<i>not supported</i>"
        self.eg_season_all_poster = "<i>not supported</i>"
        self.eg_season_all_banner = "<i>not supported</i>"

    # Override with empty methods for unsupported features
    def retrieveShowMetadata(self, folder):
        # no show metadata generated, we abort this lookup function
        return (None, None, None)

    def create_show_metadata(self, show_obj, force=False):
        pass

    def update_show_indexer_metadata(self, show_obj):
        pass

    def get_show_file_path(self, show_obj):
        pass

    def create_fanart(self, show_obj):
        pass

    def create_banner(self, show_obj):
        pass

    def create_season_banners(self, show_obj):
        pass

    def create_season_all_poster(self, show_obj):
        pass

    def create_season_all_banner(self, show_obj):
        pass

    def get_episode_thumb_path(self, ep_obj):
        """
        Returns the path where the episode thumbnail should be stored. Defaults to
        the same path as the episode file but with a .metathumb extension.

        ep_obj: a TVEpisode instance for which to create the thumbnail
        """
        if ek.ek(os.path.isfile, ep_obj.location):
            tbn_filename = helpers.replaceExtension(ep_obj.location,
                                                    'metathumb')
        else:
            return None

        return tbn_filename

    def get_season_poster_path(self, show_obj, season):
        """
        Season thumbs for WDTV go in Show Dir/Season X/folder.jpg

        If no season folder exists, None is returned
        """

        dir_list = [
            x for x in ek.ek(os.listdir, show_obj.location)
            if ek.ek(os.path.isdir, ek.ek(os.path.join, show_obj.location, x))
        ]

        season_dir_regex = r'^Season\s+(\d+)$'

        season_dir = None

        for cur_dir in dir_list:
            if season == 0 and cur_dir == "Specials":
                season_dir = cur_dir
                break

            match = re.match(season_dir_regex, cur_dir, re.I)
            if not match:
                continue

            cur_season = int(match.group(1))

            if cur_season == season:
                season_dir = cur_dir
                break

        if not season_dir:
            logger.log(
                u"Unable to find a season dir for season " + str(season),
                logger.DEBUG)
            return None

        logger.log(
            u"Using " + str(season_dir) +
            "/folder.jpg as the season poster for season " + str(season),
            logger.DEBUG)

        return ek.ek(os.path.join, show_obj.location, season_dir, 'folder.jpg')

    def _ep_data(self, ep_obj):
        """
        Creates an elementTree XML structure for a WDTV style episode.xml
        and returns the resulting data object.

        ep_obj: a TVEpisode instance to create the XML for
        """

        eps_to_write = [ep_obj] + ep_obj.relatedEps

        indexer_lang = ep_obj.show.lang

        try:
            lINDEXER_API_PARMS = sickbeard.indexerApi(
                ep_obj.show.indexer).api_params.copy()

            lINDEXER_API_PARMS['actors'] = True

            if indexer_lang and not indexer_lang == 'en':
                lINDEXER_API_PARMS['language'] = indexer_lang

            if ep_obj.show.dvdorder != 0:
                lINDEXER_API_PARMS['dvdorder'] = True

            t = sickbeard.indexerApi(
                ep_obj.show.indexer).indexer(**lINDEXER_API_PARMS)
            myShow = t[ep_obj.show.indexerid]
        except sickbeard.indexer_shownotfound, e:
            raise exceptions.ShowNotFoundException(e.message)
        except sickbeard.indexer_error, e:
            logger.log(
                u"Unable to connect to " +
                sickbeard.indexerApi(ep_obj.show.indexer).name +
                " while creating meta files - skipping - " + ex(e),
                logger.ERROR)
            return False
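The _ep_data snippet cuts off after the indexer lookup. One small helper the class relies on above, helpers.replaceExtension (used by get_episode_thumb_path), is plausibly just an extension swap; a self-contained sketch under that assumption:

import os

def replace_extension(filename, new_ext):
    # Hypothetical sketch of a replaceExtension-style helper: drop whatever
    # extension the file currently has and append the new one.
    name, _ = os.path.splitext(filename)
    return name + '.' + new_ext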