Example #1
    def __parseTransmissionResponse(self, postData, tries=0):
        """
        Parse a transmission response

        Takes:
            tries - Limits the recursion of this call when tags do not match
            postData - The data posted to transmission-rpc

        Returns:
            Tuple (response, httpResponseCode, transmissionResponseCode)
        """

        response = None
        httpResponseCode = -1
        encoding = None
        transmissionResponseCode = u"failed"

        if tries >= TRANSMISSION_MAX_RETRIES:
            return (response, httpResponseCode, transmissionResponseCode)

        # Make the call
        response, httpResponseCode, encoding = self.__sendRequest(postData=postData.encode("utf-8"))

        # Ensure httpResponseCode is unicode
        httpResponseCode = unicode(httpResponseCode)

        # Ensure the result is in utf-8
        response = Settings.changeCharset(response, "utf-8", "html.parser")

        torrents = []

        # parse the json if it exists
        if response is not None:
            try:
                response = json.loads(response)

            # If the response cannot be parsed, leave it as-is;
            # the dict check below will skip it
            except ValueError:
                pass

        # Make sure we got a result
        if isinstance(response, dict):

            # Get Tag, if tag is available and ensure the response matches
            posted = json.loads(postData)
            if isinstance(posted, dict) and "tag" in posted:
                if isinstance(response, dict) and "tag" in response:
                    if posted["tag"] != response["tag"]:
                        time.sleep(5)
                        response, httpResponseCode, transmissionResponseCode = self.__parseTransmissionResponse(postData, tries=tries + 1)

            # Get Transmission Response Code
            if isinstance(response, dict) and "result" in response:
                transmissionResponseCode = unicode(response["result"])

        return (response, httpResponseCode, transmissionResponseCode)
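
The parser above keys off two fields of the transmission-rpc JSON body: "tag", which is echoed back from the request and used to detect mismatched replies, and "result", which becomes the transmissionResponseCode. A minimal sketch of such a round trip, assuming a torrent-get call (the method name and argument values are illustrative, not taken from the example):

import json

# Field names ("method", "arguments", "tag", "result") follow the Transmission
# RPC protocol; the method and argument values here are illustrative only.
postData = json.dumps({
    "method": "torrent-get",
    "arguments": {"fields": ["id", "name", "status"]},
    "tag": 42
})

# A hypothetical body as transmission-rpc might return it
responseBody = '{"result": "success", "arguments": {"torrents": []}, "tag": 42}'

posted = json.loads(postData)
response = json.loads(responseBody)

# The parser retries when the echoed tag does not match the posted one,
# and reports the "result" field as the transmissionResponseCode
if posted["tag"] == response["tag"]:
    transmissionResponseCode = response["result"]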
Example #2
def __rssReader():
    '''
    This thread will take care of Processing RSS Feeds
    '''

    logger.info(u'RSSDaemon Started')

    logger.debug("Pool Created")

    try:
        while True:

            totalProcessed = 0
            startTime = time.time()

            rssPool = Pool(processes=flannelfox.settings['maxRssThreads'], maxtasksperchild=10)

            # Re-read the feed configs each loop to ensure new entries are picked up
            majorFeeds = {}
            results = []
            majorFeeds.update(Settings.readLastfmArtists())
            majorFeeds.update(Settings.readTraktTV())
            majorFeeds.update(Settings.readGoodreads())
            majorFeeds.update(Settings.readRSS())

            # Holds all the torrents that are in the feeds, filtered, and new
            rssTorrents = TorrentQueue.Queue()

            # If single thread is specified then do not fork
            # TODO: this should not happen and will be removed
            if flannelfox.settings['maxRssThreads'] == 1:
                for majorFeed in majorFeeds.itervalues():
                    logger.info(u"Feed Name: {0}".format(majorFeed["feedName"]))

                    pid, torrents, error, processed = __rssThread(majorFeed)

                    for t in torrents:
                        rssTorrents.append(t)

            # If multiple cores are allowed then fork for the http calls
            else:

                try:
                    logger.info(u"Pool fetch of RSS Started {0}".format(strftime("%Y-%m-%d %H:%M:%S", gmtime())))


                    #for f in majorFeeds.itervalues():
                    #    results.append(rssPool.apply_async(__rssThread, (f,)))
                 
                    results = rssPool.imap_unordered(__rssThread, majorFeeds.itervalues())

                except Exception as e:
                    logger.error(u"ERROR: There was an error fetching the RSS Feeds.\n-  {0}".format(e))


                # Iterate the pool results and append the items to the queue
                logger.info(u'Appending items to the queue')
                
                try:
                    
                    for result in results:
                        
                        #Take each item in the result and append it to the Queue
                        pid, torrents, error, processed = result

                        if error is not None:
                            logger.error('ERROR: There was a problem processing an RSS feed:\n-  {0}'.format(error))

                        totalProcessed += processed

                        currentTorrent = 0
                        for t in torrents:

                            currentTorrent += 1

                            logger.debug(u'Processing: T[{0}]I[{1}]'.format(pid, currentTorrent))
                            try:
                                rssTorrents.append(t)
                            except Exception as e:
                                logger.error(u"ERROR: There was a problem appending data to the queue.\n-  {0}".format(e))

                    try:

                        logger.debug(u"Closing RSS Pool")
                        rssPool.close()
                    
                        logger.debug(u"Joining RSS Pool Workers")
                        rssPool.join()
                    except:
                        logger.error(u"ERROR: There was a problem clearing the pool.\n-  {0}".format(traceback.format_exc()))    

                except Exception as e:
                    logger.error(u"ERROR: There was a problem iterating the results.\n-  {0}".format(e))

                    try:
                        logger.debug(u"Closing RSS Pool")
                        rssPool.close()

                        logger.debug(u"Terminating RSS Pool Workers")
                        rssPool.terminate()

                        logger.debug(u"Joining RSS Pool Workers")
                        rssPool.join()
                    except:
                        logger.error(u"ERROR: There was a problem clearing the pool after an error.\n-  {0}".format(traceback.format_exc()))    

            logger.info(u"Pool fetch of RSS Done {0} {1} records loaded".format(strftime("%Y-%m-%d %H:%M:%S", gmtime()), len(rssTorrents)))
            
            # Log the number of records processed
            logger.info("Processed {0} items in {1:.2f} second(s)".format(totalProcessed, time.time() - startTime))

            # Write matching filters to database
            logger.debug("Writing {0} Torrents to DB".format(len(rssTorrents)))
            rssTorrents.writeToDB()

            # Garbage collection
            logger.debug("Garbage Collection")
            majorFeeds = rssTorrents = results = result = rssPool = None

            #Settings.showHeap()

            # Put the app to sleep
            logger.info("Sleep for a bit")
            time.sleep(flannelfox.settings['rssDaemonThreadSleep'])

    except Exception as e:
        logger.error(u"ERROR: __rssReader Failed {0} {1}\n-  {2}".format(
            strftime("%Y-%m-%d %H:%M:%S", gmtime()),
            e, 
            traceback.format_exc())
        )

    except:
        logger.error(u"ERROR: __rssReader Failed {0}\n-  {1}".format(
            strftime("%Y-%m-%d %H:%M:%S", gmtime()), 
            traceback.format_exc())
        )
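
Each pool result consumed above is a four-tuple of (pid, torrents, error, processed), matching what __rssThread returns in the next example. A self-contained sketch of the same imap_unordered pattern, with fetchFeed and the feed dicts as illustrative stand-ins rather than flannelfox code:

import os
from multiprocessing import Pool

def fetchFeed(feed):
    # Stand-in worker: pretend every feed yields a single item
    return (os.getpid(), [feed["feedName"]], None, 1)

if __name__ == "__main__":
    feeds = [{"feedName": "feed-a"}, {"feedName": "feed-b"}]
    pool = Pool(processes=2, maxtasksperchild=10)
    collected = []
    try:
        # imap_unordered yields results as workers finish, in any order
        for pid, items, error, processed in pool.imap_unordered(fetchFeed, feeds):
            if error is not None:
                continue
            collected.extend(items)
        pool.close()
        pool.join()
    except Exception:
        pool.terminate()
        pool.join()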
Example #3
def __rssThread(majorFeed):

    error = None
    processed = 0
    pid = os.getpid()

    try:

        rssTorrents = []

        logger.threadingInfo("[T:{0}] Thread Started".format(pid))
        
        # This is needed to ensure Keyboard driven interruptions are handled correctly
        # signal.signal(signal.SIGINT, signal.SIG_IGN)

        # Check each feed for a list of possible torrents
        # Set the default type for untyped feeds
        if not (isinstance(majorFeed["feedType"], unicode) and majorFeed["feedType"] != ""):
            majorFeed["feedType"] = u"none"

        # Aggregate all the minorFeed items
        for minorFeed in majorFeed["minorFeeds"]:
            # Read URL
            rssData, httpCode, encoding = __readRSSFeed(minorFeed["url"])

            logger.threadingDebug(u"[T:{0}] Checking URL: {1} [{2}]".format(pid, httpRegex.match(minorFeed["url"]).group(1), httpCode))

            # If we did not get any data or there was an error then skip to the next feed
            if rssData is None or httpCode != 200:
                continue

            # Ensure data is utf-8
            rssData = Settings.changeCharset(rssData, "utf-8", "xml")

            # Create a list of torrents from the RSS Feed
            torrents = __rssToTorrents(
                rssData,
                feedType=majorFeed["feedType"],
                feedDestination=majorFeed["feedDestination"],
                minRatio=minorFeed["minRatio"],
                comparison=minorFeed["comparison"],
                minTime=minorFeed["minTime"]
            )

            # Update the processed count
            processed += len(torrents)

            for torrent in torrents:

                # Check the filters and see if anything should be excluded
                if torrent.filterMatch(majorFeed["feedFilters"]):
                    rssTorrents.append(torrent)
                #     logger.debug("Matched Torrent: ")
                #     logger.debug("======================")
                #     logger.debug(u"{0}".format(torrent))
                #     logger.debug("======================")
                # else:
                #     logger.debug("UnMatched Torrent: ")
                #     logger.debug("======================")
                #     logger.debug(u"{0}".format(torrent))
                #     logger.debug("======================")

        # Garbage Collection
        minorFeed = rssData = torrents = None

    except Exception as e:
        error = u"ERROR: [T:{0}]: {1}\nException: {2}\nTraceback: {3}".format(pid, minorFeed["url"], e, traceback.format_exc())
        rssTorrents = []

    except:
        error = u'ERROR: [T:{0}]: {1}'.format(pid, traceback.format_exc())
        rssTorrents = []

    logger.threadingInfo("[T:{0}] Thread Done".format(pid))
    return (pid, rssTorrents, error, processed)
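
The keys accessed in this example hint at the rough shape of a majorFeed entry. An illustrative sketch, where only the key names come from the code above and every value (including the URL) is a placeholder:

# Illustrative placeholder values; only the key names are taken from the code
majorFeed = {
    "feedName": u"example-feed",
    "feedType": u"none",
    "feedDestination": u"/path/to/downloads",
    "feedFilters": [],
    "minorFeeds": [
        {
            "url": u"http://tracker.example/rss",
            "minRatio": 0,
            "comparison": u"or",
            "minTime": 0
        }
    ]
}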