def __parseTransmissionResponse(self, postData, tries=0):
    """
    Parse a transmission-rpc response.

    Takes:
        postData - The JSON string posted to transmission-rpc
        tries    - Limits the recursion of this call when tags do not match

    Returns:
        Tuple (response, httpResponseCode, transmissionResponseCode)
    """
    response = None
    httpResponseCode = -1
    encoding = None
    transmissionResponseCode = u"failed"

    # Give up once the tag-mismatch retry limit is reached
    if tries >= TRANSMISSION_MAX_RETRIES:
        return (response, httpResponseCode, transmissionResponseCode)

    # Make the call
    response, httpResponseCode, encoding = self.__sendRequest(postData=postData.encode("utf-8"))

    # Ensure httpResponseCode is unicode
    httpResponseCode = unicode(httpResponseCode)

    # Ensure the result is in utf-8
    response = Settings.changeCharset(response, "utf-8", "html.parser")

    # Parse the json if it exists
    if response is not None:
        try:
            response = json.loads(response)
        except ValueError:
            # If the response is not valid JSON fall through with the raw
            # (non-dict) body; the caller receives the u"failed" code
            pass

    # Make sure we got a result
    if isinstance(response, dict):
        # Get the tag, if available, and ensure the response matches what was posted
        posted = json.loads(postData)
        if isinstance(posted, dict) and "tag" in posted and "tag" in response:
            if posted["tag"] != response["tag"]:
                # Tag mismatch: wait and re-issue the request.
                # BUG FIX: the original passed self twice (TypeError) and
                # unpacked only two of the three returned values (ValueError),
                # discarding the retried transmissionResponseCode as well.
                time.sleep(5)
                return self.__parseTransmissionResponse(postData, tries=tries + 1)

        # Get the Transmission response code
        if "result" in response:
            transmissionResponseCode = unicode(response["result"])

    return (response, httpResponseCode, transmissionResponseCode)
def __rssThread(majorFeed):
    """
    Worker that reads every minor feed of a major feed, converts the RSS data
    into torrent objects, and collects those that pass the feed's filters.

    Takes:
        majorFeed - dict with keys "feedType", "feedDestination",
                    "feedFilters", and "minorFeeds" (each minor feed supplying
                    "url", "minRatio", "comparison", "minTime")

    Returns:
        Tuple (pid, rssTorrents, error, processed) where error is None on
        success and processed is the number of torrents parsed (pre-filter).
    """
    error = None
    processed = 0
    pid = os.getpid()
    rssTorrents = []
    # BUG FIX: defined before the try so the except handler cannot raise a
    # NameError when the failure happens before the loop starts
    minorFeed = None
    try:
        logger.threadingInfo("[T:{0}] Thread Started".format(pid))

        # This is needed to ensure Keyboard driven interruptions are handled correctly
        # signal.signal(signal.SIGINT, signal.SIG_IGN)

        # Set the default type for untyped feeds
        if not (isinstance(majorFeed["feedType"], unicode) and majorFeed["feedType"] != ""):
            majorFeed["feedType"] = u"none"

        # Aggregate all the minorFeed items
        for minorFeed in majorFeed["minorFeeds"]:
            # Read URL
            rssData, httpCode, encoding = __readRSSFeed(minorFeed["url"])
            logger.threadingDebug(u"[T:{0}] Checking URL: {1} [{2}]".format(pid, httpRegex.match(minorFeed["url"]).group(1), httpCode))

            # If we did not get any data or there was an error then skip to the next feed
            if rssData is None or httpCode != 200:
                continue

            # Ensure data is utf-8
            rssData = Settings.changeCharset(rssData, "utf-8", "xml")

            # Create a list of torrents from the RSS Feed
            torrents = __rssToTorrents(rssData, feedType=majorFeed["feedType"], feedDestination=majorFeed["feedDestination"], minRatio=minorFeed["minRatio"], comparison=minorFeed["comparison"], minTime=minorFeed["minTime"])

            # Update the processed count
            processed += len(torrents)

            # Check the filters and keep only matching torrents
            for torrent in torrents:
                if torrent.filterMatch(majorFeed["feedFilters"]):
                    rssTorrents.append(torrent)

            # Garbage Collection
            rssData = torrents = None

    except Exception as e:
        # BUG FIX: the original format strings used {0} for both the pid and
        # the url/traceback, so the pid was never reported; it also read
        # minorFeed["url"] even when minorFeed could be unset.
        url = minorFeed["url"] if isinstance(minorFeed, dict) else u"n/a"
        error = u"ERROR: [T:{0}] {1}\nException: {2}\nTraceback: {3}".format(pid, url, e, traceback.format_exc())
        rssTorrents = []

    logger.threadingInfo("[T:{0}] Thread Done".format(pid))
    return (pid, rssTorrents, error, processed)