def __init__(self, title, description, url, user):
    """Create a new user-submitted article and enqueue its URL for processing.

    The URL is pushed onto the Redis queue so the classifier service
    can pick it up asynchronously.
    """
    # Store the submission fields on the instance.
    for field, value in (("title", title),
                         ("description", description),
                         ("url", url),
                         ("user", user)):
        setattr(self, field, value)
    # Hand the URL off to the background classifier via Redis.
    REDIS.lpush(SUBMITTED_ARTICLE_QUEUE_KEY, url)
def process(self):
    """Download the submitted page and run the full processing pipeline.

    Fetches the URL, parses it with BeautifulSoup, then delegates to the
    article / contents / tags steps in order, and finally queues the saved
    article's id for graphing.
    """
    # Fetch the raw page and parse it (parse tree kept on the instance
    # so the downstream steps can read it).
    response = urllib.urlopen(self.url)
    self.soup = BeautifulSoup(response)
    # Pipeline steps — order matters: the article record must exist before
    # contents/tags are derived, and tags before their associations are saved.
    self.set_article()
    self.determine_contents()
    self.determine_tags()
    self.save_tag_associations()
    # Queue the finished article for the graphing service.
    REDIS.lpush(ARTICLES_TO_BE_GRAPHED, self.article.id)
def save_security_local(sec_key, username, max_age=3600):
    """Save the user's security key and username to their browser cookies.

    Also mirrors the key into the response header, the session, and Redis
    so later requests can be validated server-side.

    Args:
        sec_key:  the security key issued for this session.
        username: the user the key belongs to.
        max_age:  cookie lifetime in seconds (default 3600 = 1 hour).
    """
    cookie = cherrypy.response.cookie
    cookie[COOKIE_USERNAME] = username
    cookie[COOKIE_SEC_KEY] = sec_key
    # Both cookies share the same lifetime.
    cookie[COOKIE_USERNAME]['max-age'] = max_age
    cookie[COOKIE_SEC_KEY]['max-age'] = max_age
    cherrypy.response.headers['sec-key'] = sec_key
    save_security_to_session(sec_key, username)
    # NOTE(review): the Redis copy has no TTL, so it outlives the cookies;
    # consider REDIS.setex(username, max_age, sec_key) if they should expire
    # together — left unchanged to preserve current behavior.
    REDIS.set(username, sec_key)
def has_stored_sec_key(username, frontend_sec):
    """Return True if Redis holds a security key for `username` matching
    the key presented by the frontend; refresh the session on success.

    The truthiness check on the stored key is deliberate: it prevents a
    missing key (None) from matching a missing frontend value.
    """
    stored_key = REDIS.get(username)
    # Guard clause: no stored key, or a mismatch — reject.
    if not stored_key or stored_key != frontend_sec:
        return False
    save_security_to_session(stored_key, username)
    return True
def run(): """ Main program loop for the classifier service. """ url = REDIS.lpop(SUBMITTED_ARTICLE_QUEUE_KEY) print "URL: %s " % url if not url: time.sleep(10) else: # try: classify = Classifier(url) classify.process() # except Exception as E: # REDIS.lpush(SUBMITTED_ARTICLE_PROCESS_FAILED, url) # REDIS.hset(SUBMITTED_ARTICLE_PROCESS_FAILED_LOG, url, repr(traceback.format_stack())) run()