def post(self):
    """Save a new social post for the authenticated user.

    Form fields:
      text     -- post body; accepted when 1-139 chars long.
      resubmit -- truthy string ("yes"/"true"/"t"/"1"); defaults to "true".

    Writes a JSON response: {"success": true, "id": ...} on success, or a
    FailureJson payload when the user is missing, the text is invalid, or
    the datastore is read-only (CapabilityDisabledError).
    """
    util = helpers.Util()
    user_model = util.is_user_good()
    self.response.headers["Content-Type"] = "application/json"
    if user_model is not None:
        text_to_save = self.request.get("text", default_value=None)
        resubmit_post = self.request.get("resubmit", default_value="true")
        resubmit_bool = resubmit_post.lower() in ("yes", "true", "t", "1")
        # NOTE(review): the upper bound is exclusive, so an exactly-140-char
        # post is rejected -- confirm whether the intended limit is <= 140.
        if text_to_save is not None and 0 < len(text_to_save) < 140:
            day_created = util.get_todays_start()
            # Collapse whitespace runs to single spaces (raw string avoids
            # the invalid "\s" escape in a plain string literal).
            text_to_save = re.sub(r"\s+", " ", text_to_save)
            social_post = model.SocialPostsForUsers(
                social_user=user_model,
                text=text_to_save,
                day_created=day_created,
                resubmit=resubmit_bool)
            try:
                social_post.put()
                self.response.out.write(json.dumps(
                    {"success": True, "id": "%s" % social_post.key()}))
            except CapabilityDisabledError:
                # Datastore is in read-only maintenance; report, don't crash.
                fail = FailureJson(FAILURE_CAPABILITY_DISABLED_CODE,
                                   FAILURE_CAPABILITY_DISABLED_TEXT)
                self.response.out.write(fail.get_json())
        else:
            fail = FailureJson(FAILURE_NO_TEXT_TO_SAVE_CODE,
                               FAILURE_NO_TEXT_TO_SAVE_TEXT)
            self.response.out.write(fail.get_json())
    else:
        fail = FailureJson(FAILURE_NO_USER_CODE, FAILURE_NO_USER_TEXT)
        self.response.out.write(fail.get_json())
def post(self):
    """Save a new social post for the authenticated user.

    Form fields:
      text     -- post body; accepted when 1-139 chars long.
      resubmit -- truthy string ("yes"/"true"/"t"/"1"); defaults to "true".

    Writes a JSON response: {"success": true, "id": ...} on success, or a
    FailureJson payload when the user is missing, the text is invalid, or
    the datastore is read-only (CapabilityDisabledError).
    """
    util = helpers.Util()
    user_model = util.is_user_good()
    self.response.headers["Content-Type"] = "application/json"
    if user_model is not None:
        text_to_save = self.request.get("text", default_value=None)
        resubmit_post = self.request.get("resubmit", default_value="true")
        resubmit_bool = resubmit_post.lower() in ("yes", "true", "t", "1")
        # NOTE(review): the upper bound is exclusive, so an exactly-140-char
        # post is rejected -- confirm whether the intended limit is <= 140.
        if text_to_save is not None and 0 < len(text_to_save) < 140:
            day_created = util.get_todays_start()
            # Collapse whitespace runs to single spaces (raw string avoids
            # the invalid "\s" escape in a plain string literal).
            text_to_save = re.sub(r"\s+", " ", text_to_save)
            social_post = model.SocialPostsForUsers(
                social_user=user_model,
                text=text_to_save,
                day_created=day_created,
                resubmit=resubmit_bool)
            try:
                social_post.put()
                self.response.out.write(json.dumps(
                    {"success": True, "id": "%s" % social_post.key()}))
            except CapabilityDisabledError:
                # Datastore is in read-only maintenance; report, don't crash.
                fail = FailureJson(FAILURE_CAPABILITY_DISABLED_CODE,
                                   FAILURE_CAPABILITY_DISABLED_TEXT)
                self.response.out.write(fail.get_json())
        else:
            fail = FailureJson(FAILURE_NO_TEXT_TO_SAVE_CODE,
                               FAILURE_NO_TEXT_TO_SAVE_TEXT)
            self.response.out.write(fail.get_json())
    else:
        fail = FailureJson(FAILURE_NO_USER_CODE, FAILURE_NO_USER_TEXT)
        self.response.out.write(fail.get_json())
def get(self):
    """Render the XML sitemap.

    Emits one day-link per calendar day, from the oldest stored link date
    up to (but not including) the start of today, plus a link to the
    site root.
    """
    queries = NewsMeModelQueries()
    cursor_date = queries.get_oldest_link_date()
    logging.info("oldest_date = %s" % cursor_date)
    util = helpers.Util()
    today_start = util.get_todays_start()
    logging.info("today_start = %s" % today_start)
    host = self.request.headers["Host"]
    day_links = []
    # Walk forward one day at a time until we reach today.
    while cursor_date < today_start:
        stamp = cursor_date.strftime("%Y-%m-%d")
        day_link = "http://%s/?when=%s" % (host, stamp)
        logging.info("next_link = %s" % day_link)
        day_links.append(day_link)
        cursor_date = util.get_next_day(cursor_date)
        logging.info("next oldest_date = %s" % cursor_date)
    _template_values = {
        "links": day_links,
        "home": "http://%s/" % host,
    }
    _path = os.path.join(os.path.dirname(__file__), 'newsmesitemap.html')
    self.response.headers["Content-Type"] = "application/xml"
    self.response.out.write(template.render(_path, _template_values))
def get(self):
    """Render the daily news report page.

    Honors an optional "when" query parameter (YYYY-MM-DD); when absent or
    invalid, serves today's report. Results are read from memcache when
    possible and cached until the next cron cycle; explicit "when" pages
    get a long cache lifetime.
    """
    util = helpers.Util()
    model_queries = NewsMeModelQueries()
    day_start = self.request.get("when")  # 2012-09-03
    day_start_valid = self.is_request_when_valid(util.get_time_from_string)
    logging.info("request when = %s" % self.request.get("when"))
    if not day_start_valid:
        # this is a normal request to the / main page then
        day_start = util.get_todays_start()
    else:
        day_start = util.get_time_from_string(day_start)
    logging.info("today's day start: %s, %s" % (day_start, type(day_start)))
    day_stop = util.get_dates_stop(day_start)
    logging.info("today's day stop: %s" % day_stop)
    # Seconds to cache until the next cron job; this might not matter
    # for "when" pages.
    seconds_to_cache = util.get_report_http_time_left()
    results = None
    # NOTE(review): with a limit of 1 the loop body runs exactly once, so
    # the "failover" day shift below is never actually re-queried.
    failover_limit_max = 1
    failover_limit_counter = 0
    # Look for news results for the requested day; if there aren't any,
    # fall back toward the PREVIOUS day (get_yester_day) within the limit.
    while results is None and failover_limit_counter < failover_limit_max:
        cache_results = memcache.get(
            cache_keys.NEWSME_REPORTHANDLER_ALL_STORIES % day_start)
        if cache_results is None:
            results = model_queries.get_articles_between(start=day_start,
                                                         stop=day_stop)
            memcache.add(
                cache_keys.NEWSME_REPORTHANDLER_ALL_STORIES % day_start,
                results, seconds_to_cache)
        else:
            results = cache_results
        logging.info("results length = %s" % len(results))
        if len(results) == 0:
            logging.warn("results length is NOT correct for day start: %s"
                         % day_start)
            results = None
            day_start = util.get_yester_day(day_start)
            day_stop = util.get_dates_stop(day_start)
        else:
            logging.info("results length is correct for day start: %s"
                         % day_start)
        failover_limit_counter = failover_limit_counter + 1
        logging.info("failover_limit_counter = %s" % failover_limit_counter)
    if results is None:
        logging.warn("never found results for request")
        # Reset day_start to the originally-requested day for rendering.
        if not day_start_valid:
            day_start = util.get_todays_start()
        else:
            day_start = self.request.get("when")  # 2012-09-03
            day_start = util.get_time_from_string(day_start)
    request_host = self.request.headers["Host"]
    logging.info("request_host = %s" % request_host)
    _template_values = {}
    _template_values["links"] = results
    template_date = day_start.strftime("%Y-%m-%d")
    _template_values["rel_canonical"] = "http://%s/?when=%s" % (
        request_host, template_date)
    _template_values["page_date"] = template_date
    # Build a 7-day selector (3 back, current, 3 forward) and drop the
    # middle entry, which is the page's own day.
    selection_list = util.get_list_of_days(way=util.get_yester_day,
                                           when=day_start, many=3)
    selection_list.extend(
        util.get_list_of_days(way=util.get_next_day, when=day_start, many=3))
    selection_list.sort()
    selection_list.pop(len(selection_list) // 2)
    _template_values["date_selection"] = selection_list
    if not self.request.get("when"):
        # cache current page until next cron cycle
        logging.info("response cache in seconds will be: %s" % seconds_to_cache)
        self.response.headers["Expires"] = util.get_expiration_stamp(
            seconds_to_cache)
        # BUG FIX: the original set a bogus header literally named
        # "Cache-Control: max-age" and then overwrote Cache-Control with
        # just "public", so the max-age directive was lost.
        self.response.headers["Cache-Control"] = (
            "public, max-age=%s" % seconds_to_cache)
    else:
        # cache "when" pages for a long time
        logging.info("response cache in seconds will be: %s"
                     % cache_keys.NEWSME_CACHE_DIGEST_RESPONSE_LONG)
        self.response.headers["Expires"] = util.get_expiration_stamp(
            cache_keys.NEWSME_CACHE_DIGEST_RESPONSE_LONG)
        self.response.headers["Cache-Control"] = (
            "public, max-age=%s" % cache_keys.NEWSME_CACHE_DIGEST_RESPONSE_LONG)
    _path = os.path.join(os.path.dirname(__file__), 'newsmereport.html')
    self.response.out.write(template.render(_path, _template_values))
def get(self):
    """Render the daily news report page.

    Honors an optional "when" query parameter (YYYY-MM-DD); when absent or
    invalid, serves today's report. Results are read from memcache when
    possible and cached until the next cron cycle; explicit "when" pages
    get a long cache lifetime.
    """
    util = helpers.Util()
    model_queries = NewsMeModelQueries()
    day_start = self.request.get("when")  # 2012-09-03
    day_start_valid = self.is_request_when_valid(util.get_time_from_string)
    logging.info("request when = %s" % self.request.get("when"))
    if not day_start_valid:
        # this is a normal request to the / main page then
        day_start = util.get_todays_start()
    else:
        day_start = util.get_time_from_string(day_start)
    logging.info("today's day start: %s, %s" % (day_start, type(day_start)))
    day_stop = util.get_dates_stop(day_start)
    logging.info("today's day stop: %s" % day_stop)
    # Seconds to cache until the next cron job; this might not matter
    # for "when" pages.
    seconds_to_cache = util.get_report_http_time_left()
    results = None
    # NOTE(review): with a limit of 1 the loop body runs exactly once, so
    # the "failover" day shift below is never actually re-queried.
    failover_limit_max = 1
    failover_limit_counter = 0
    # Look for news results for the requested day; if there aren't any,
    # fall back toward the PREVIOUS day (get_yester_day) within the limit.
    while results is None and failover_limit_counter < failover_limit_max:
        cache_results = memcache.get(
            cache_keys.NEWSME_REPORTHANDLER_ALL_STORIES % day_start)
        if cache_results is None:
            results = model_queries.get_articles_between(start=day_start,
                                                         stop=day_stop)
            memcache.add(
                cache_keys.NEWSME_REPORTHANDLER_ALL_STORIES % day_start,
                results, seconds_to_cache)
        else:
            results = cache_results
        logging.info("results length = %s" % len(results))
        if len(results) == 0:
            logging.warn("results length is NOT correct for day start: %s"
                         % day_start)
            results = None
            day_start = util.get_yester_day(day_start)
            day_stop = util.get_dates_stop(day_start)
        else:
            logging.info("results length is correct for day start: %s"
                         % day_start)
        failover_limit_counter = failover_limit_counter + 1
        logging.info("failover_limit_counter = %s" % failover_limit_counter)
    if results is None:
        logging.warn("never found results for request")
        # Reset day_start to the originally-requested day for rendering.
        if not day_start_valid:
            day_start = util.get_todays_start()
        else:
            day_start = self.request.get("when")  # 2012-09-03
            day_start = util.get_time_from_string(day_start)
    request_host = self.request.headers["Host"]
    logging.info("request_host = %s" % request_host)
    _template_values = {}
    _template_values["links"] = results
    template_date = day_start.strftime("%Y-%m-%d")
    _template_values["rel_canonical"] = "http://%s/?when=%s" % (
        request_host, template_date)
    _template_values["page_date"] = template_date
    # Build a 7-day selector (3 back, current, 3 forward) and drop the
    # middle entry, which is the page's own day.
    selection_list = util.get_list_of_days(way=util.get_yester_day,
                                           when=day_start, many=3)
    selection_list.extend(
        util.get_list_of_days(way=util.get_next_day, when=day_start, many=3))
    selection_list.sort()
    selection_list.pop(len(selection_list) // 2)
    _template_values["date_selection"] = selection_list
    if not self.request.get("when"):
        # cache current page until next cron cycle
        logging.info("response cache in seconds will be: %s" % seconds_to_cache)
        self.response.headers["Expires"] = util.get_expiration_stamp(
            seconds_to_cache)
        # BUG FIX: the original set a bogus header literally named
        # "Cache-Control: max-age" and then overwrote Cache-Control with
        # just "public", so the max-age directive was lost.
        self.response.headers["Cache-Control"] = (
            "public, max-age=%s" % seconds_to_cache)
    else:
        # cache "when" pages for a long time
        logging.info("response cache in seconds will be: %s"
                     % cache_keys.NEWSME_CACHE_DIGEST_RESPONSE_LONG)
        self.response.headers["Expires"] = util.get_expiration_stamp(
            cache_keys.NEWSME_CACHE_DIGEST_RESPONSE_LONG)
        self.response.headers["Cache-Control"] = (
            "public, max-age=%s" % cache_keys.NEWSME_CACHE_DIGEST_RESPONSE_LONG)
    _path = os.path.join(os.path.dirname(__file__), 'newsmereport.html')
    self.response.out.write(template.render(_path, _template_values))
def get(self):
    """Return the rendered list of today's posts for the authenticated user.

    Query params:
      which -- "yours-pending" or "theirs-pending"; anything else yields
               empty results and a warning.
      since -- optional datastore cursor for paging; "" or "undefined"
               are treated as absent (first page).

    Fresh query results are memcached for 60 seconds per
    (which, since, day) key. Fails with a FailureJson payload when no
    authenticated user is found.
    """
    util = helpers.Util()
    user_model = util.is_user_good()
    if user_model is not None:
        _template_values = {}
        get_which = self.request.get("which")
        get_since = self.request.get("since")
        if get_since == "" or get_since == "undefined":
            get_since = None
        day_start = util.get_todays_start()
        day_stop = util.get_todays_stop()
        _template_values["day_start"] = day_start
        _template_values["day_stop"] = day_stop
        posts_results_cache_key = cache_keys.POST_RESULTS % (
            get_which, get_since, day_start.date())
        logging.info("post results cache key is: %s" % posts_results_cache_key)
        cache_results = memcache.get(posts_results_cache_key)
        if cache_results is not None:
            logging.info("cached search results being returned for key: %s"
                         % posts_results_cache_key)
            _template_values["c"] = get_since
            _template_values["r"] = cache_results
        else:
            logging.info("search results not found for cache key %s"
                         % posts_results_cache_key)
            queries = helpers.Queries()
            if get_which == "yours-pending":
                q = queries.get_posts_yours_pending(user_model, get_since,
                                                    day_start)
                results = q.fetch(
                    100, config=queries.get_db_run_config_eventual())
                cursor = q.cursor()
                if get_since is None:
                    # First page also folds in today's resubmitted posts.
                    q2 = queries.get_posts_yours_resubmitted(user_model,
                                                             day_start)
                    results2 = q2.fetch(
                        100, config=queries.get_db_run_config_eventual())
                    logging.info("yours resubmitted: %s" % len(results2))
                    results.extend(results2)
                _template_values["c"] = cursor
                _template_values["r"] = results
            elif get_which == "theirs-pending":
                q = queries.get_posts_theirs_pending(get_since, day_start)
                results = q.fetch(
                    100, config=queries.get_db_run_config_eventual())
                cursor = q.cursor()
                if get_since is None:
                    # First page also folds in today's resubmitted posts.
                    q2 = queries.get_posts_theirs_resubmitted(user_model,
                                                              day_start)
                    results2 = q2.fetch(
                        100, config=queries.get_db_run_config_eventual())
                    logging.info("theirs resubmitted: %s" % len(results2))
                    results.extend(results2)
                _template_values["c"] = cursor
                _template_values["r"] = results
            else:
                # BUG FIX: original format string was "%" (missing the
                # conversion type), which raises ValueError at runtime.
                logging.warning("unknown which was passed: %s" % get_which)
                _template_values["c"] = None
                _template_values["r"] = None
            if _template_values["r"] is not None:
                memcache.add(posts_results_cache_key,
                             _template_values["r"], 60)
        _path = os.path.join(os.path.dirname(__file__), 'posts.html')
        self.response.headers["Expires"] = util.get_expiration_stamp(60)
        self.response.headers["Content-Type"] = "application/json"
        # BUG FIX: "Cache-Control: max-age" is not a valid header name and
        # the value 60 was an int, not a string; emit one well-formed
        # Cache-Control header instead.
        self.response.headers["Cache-Control"] = "public, max-age=60"
        self.response.out.write(template.render(_path, _template_values))
    else:
        fail = FailureJson(FAILURE_NO_USER_CODE, FAILURE_NO_USER_TEXT)
        self.response.out.write(fail.get_json())
def get(self):
    """Return the rendered list of today's posts for the authenticated user.

    Query params:
      which -- "yours-pending" or "theirs-pending"; anything else yields
               empty results and a warning.
      since -- optional datastore cursor for paging; "" or "undefined"
               are treated as absent (first page).

    Fresh query results are memcached for 60 seconds per
    (which, since, day) key. Fails with a FailureJson payload when no
    authenticated user is found.
    """
    util = helpers.Util()
    user_model = util.is_user_good()
    if user_model is not None:
        _template_values = {}
        get_which = self.request.get("which")
        get_since = self.request.get("since")
        if get_since == "" or get_since == "undefined":
            get_since = None
        day_start = util.get_todays_start()
        day_stop = util.get_todays_stop()
        _template_values["day_start"] = day_start
        _template_values["day_stop"] = day_stop
        posts_results_cache_key = cache_keys.POST_RESULTS % (
            get_which, get_since, day_start.date())
        logging.info("post results cache key is: %s" % posts_results_cache_key)
        cache_results = memcache.get(posts_results_cache_key)
        if cache_results is not None:
            logging.info("cached search results being returned for key: %s"
                         % posts_results_cache_key)
            _template_values["c"] = get_since
            _template_values["r"] = cache_results
        else:
            logging.info("search results not found for cache key %s"
                         % posts_results_cache_key)
            queries = helpers.Queries()
            if get_which == "yours-pending":
                q = queries.get_posts_yours_pending(user_model, get_since,
                                                    day_start)
                results = q.fetch(
                    100, config=queries.get_db_run_config_eventual())
                cursor = q.cursor()
                if get_since is None:
                    # First page also folds in today's resubmitted posts.
                    q2 = queries.get_posts_yours_resubmitted(user_model,
                                                             day_start)
                    results2 = q2.fetch(
                        100, config=queries.get_db_run_config_eventual())
                    logging.info("yours resubmitted: %s" % len(results2))
                    results.extend(results2)
                _template_values["c"] = cursor
                _template_values["r"] = results
            elif get_which == "theirs-pending":
                q = queries.get_posts_theirs_pending(get_since, day_start)
                results = q.fetch(
                    100, config=queries.get_db_run_config_eventual())
                cursor = q.cursor()
                if get_since is None:
                    # First page also folds in today's resubmitted posts.
                    q2 = queries.get_posts_theirs_resubmitted(user_model,
                                                              day_start)
                    results2 = q2.fetch(
                        100, config=queries.get_db_run_config_eventual())
                    logging.info("theirs resubmitted: %s" % len(results2))
                    results.extend(results2)
                _template_values["c"] = cursor
                _template_values["r"] = results
            else:
                # BUG FIX: original format string was "%" (missing the
                # conversion type), which raises ValueError at runtime.
                logging.warning("unknown which was passed: %s" % get_which)
                _template_values["c"] = None
                _template_values["r"] = None
            if _template_values["r"] is not None:
                memcache.add(posts_results_cache_key,
                             _template_values["r"], 60)
        _path = os.path.join(os.path.dirname(__file__), 'posts.html')
        self.response.headers["Expires"] = util.get_expiration_stamp(60)
        self.response.headers["Content-Type"] = "application/json"
        # BUG FIX: "Cache-Control: max-age" is not a valid header name and
        # the value 60 was an int, not a string; emit one well-formed
        # Cache-Control header instead.
        self.response.headers["Cache-Control"] = "public, max-age=60"
        self.response.out.write(template.render(_path, _template_values))
    else:
        fail = FailureJson(FAILURE_NO_USER_CODE, FAILURE_NO_USER_TEXT)
        self.response.out.write(fail.get_json())