def get_page(cls, page_name, content_lambda):
    """Return the page cached under page_name, building it lazily on a miss."""
    cached = MemcacheManager.get(page_name)
    if cached:
        return cached
    logging.info('Cache miss: ' + page_name)
    fresh = content_lambda()
    MemcacheManager.set(page_name, fresh)
    return fresh
def get_announcements(cls, allow_cached=True):
    """Return up to 1000 announcements, newest first, via memcache.

    When allow_cached is False (or nothing is cached) the datastore is
    queried and the cache refreshed.
    """
    items = MemcacheManager.get(cls.memcache_key)
    if allow_cached and items is not None:
        return items
    items = AnnouncementEntity.all().order('-date').fetch(1000)
    # TODO(psimakov): prepare to exceed 1MB max item size
    # read more here: http://stackoverflow.com
    # /questions/5081502/memcache-1-mb-limit-in-google-app-engine
    MemcacheManager.set(cls.memcache_key, items)
    return items
def get_forum_by_url(forumurl):
    """Return the Forum whose url equals forumurl, or None if unknown."""
    # number of forums is small, so we cache all of them
    forums = memcache.get(FORUMS_MEMCACHE_KEY)
    if not forums:
        # this effectively limits number of forums to 200
        forums = Forum.all().fetch(200)
        if not forums:
            return None
        memcache.set(FORUMS_MEMCACHE_KEY, forums)
    for candidate in forums:
        if candidate.url == forumurl:
            return candidate
    return None
def get(cls, student, unit_id):
    """Loads programming answer entity.

    Uses a NO_OBJECT sentinel in memcache to remember negative lookups so
    repeated misses do not hit the datastore.
    """
    key = cls._create_key(student.user_id, unit_id)
    cache_key = cls._memcache_key(key)
    value = MemcacheManager.get(cache_key)
    if NO_OBJECT == value:
        return None
    if value:
        return value
    value = cls.get_by_key_name(key)
    MemcacheManager.set(cache_key, value if value else NO_OBJECT)
    return value
def get_layout(cls):
    """Return the explorer layout as a DTO, consulting memcache first.

    Falls back to the datastore on a cache miss and repopulates the cache;
    wraps a missing entity as ExplorerLayoutDTO(None).
    """
    with Namespace(cls.TARGET_NAMESPACE):
        layout = MemcacheManager.get(
            cls._memcache_key(), namespace=cls.TARGET_NAMESPACE)
        if layout:
            return ExplorerLayoutDTO(layout)
        layout = ExplorerLayout.get_by_key_name(cls.KEY)
        if not layout:
            return ExplorerLayoutDTO(None)
        MemcacheManager.set(
            cls._memcache_key(), layout, namespace=cls.TARGET_NAMESPACE)
        return ExplorerLayoutDTO(layout)
def get(self, *args):
    """Delete or undelete a forum post (moderators only).

    The post id arrives as the raw query string; the action (delete vs
    undelete) is chosen by the URL path suffix. Invalidate the forum RSS
    cache on any change, and when the affected post is the first post of
    its topic, the whole topic is (un)deleted and the topic-list cache is
    cleared too. Always redirects at the end.
    """
    (forum, siteroot, tmpldir) = forum_siteroot_tmpldir_from_url(
        self.request.path_info)
    if not forum or forum.is_disabled:
        return self.redirect(FORUMS_ROOT + "/")
    is_moderator = users.is_current_user_admin()
    if not is_moderator or forum.is_disabled:
        return self.redirect(siteroot)
    post_id = self.request.query_string
    #logging.info("PostDelUndel: post_id='%s'" % post_id)
    post = db.get(db.Key.from_path('Post', int(post_id)))
    if not post:
        logging.info("No post with post_id='%s'" % post_id)
        return self.redirect(siteroot)
    if post.forum.key() != forum.key():
        # BUG FIX: was 'loggin.info' (NameError at runtime); also fixed
        # the 'fourm' typo in the log message.
        logging.info(
            "post.forum.key().id() ('%s') != forum.key().id() ('%s')" %
            (str(post.forum.key().id()), str(forum.key().id())))
        return self.redirect(siteroot)
    path = self.request.path
    if path.endswith("/postdel"):
        if not post.is_deleted:
            post.is_deleted = True
            post.put()
            memcache.delete(rss_memcache_key(forum))
        else:
            logging.info("Post '%s' is already deleted" % post_id)
    elif path.endswith("/postundel"):
        if post.is_deleted:
            post.is_deleted = False
            post.put()
            memcache.delete(rss_memcache_key(forum))
        else:
            logging.info(
                "Trying to undelete post '%s' that is not deleted" % post_id)
    else:
        logging.info("'%s' is not a valid path" % path)
    topic = post.topic
    # deleting/undeleting first post also means deleting/undeleting the
    # whole topic
    first_post = Post.gql(
        "WHERE forum=:1 AND topic = :2 ORDER BY created_on",
        forum, topic).get()
    if first_post.key() == post.key():
        # The topic mirrors the deleted state of its first post.
        topic.is_deleted = path.endswith("/postdel")
        topic.put()
        clear_topics_memcache(forum)
    # redirect to topic owning this post
    topic_url = siteroot + "topic?id=" + str(topic.key().id())
    self.redirect(topic_url)
def is_xsrf_token_valid(cls, token, action):
    """Validate an XSRF token by recomputing it and comparing.

    The token format is '<issued_on>/<digest>'. A token is valid when it
    parses, is younger than XSRF_TOKEN_AGE_SECS, and matches the token
    regenerated for the same action and timestamp. Any parse/compare
    failure yields False.

    BUG FIX: removed the memcache-based validation code that followed the
    try/except — every path inside the try returned or raised, so that
    code was unreachable dead code.
    """
    try:
        parts = token.split(cls.DELIMITER_PUBLIC)
        if not len(parts) == 2:
            raise Exception('Bad token format, expected: a/b.')
        issued_on = long(parts[0])
        age = time.time() - issued_on
        if age > cls.XSRF_TOKEN_AGE_SECS:
            return False
        authentic_token = cls._create_token(action, issued_on)
        return authentic_token == token
    except Exception:  # pylint: disable-msg=broad-except
        return False
def test_memcache(self):
    """A permission check should populate the roles memcache entry."""
    self._create_role()
    actions.login(STUDENT_EMAIL)
    roles.Roles.is_user_allowed(
        self._get_course(), PERMISSION_MODULE, PERMISSION)
    cached = MemcacheManager.get(roles.Roles.memcache_key)
    self.assertIn(STUDENT_EMAIL, cached)
    per_module = cached[STUDENT_EMAIL]
    self.assertIn(PERMISSION_MODULE.name, per_module)
    self.assertIn(PERMISSION, per_module[PERMISSION_MODULE.name])
def get_teacher_by_email(cls, email):
    """Returns enrolled teacher or None.

    Caches both hits and (via the NO_OBJECT sentinel) misses.
    NOTE(ehiller): unsure whether the memcache check belongs before the
    datastore lookup; depends on what MemcacheManager returns for a
    teacher that exists but was never cached. The isEnrolled check was
    removed — a teacher need not be enrolled to have data returned.
    """
    cache_key = cls._memcache_key(email)
    teacher = MemcacheManager.get(cache_key)
    if NO_OBJECT == teacher:
        return None
    if teacher:
        return teacher
    teacher = Teacher.get_by_email(email)
    MemcacheManager.set(cache_key, teacher if teacher else NO_OBJECT)
    return teacher
def update_layout(cls, top_text=None, right_video_id=None,
                  category_order=None):
    """Create or update the singleton explorer layout and refresh memcache.

    Only the fields whose arguments are not None are overwritten.
    """
    with Namespace(cls.TARGET_NAMESPACE):
        layout = ExplorerLayout.get_by_key_name(cls.KEY)
        if not layout:
            layout = ExplorerLayout(key_name=cls.KEY)
        if top_text is not None:
            layout.top_text = top_text
        if right_video_id is not None:
            layout.right_video_id = right_video_id
        if category_order is not None:
            # Stored serialized; YAML round-trips the ordered list.
            layout.category_order = yaml.safe_dump(category_order)
        layout.put()
        MemcacheManager.set(
            cls._memcache_key(), layout, namespace=cls.TARGET_NAMESPACE)
def __init__(self, top_key=None, already_have=None):
    """Seed the entity cache and, when top_key is set, start an async
    prefetch of the keys memcache remembers for this collection.

    BUG FIX: the default for already_have was a mutable list literal,
    shared across all calls; replaced with the None-sentinel idiom.

    Args:
        top_key: key identifying the collection to prefetch; falsy skips
            prefetching entirely.
        already_have: entities already in hand, cached by their key.
    """
    if already_have is None:
        already_have = []
    self.top_key = top_key
    self.cache = {x.key(): x for x in already_have}
    self.prefetch_query = None
    self.new_keys_fetched = False
    if top_key:
        keys_to_prefetch = MemcacheManager.get(self._collection_key)
        if keys_to_prefetch:
            # Only fetch keys we do not already hold (compared as strings).
            keys_to_prefetch = set(keys_to_prefetch).difference(
                map(str, self.cache.keys()))
            self.prefetch_query = db.get_async(keys_to_prefetch)
    else:
        self.new_keys_fetched = len(already_have) > 0
def __init__(self, top_key=None, already_have=None):
    """Seed the entity cache and, when top_key is set, start an async
    prefetch of the keys memcache remembers for this collection.

    BUG FIX: the default for already_have was a mutable list literal,
    shared across all calls; replaced with the None-sentinel idiom.

    Args:
        top_key: key identifying the collection to prefetch; falsy skips
            prefetching entirely.
        already_have: entities already in hand, cached by their key.
    """
    if already_have is None:
        already_have = []
    self.top_key = top_key
    self.cache = {x.key(): x for x in already_have}
    self.prefetch_query = None
    self.new_keys_fetched = False
    if top_key:
        keys_to_prefetch = MemcacheManager.get(self._collection_key)
        if keys_to_prefetch:
            # Only fetch keys we do not already hold (compared as strings).
            keys_to_prefetch = set(keys_to_prefetch).difference(
                map(str, self.cache.keys()))
            self.prefetch_query = db.get_async(keys_to_prefetch)
    else:
        self.new_keys_fetched = len(already_have) > 0
def get_topics_for_forum(forum, is_moderator, off, count):
    """Return (topics, new_off): one page of a forum's topics.

    new_off is None on the last page; deleted topics are hidden from
    non-moderators. Returns (None, 0) when the forum has no topics.
    """
    off = int(off)
    key = topics_memcache_key(forum)
    topics = memcache.get(key)
    if not topics:
        query = Topic.gql("WHERE forum = :1 ORDER BY created_on DESC", forum)
        topics = query.fetch(1000)
        if topics:
            #memcache.set(key, topics) # TODO: should I pickle?
            pass  # Not caching this for now :(
    if not topics:
        return (None, 0)
    if not is_moderator:
        topics = [t for t in topics if not t.is_deleted]
    page = topics[off:off + count]
    new_off = off + len(page)
    if len(page) < count:
        new_off = None  # signal this is the last page
    return (page, new_off)
def post(self, *args):
    """Handle a forum post submission: validate, create/extend the topic,
    store the Post, notify by email, and invalidate the forum caches.

    Redirects to the owning topic (or the forum root) when done; re-renders
    the post form with error markers on validation failure.
    """
    if not self.personalize_page_and_get_enrolled():
        return
    (forum, siteroot, tmpldir) = forum_siteroot_tmpldir_from_url(
        self.request.path_info)
    if not forum or forum.is_disabled:
        return self.redirect(FORUMS_ROOT + "/")
    if self.request.get('Cancel'):
        return self.redirect(siteroot)
    ip = get_remote_ip()
    if ip in BANNED_IPS:
        # Silently bounce banned IPs back to the forum root.
        return self.redirect(siteroot)
    self.send_cookie()
    vals = ['TopicId', 'Subject', 'Message', ]
    (topic_id, subject, message) = req_get_vals(self.request, vals)
    message = to_unicode(message)
    # Template values used when we must re-render the form with errors.
    tvals = {
        'siteroot' : siteroot,
        'forums' : db.GqlQuery(
            "SELECT * FROM Forum where is_disabled = False").fetch(MAX_FORUMS),
        'forum' : forum,
        "prevSubject" : subject,
        "prevMessage" : message,
        "log_in_out" : get_log_in_out(siteroot + "post")
    }
    # validate captcha and other values
    errclass = None
    if not message:
        errclass = "message_class"
    # first post must have subject
    if not topic_id and not subject:
        errclass = "subject_class"
    # sha.new() doesn't accept Unicode strings, so convert to utf8 first
    message_utf8 = message.encode('UTF-8')
    s = sha.new(message_utf8)
    sha1_digest = s.hexdigest()
    # A message with an identical digest is treated as a duplicate post.
    duppost = Post.gql("WHERE sha1_digest = :1", sha1_digest).get()
    if duppost:
        errclass = "message_class"
    if errclass:
        tvals[errclass] = "error"
        tmpl = os.path.join(tmpldir, "post.html")
        return self.template_out(tmpl, tvals)
    # get user either by google user id or cookie. Create user objects if
    # they don't already exist.
    existing_user = False
    user_id = users.get_current_user()
    if not user_id:
        self.redirect("/")
        return
    user = Student.get_enrolled_student_by_email(user_id.email())
    if not user:
        self.redirect("/")
        return
    if not topic_id:
        # No TopicId means this post starts a brand-new topic.
        topic = Topic(forum=forum, subject=subject, created_by=user.name)
        topic.put()
    else:
        topic = db.get(db.Key.from_path('Topic', int(topic_id)))
        #assert forum.key() == topic.forum.key()
        topic.ncomments += 1
        topic.put()
    notify_addr = '*****@*****.**'
    if notify_addr:
        # Email notification about the new post (sender == recipient here).
        mail.send_mail(sender=notify_addr,
                       to=notify_addr,
                       subject="[booc-iu] New forum post",
                       body=Markup("Topic: %s\n%s\n%s") % (
                           topic.subject,
                           self.request.host_url + siteroot + "topic?id=" +
                           str(topic.key().id()),
                           message))
    user_ip_str = get_remote_ip()
    if user.wiki_id:
        user_profile_link = '/wikiprofile?student=%d' % user.wiki_id
    else:
        user_profile_link = ''
    # user_ip is stored as 0; the printable address lives in user_ip_str.
    p = Post(topic=topic, forum=forum, user=user, user_ip=0,
             user_ip_str=user_ip_str,
             message=message, sha1_digest=sha1_digest,
             user_name = user.name, user_email = user_id.email(),
             user_homepage = user_profile_link)
    p.put()
    # New content invalidates both the RSS feed and topic-list caches.
    memcache.delete(rss_memcache_key(forum))
    clear_topics_memcache(forum)
    if topic_id:
        self.redirect(siteroot + "topic?id=" + str(topic_id))
    else:
        self.redirect(siteroot)
def delete(self):
    """Remove the announcement from the datastore, then drop its cache entry."""
    cache_key = self.memcache_key
    super(AnnouncementEntity, self).delete()
    MemcacheManager.delete(cache_key)
def delete(self, *args, **kwargs):
    """Delegate deletion straight to the shared MemcacheManager."""
    result = MemcacheManager.delete(*args, **kwargs)
    return result
def get_activity_scores(cls, student_user_ids, course, force_refresh=True):
    """Retrieve activity data for student using EventEntity.

    For each student, launch a Query of EventEntities to retrieve
    student scores. The Query is launched as a map-reduce background
    process that will return up to 500 results, reporting back
    every second. It reports back by calling the map_fn callback,
    which in turn calls parse_activity_scores. As soon as the Query is
    launched (in the background) the foreground process calls
    build_missing_scores() to construct a student_answer dict that
    will be updated as score data for that student is received.

    Events properties include a userid (a number) and a source (e.g.,
    tag-assessment), a recorded-on date (timestamp) and data (a
    dictionary). Here's a typical data dict:

    {"loc": {"city": "mililani", "language": "en-US,en;q=0.8",
     "locale": "en_US", "country": "US", "region": "hi",
     "long": -158.01528099999999, "lat": 21.451331,
     "page_locale": "en_US"}, "instanceid": "yOkVTqWogdaF",
     "quid": "5733935958982656", "score": 1,
     "location": "https://mobilecsp-201608.appspot.com/mobilecsp/unit?unit=1&lesson=45",
     "answer": [0, 1, 2, 4], "type": "McQuestion",
     "user_agent": "Mozilla/5.0 ..."}

    Note that it includes the unit_id and lesson_id as part of the Url.

    BUG FIX: the cached-read path copied scores_for_student['scores']
    into num_attempts_dict; it now reads the 'attempts' entry (with a {}
    fallback for older cache records), and the uncached write path now
    caches 'attempts' too, matching the force_refresh path.
    """
    # Instantiate parser object
    cached_date = datetime.datetime.now()
    activityParser = ActivityScoreParser()

    if force_refresh:
        activityParser.params = activityParser.build_additional_mapper_params(
            course.app_context)

        # Launch a background Query for each student's activity data.
        # This is expensive.
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id])
                .filter('recorded_on >= ', cls.CUTOFF_DATE),
                batch_size=1000, report_every=1000)

            # Callback function -- e.g., 45-50 callbacks per query
            def map_fn(activity_attempt):
                activityParser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)

        # In the foreground create the student_answer_dict, stored at
        # activity_scores[student][unit][lesson][sequence] where sequence
        # is the question's position within the lesson.
        activityParser.build_missing_scores()

        # Cache results for each student.
        for user_id in student_user_ids:
            student = Student.get_by_user_id(user_id)
            cached_student_data = {
                'date': cached_date,
                'scores': activityParser.activity_scores.get(
                    student.email, {}),
                'attempts': activityParser.num_attempts_dict.get(
                    student.email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(student.email),
                cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            if student_id != '':
                student = Student.get_by_user_id(student_id)
                scores_for_student = MemcacheManager.get(
                    cls._memcache_key_for_student(student.email))
                if scores_for_student:
                    cached_date = scores_for_student['date']
                    activityParser.activity_scores[student_id] = (
                        scores_for_student['scores'])
                    activityParser.num_attempts_dict[student_id] = (
                        scores_for_student.get('attempts', {}))
                else:
                    uncached_students.append(student_id)
        if uncached_students:
            # NOTE(review): now() < cached_date is almost never true since
            # cached_date starts as now(); confirm the intended staleness
            # check before changing it.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activityParser.params = (
                activityParser.build_additional_mapper_params(
                    course.app_context))
            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id])
                    .filter('recorded_on >= ', cls.CUTOFF_DATE),
                    batch_size=1000, report_every=1000)

                def map_fn(activity_attempt):
                    activityParser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)
            activityParser.build_missing_scores()

            # Cache results for each student.
            for user_id in uncached_students:
                student = Student.get_by_user_id(user_id)
                cached_student_data = {
                    'date': cached_date,
                    'scores': activityParser.activity_scores.get(
                        student.email, {}),
                    'attempts': activityParser.num_attempts_dict.get(
                        student.email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(student.email),
                    cached_student_data)

    score_data = {
        'date': cached_date,
        'scores': activityParser.activity_scores,
        'attempts': activityParser.num_attempts_dict,
    }
    if GLOBAL_DEBUG:
        logging.debug('***RAM*** get_activity_scores returning scores: ' +
                      str(score_data['scores']))
    return score_data
def delete(self):
    """Delete the entity from the datastore and evict its memcache entry."""
    cache_key = self._memcache_key(self.key().name())
    super(ProgrammingAnswersEntity, self).delete()
    MemcacheManager.delete(cache_key)
def put(self):
    """Persist the announcement, then invalidate the cached list."""
    saved_key = super(AnnouncementEntity, self).put()
    MemcacheManager.delete(self.memcache_key)
    return saved_key
def delete(self):
    """Remove the student answers entity, then drop its cache entry."""
    cache_key = self.memcache_key
    super(StudentAnswersEntity, self).delete()
    MemcacheManager.delete(cache_key)
def incr(self, *args, **kwargs):
    """Delegate atomic increments to the shared MemcacheManager."""
    outcome = MemcacheManager.incr(*args, **kwargs)
    return outcome
def delete(self):
    """Delete the teacher entity and evict its memcache entry."""
    cache_key = self._memcache_key(self.key().name())
    super(Teacher, self).delete()
    MemcacheManager.delete(cache_key)
def put(self):
    """Persist the teacher and write the fresh copy through to memcache."""
    saved_key = super(Teacher, self).put()
    cache_key = self._memcache_key(self.key().name())
    MemcacheManager.set(cache_key, self)
    return saved_key
def set(self, *args, **kwargs):
    """Write through to MemcacheManager, supplying this instance's
    default TTL unless the caller passed one explicitly."""
    if 'ttl' not in kwargs:
        kwargs['ttl'] = self.ttl
    return MemcacheManager.set(*args, **kwargs)
def get_activity_scores(cls, student_user_ids, course, force_refresh=False):
    """Retrieve activity data for student using EventEntity.

    Either rebuilds all students' scores from events (force_refresh) or
    serves them from per-student memcache entries, querying only for
    students missing from the cache.

    Improvements: the repeated Student.get_student_by_user_id() lookups
    inside each loop are hoisted into a single lookup per student, the
    `== None` comparison is now `is None`, and `len(...) > 0` uses plain
    truthiness. Behavior is otherwise unchanged.
    """
    # Instantiate parser object
    cached_date = datetime.datetime.now()
    activityParser = ActivityScoreParser()

    if force_refresh:
        activityParser.params = activityParser.build_additional_mapper_params(
            course.app_context)
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id]),
                batch_size=500, report_every=1000)

            def map_fn(activity_attempt):
                activityParser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)
        activityParser.build_missing_scores()

        # Cache results for each student (one lookup per student).
        for user_id in student_user_ids:
            email = Student.get_student_by_user_id(user_id).email
            cached_student_data = {
                'date': cached_date,
                'scores': activityParser.activity_scores.get(email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(email), cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            email = Student.get_student_by_user_id(student_id).email
            scores_for_student = MemcacheManager.get(
                cls._memcache_key_for_student(email))
            if scores_for_student:
                cached_date = scores_for_student['date']
                activityParser.activity_scores[student_id] = (
                    scores_for_student['scores'])
            else:
                uncached_students.append(student_id)
        if uncached_students:
            # NOTE(review): now() < cached_date is almost never true since
            # cached_date starts as now(); confirm the intended staleness
            # check before changing it.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activityParser.params = (
                activityParser.build_additional_mapper_params(
                    course.app_context))
            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id]),
                    batch_size=500, report_every=1000)

                def map_fn(activity_attempt):
                    activityParser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)
            activityParser.build_missing_scores()

            # Cache results for each student.
            for user_id in uncached_students:
                email = Student.get_student_by_user_id(user_id).email
                cached_student_data = {
                    'date': cached_date,
                    'scores': activityParser.activity_scores.get(email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(email), cached_student_data)

    score_data = {
        'date': cached_date,
        'scores': activityParser.activity_scores,
    }
    return score_data
def get(self, *args, **kwargs):
    """Delegate reads straight to the shared MemcacheManager."""
    value = MemcacheManager.get(*args, **kwargs)
    return value
def get_activity_scores(cls, student_user_ids, course, force_refresh=True):
    """Retrieve activity data for student using EventEntity.

    For each student, launch a Query of EventEntities to retrieve
    student scores. The Query is launched as a map-reduce background
    process that will return up to 500 results, reporting back
    every second. It reports back by calling the map_fn callback,
    which in turn calls parse_activity_scores. As soon as the Query is
    launched (in the background) the foreground process calls
    build_missing_scores() to construct a student_answer dict that
    will be updated as score data for that student is received.

    Events properties include a userid (a number) and a source (e.g.,
    tag-assessment), a recorded-on date (timestamp) and data (a
    dictionary). Here's a typical data dict:

    {"loc": {"city": "mililani", "language": "en-US,en;q=0.8",
     "locale": "en_US", "country": "US", "region": "hi",
     "long": -158.01528099999999, "lat": 21.451331,
     "page_locale": "en_US"}, "instanceid": "yOkVTqWogdaF",
     "quid": "5733935958982656", "score": 1,
     "location": "https://mobilecsp-201608.appspot.com/mobilecsp/unit?unit=1&lesson=45",
     "answer": [0, 1, 2, 4], "type": "McQuestion",
     "user_agent": "Mozilla/5.0 ..."}

    Note that it includes the unit_id and lesson_id as part of the Url.

    BUG FIX: the cached-read path copied scores_for_student['scores']
    into num_attempts_dict; it now reads the 'attempts' entry (with a {}
    fallback for older cache records), and the uncached write path now
    caches 'attempts' too, matching the force_refresh path.
    """
    # Instantiate parser object
    cached_date = datetime.datetime.now()
    activityParser = ActivityScoreParser()

    if force_refresh:
        activityParser.params = activityParser.build_additional_mapper_params(
            course.app_context)

        # Launch a background Query for each student's activity data.
        # This is expensive.
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id])
                .filter('recorded_on >= ', cls.CUTOFF_DATE),
                batch_size=1000, report_every=1000)

            # Callback function -- e.g., 45-50 callbacks per query
            def map_fn(activity_attempt):
                activityParser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)

        # In the foreground create the student_answer_dict, stored at
        # activity_scores[student][unit][lesson][sequence] where sequence
        # is the question's position within the lesson.
        activityParser.build_missing_scores()

        # Cache results for each student.
        for user_id in student_user_ids:
            student = Student.get_by_user_id(user_id)
            cached_student_data = {
                'date': cached_date,
                'scores': activityParser.activity_scores.get(
                    student.email, {}),
                'attempts': activityParser.num_attempts_dict.get(
                    student.email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(student.email),
                cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            if student_id != '':
                student = Student.get_by_user_id(student_id)
                scores_for_student = MemcacheManager.get(
                    cls._memcache_key_for_student(student.email))
                if scores_for_student:
                    cached_date = scores_for_student['date']
                    activityParser.activity_scores[student_id] = (
                        scores_for_student['scores'])
                    activityParser.num_attempts_dict[student_id] = (
                        scores_for_student.get('attempts', {}))
                else:
                    uncached_students.append(student_id)
        if uncached_students:
            # NOTE(review): now() < cached_date is almost never true since
            # cached_date starts as now(); confirm the intended staleness
            # check before changing it.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activityParser.params = (
                activityParser.build_additional_mapper_params(
                    course.app_context))
            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id])
                    .filter('recorded_on >= ', cls.CUTOFF_DATE),
                    batch_size=1000, report_every=1000)

                def map_fn(activity_attempt):
                    activityParser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)
            activityParser.build_missing_scores()

            # Cache results for each student.
            for user_id in uncached_students:
                student = Student.get_by_user_id(user_id)
                cached_student_data = {
                    'date': cached_date,
                    'scores': activityParser.activity_scores.get(
                        student.email, {}),
                    'attempts': activityParser.num_attempts_dict.get(
                        student.email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(student.email),
                    cached_student_data)

    score_data = {
        'date': cached_date,
        'scores': activityParser.activity_scores,
        'attempts': activityParser.num_attempts_dict,
    }
    if GLOBAL_DEBUG:
        logging.debug('***RAM*** get_activity_scores returning scores: ' +
                      str(score_data['scores']))
    return score_data
def get_sections(cls, allow_cached=True):
    """Return up to 1000 course sections, newest first, via memcache."""
    sections = MemcacheManager.get(cls.memcache_key)
    if allow_cached and sections is not None:
        return sections
    sections = CourseSectionEntity.all().order('-date').fetch(1000)
    MemcacheManager.set(cls.memcache_key, sections)
    return sections
def delete(self):
    """Remove the course section entity, then drop its cache entry."""
    cache_key = self.memcache_key
    super(CourseSectionEntity, self).delete()
    MemcacheManager.delete(cache_key)
def done(self):
    """Publish the collection's key strings to memcache with a 48h TTL,
    but only when new keys were actually fetched this run."""
    if not self.new_keys_fetched:
        return
    key_strings = [str(key) for key in self.cache.keys()]
    MemcacheManager.set(self._collection_key, key_strings, ttl=60 * 60 * 48)
def clear_forums_memcache():
    """Drop the cached list of all forums."""
    memcache.delete(FORUMS_MEMCACHE_KEY)
def clear_topics_memcache(forum):
    """Drop the cached topic list for the given forum."""
    memcache.delete(topics_memcache_key(forum))
def done(self):
    """When new keys were fetched, cache the full set of key strings
    under the collection key for 48 hours."""
    if not self.new_keys_fetched:
        return
    stringified = [str(k) for k in self.cache.keys()]
    MemcacheManager.set(self._collection_key, stringified, ttl=60 * 60 * 48)
def put(self):
    """Persist the entity and write the fresh copy through to memcache."""
    saved_key = super(ProgrammingAnswersEntity, self).put()
    cache_key = self._memcache_key(self.key().name())
    MemcacheManager.set(cache_key, self)
    return saved_key