def is_xsrf_token_valid(cls, token, action):
    """Validate a given XSRF token by recomputing it from its timestamp.

    Args:
        token: string of the form 'issued_on<DELIMITER_PUBLIC>digest' as
            produced by cls._create_token.
        action: the action name the token must have been minted for.

    Returns:
        True if the token is well formed, no older than
        cls.XSRF_TOKEN_AGE_SECS, and byte-identical to the authentic
        token recomputed for (action, issued_on); False otherwise.
    """
    # NOTE: a trailing memcache-based validation path that followed this
    # try/except was removed: it was unreachable because every branch
    # above it returns.
    try:
        parts = token.split(cls.DELIMITER_PUBLIC)
        if not len(parts) == 2:
            raise Exception('Bad token format, expected: a/b.')
        # long() is the Python 2 builtin; the timestamp may exceed int range.
        issued_on = long(parts[0])
        age = time.time() - issued_on
        if age > cls.XSRF_TOKEN_AGE_SECS:
            return False
        authentic_token = cls._create_token(action, issued_on)
        if authentic_token == token:
            return True
        return False
    except Exception:  # pylint: disable-msg=broad-except
        # Any malformed token is treated as invalid, never as an error.
        return False
def is_xsrf_token_valid(cls, token, action):
    """Validate a given XSRF token by recomputing it from its timestamp.

    The token embeds its issue time; a token is valid when it parses,
    is not older than cls.XSRF_TOKEN_AGE_SECS, and matches the token
    recomputed for the same (action, issued_on) pair.

    Returns:
        True when the token is authentic and fresh, False otherwise.
    """
    # Removed dead code: a memcache-based lookup used to follow this
    # try/except but could never run, since all paths above return.
    try:
        parts = token.split(cls.DELIMITER_PUBLIC)
        if len(parts) != 2:
            raise Exception('Bad token format, expected: a/b.')
        issued_on = long(parts[0])  # Python 2 long: timestamps can be large.
        if time.time() - issued_on > cls.XSRF_TOKEN_AGE_SECS:
            return False
        return cls._create_token(action, issued_on) == token
    except Exception:  # pylint: disable-msg=broad-except
        # Malformed input means "invalid", not "crash".
        return False
def get_page(cls, page_name, content_lambda):
    """Get page from cache or create page on demand."""
    cached = MemcacheManager.get(page_name)
    if cached:
        return cached
    # Cache miss: build the page lazily and remember it for next time.
    logging.info('Cache miss: ' + page_name)
    fresh = content_lambda()
    MemcacheManager.set(page_name, fresh)
    return fresh
def test_memcache(self):
    """Verify a permission check populates the roles memcache entry."""
    self._create_role()
    actions.login(STUDENT_EMAIL)
    # This call is expected to warm the cache as a side effect.
    roles.Roles.is_user_allowed(
        self._get_course(), PERMISSION_MODULE, PERMISSION)
    cached = MemcacheManager.get(roles.Roles.memcache_key)
    self.assertIn(STUDENT_EMAIL, cached)
    per_module = cached[STUDENT_EMAIL]
    self.assertIn(PERMISSION_MODULE.name, per_module)
    self.assertIn(PERMISSION, per_module[PERMISSION_MODULE.name])
def get_announcements(cls, allow_cached=True):
    """Return announcements, newest first, via a memcache read-through."""
    cached = MemcacheManager.get(cls.memcache_key)
    if allow_cached and cached is not None:
        return cached
    items = AnnouncementEntity.all().order('-date').fetch(1000)
    # TODO(psimakov): prepare to exceed 1MB max item size
    # read more here: http://stackoverflow.com
    # /questions/5081502/memcache-1-mb-limit-in-google-app-engine
    MemcacheManager.set(cls.memcache_key, items)
    return items
def test_memcache(self):
    """Checks that is_user_allowed caches role data under memcache_key."""
    self._create_role()
    actions.login(STUDENT_EMAIL)
    roles.Roles.is_user_allowed(
        self._get_course(), PERMISSION_MODULE, PERMISSION)
    mem_map = MemcacheManager.get(roles.Roles.memcache_key)
    for container, member in (
            (mem_map, STUDENT_EMAIL),
            (mem_map[STUDENT_EMAIL], PERMISSION_MODULE.name),
            (mem_map[STUDENT_EMAIL][PERMISSION_MODULE.name], PERMISSION)):
        self.assertIn(member, container)
def get_announcements(cls, allow_cached=True):
    """Fetch announcements (most recent first), optionally from cache."""
    items = MemcacheManager.get(cls.memcache_key)
    use_cache = allow_cached and items is not None
    if not use_cache:
        items = AnnouncementEntity.all().order('-date').fetch(1000)
        # TODO(psimakov): prepare to exceed 1MB max item size
        # read more here: http://stackoverflow.com
        # /questions/5081502/memcache-1-mb-limit-in-google-app-engine
        MemcacheManager.set(cls.memcache_key, items)
    return items
def __init__(self, top_key=None, already_have=None):
    """Set up the entity cache, optionally prefetching missing keys.

    Args:
        top_key: when set, keys listed in memcache under
            self._collection_key that are not already cached are fetched
            asynchronously via db.get_async.
        already_have: optional iterable of entities to pre-seed the
            cache with (keyed by entity.key()).
    """
    # Bug fix: the default used to be a shared mutable list ([]),
    # a classic Python pitfall; use None as the sentinel instead.
    if already_have is None:
        already_have = []
    self.top_key = top_key
    self.cache = {entity.key(): entity for entity in already_have}
    self.prefetch_query = None
    self.new_keys_fetched = False
    if top_key:
        keys_to_prefetch = MemcacheManager.get(self._collection_key)
        if keys_to_prefetch:
            # Only prefetch keys we do not already hold.
            keys_to_prefetch = set(keys_to_prefetch).difference(
                map(str, self.cache.keys()))
            self.prefetch_query = db.get_async(keys_to_prefetch)
    else:
        self.new_keys_fetched = len(already_have) > 0
def get_forum_by_url(forumurl):
    """Return the Forum whose url equals forumurl, or None."""
    # number of forums is small, so we cache all of them
    forums = memcache.get(FORUMS_MEMCACHE_KEY)
    if not forums:
        # this effectively limits number of forums to 200
        forums = Forum.all().fetch(200)
        if not forums:
            return None
        memcache.set(FORUMS_MEMCACHE_KEY, forums)
    return next((f for f in forums if f.url == forumurl), None)
def __init__(self, top_key=None, already_have=None):
    """Initialize the cache and start an async prefetch when possible.

    Args:
        top_key: collection owner key; when truthy, the key list stored
            in memcache under self._collection_key drives an async
            db.get_async prefetch of entities not yet cached.
        already_have: entities already in hand, used to seed the cache.
    """
    # Fixed mutable default argument (already_have=[]): the single list
    # object would be shared across every call using the default.
    already_have = [] if already_have is None else already_have
    self.top_key = top_key
    self.cache = {x.key(): x for x in already_have}
    self.prefetch_query = None
    self.new_keys_fetched = False
    if top_key:
        keys_to_prefetch = MemcacheManager.get(self._collection_key)
        if keys_to_prefetch:
            keys_to_prefetch = set(keys_to_prefetch).difference(
                map(str, self.cache.keys()))
            self.prefetch_query = db.get_async(keys_to_prefetch)
    else:
        self.new_keys_fetched = len(already_have) > 0
def get(cls, student, unit_id):
    """Loads programming answer entity."""
    key = cls._create_key(student.user_id, unit_id)
    cache_key = cls._memcache_key(key)
    cached = MemcacheManager.get(cache_key)
    # NO_OBJECT is a negative-cache marker: we know there is no entity.
    if NO_OBJECT == cached:
        return None
    if cached:
        return cached
    entity = cls.get_by_key_name(key)
    MemcacheManager.set(cache_key, entity if entity else NO_OBJECT)
    return entity
def get_layout(cls):
    """Return the explorer layout as a DTO, reading through memcache."""
    with Namespace(cls.TARGET_NAMESPACE):
        cached = MemcacheManager.get(
            cls._memcache_key(), namespace=cls.TARGET_NAMESPACE)
        if cached:
            return ExplorerLayoutDTO(cached)
        entity = ExplorerLayout.get_by_key_name(cls.KEY)
        if not entity:
            # No stored layout: hand back an empty DTO, nothing to cache.
            return ExplorerLayoutDTO(None)
        MemcacheManager.set(cls._memcache_key(), entity,
                            namespace=cls.TARGET_NAMESPACE)
        return ExplorerLayoutDTO(entity)
def get_topics_for_forum(forum, is_moderator, off, count):
    """Return (topics_page, next_offset); next_offset is None on last page."""
    off = int(off)
    cache_key = topics_memcache_key(forum)
    topics = memcache.get(cache_key)
    if not topics:
        query = Topic.gql("WHERE forum = :1 ORDER BY created_on DESC", forum)
        topics = query.fetch(1000)
        # Not caching this for now :(
        # TODO: should I pickle before memcache.set(cache_key, topics)?
    if not topics:
        return (None, 0)
    if not is_moderator:
        # Non-moderators never see deleted topics.
        topics = [topic for topic in topics if not topic.is_deleted]
    page = topics[off:off + count]
    # A short page signals the last page.
    new_off = None if len(page) < count else off + len(page)
    return (page, new_off)
def get_teacher_by_email(cls, email):
    """Returns enrolled teacher or None."""
    # ehiller - not sure if memcache check is in the right place; this
    # depends on what MemcacheManager returns when a teacher has not been
    # set there yet but actually exists in the datastore.
    cached = MemcacheManager.get(cls._memcache_key(email))
    if NO_OBJECT == cached:
        # Negative-cache hit: we previously learned there is no teacher.
        return None
    if cached:
        return cached
    teacher = Teacher.get_by_email(email)
    MemcacheManager.set(
        cls._memcache_key(email), teacher if teacher else NO_OBJECT)
    # ehiller - isEnrolled check was removed; enrollment is not required
    # to get teacher data back.
    return teacher
def get_activity_scores(cls, student_user_ids, course, force_refresh=True):
    """Retrieve activity data for students using EventEntity.

    For each student, launch a Query of EventEntities to retrieve student
    scores. The Query runs as a map-reduce background process that reports
    back via the map_fn callback, which calls parse_activity_scores. The
    foreground then calls build_missing_scores() to fill in entries for
    questions the student never answered.

    Args:
        student_user_ids: iterable of user-id strings.
        course: course whose app_context scopes the mapper params.
        force_refresh: when True, bypass memcache and re-query events;
            when False, serve from per-student memcache entries and only
            query for students with no cached data.

    Returns:
        dict with keys 'date' (datetime), 'scores' and 'attempts'
        (nested dicts keyed per student).
    """
    cached_date = datetime.datetime.now()
    activity_parser = ActivityScoreParser()
    if force_refresh:
        activity_parser.params = (
            activity_parser.build_additional_mapper_params(
                course.app_context))
        # Launch a background Query for each student's activity data.
        # This is expensive.
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id])
                .filter('recorded_on >= ', cls.CUTOFF_DATE),
                batch_size=1000, report_every=1000)

            # Callback function -- e.g., 45-50 callbacks per query.
            def map_fn(activity_attempt):
                activity_parser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)
        # Scores live at activity_scores[student][unit][lesson][sequence],
        # where sequence is the question's position within the lesson.
        activity_parser.build_missing_scores()
        # Cache results for each student, keyed by email.
        for user_id in student_user_ids:
            student = Student.get_by_user_id(user_id)
            cached_student_data = {
                'date': cached_date,
                'scores': activity_parser.activity_scores.get(
                    student.email, {}),
                'attempts': activity_parser.num_attempts_dict.get(
                    student.email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(student.email),
                cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            if student_id != '':
                student = Student.get_by_user_id(student_id)
                scores_for_student = MemcacheManager.get(
                    cls._memcache_key_for_student(student.email))
                if scores_for_student:
                    cached_date = scores_for_student['date']
                    activity_parser.activity_scores[student_id] = (
                        scores_for_student['scores'])
                    # Bug fix: this used to read ['scores'], clobbering
                    # attempt counts with score data. Use .get() so
                    # entries cached without 'attempts' do not KeyError.
                    activity_parser.num_attempts_dict[student_id] = (
                        scores_for_student.get('attempts', {}))
                else:
                    uncached_students.append(student_id)
        if uncached_students:
            # NOTE(review): the '<' comparison looks inverted (now() is
            # almost always >= cached_date) -- preserved as-is; confirm.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activity_parser.params = (
                activity_parser.build_additional_mapper_params(
                    course.app_context))
            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id])
                    .filter('recorded_on >= ', cls.CUTOFF_DATE),
                    batch_size=1000, report_every=1000)

                def map_fn(activity_attempt):
                    activity_parser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)
            activity_parser.build_missing_scores()
            # Cache results for each freshly-queried student.
            for user_id in uncached_students:
                student = Student.get_by_user_id(user_id)
                cached_student_data = {
                    'date': cached_date,
                    'scores': activity_parser.activity_scores.get(
                        student.email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(student.email),
                    cached_student_data)
    score_data = {
        'date': cached_date,
        'scores': activity_parser.activity_scores,
        'attempts': activity_parser.num_attempts_dict,
    }
    if GLOBAL_DEBUG:
        logging.debug('***RAM*** get_activity_scores returning scores: ' +
                      str(score_data['scores']))
    return score_data
def get_sections(cls, allow_cached=True):
    """Return course sections, newest first, via a memcache read-through."""
    cached = MemcacheManager.get(cls.memcache_key)
    if allow_cached and cached is not None:
        return cached
    sections = CourseSectionEntity.all().order('-date').fetch(1000)
    MemcacheManager.set(cls.memcache_key, sections)
    return sections
def get_activity_scores(cls, student_user_ids, course, force_refresh=True):
    """Retrieve activity data for students using EventEntity.

    Launches one background EventEntity query per student (via
    models_utils.QueryMapper); each result is fed to
    parse_activity_scores through map_fn, and build_missing_scores()
    then fills gaps for unanswered questions. With force_refresh=False,
    per-student results are served from memcache where available.

    Returns:
        dict with 'date', 'scores' and 'attempts' keys.
    """
    cached_date = datetime.datetime.now()
    activity_parser = ActivityScoreParser()
    if force_refresh:
        activity_parser.params = (
            activity_parser.build_additional_mapper_params(
                course.app_context))
        # Launch a background Query per student. This is expensive.
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id])
                .filter('recorded_on >= ', cls.CUTOFF_DATE),
                batch_size=1000, report_every=1000)

            def map_fn(activity_attempt):
                activity_parser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)
        # activity_scores[student][unit][lesson][sequence]: sequence is
        # the question's position within the lesson.
        activity_parser.build_missing_scores()
        # Cache per-student results keyed by email.
        for user_id in student_user_ids:
            student = Student.get_by_user_id(user_id)
            cached_student_data = {
                'date': cached_date,
                'scores': activity_parser.activity_scores.get(
                    student.email, {}),
                'attempts': activity_parser.num_attempts_dict.get(
                    student.email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(student.email),
                cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            if student_id != '':
                student = Student.get_by_user_id(student_id)
                scores_for_student = MemcacheManager.get(
                    cls._memcache_key_for_student(student.email))
                if scores_for_student:
                    cached_date = scores_for_student['date']
                    activity_parser.activity_scores[student_id] = (
                        scores_for_student['scores'])
                    # Bug fix: previously read ['scores'] here, so attempt
                    # counts were overwritten with score data; .get()
                    # guards entries cached without an 'attempts' key.
                    activity_parser.num_attempts_dict[student_id] = (
                        scores_for_student.get('attempts', {}))
                else:
                    uncached_students.append(student_id)
        if uncached_students:
            # NOTE(review): '<' looks inverted here -- preserved; confirm.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activity_parser.params = (
                activity_parser.build_additional_mapper_params(
                    course.app_context))
            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id])
                    .filter('recorded_on >= ', cls.CUTOFF_DATE),
                    batch_size=1000, report_every=1000)

                def map_fn(activity_attempt):
                    activity_parser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)
            activity_parser.build_missing_scores()
            # Cache results for students we just queried.
            for user_id in uncached_students:
                student = Student.get_by_user_id(user_id)
                cached_student_data = {
                    'date': cached_date,
                    'scores': activity_parser.activity_scores.get(
                        student.email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(student.email),
                    cached_student_data)
    score_data = {
        'date': cached_date,
        'scores': activity_parser.activity_scores,
        'attempts': activity_parser.num_attempts_dict,
    }
    if GLOBAL_DEBUG:
        logging.debug('***RAM*** get_activity_scores returning scores: ' +
                      str(score_data['scores']))
    return score_data
def get_activity_scores(cls, student_user_ids, course, force_refresh=False):
    """Retrieve activity data for students using EventEntity.

    Runs one EventEntity query per student through
    models_utils.QueryMapper, feeding results to parse_activity_scores
    and then filling gaps with build_missing_scores(). With
    force_refresh=False, cached per-student results are used when
    present and only missing students are queried.

    Returns:
        dict with 'date' and 'scores' keys.
    """
    cached_date = datetime.datetime.now()
    activity_parser = ActivityScoreParser()
    if force_refresh:
        activity_parser.params = (
            activity_parser.build_additional_mapper_params(
                course.app_context))
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id]),
                batch_size=500, report_every=1000)

            def map_fn(activity_attempt):
                activity_parser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)
        activity_parser.build_missing_scores()
        # Cache results for each student. Perf fix: look each Student up
        # once instead of twice per iteration.
        for user_id in student_user_ids:
            email = Student.get_student_by_user_id(user_id).email
            cached_student_data = {
                'date': cached_date,
                'scores': activity_parser.activity_scores.get(email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(email), cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            email = Student.get_student_by_user_id(student_id).email
            scores_for_student = MemcacheManager.get(
                cls._memcache_key_for_student(email))
            if scores_for_student:
                cached_date = scores_for_student['date']
                activity_parser.activity_scores[student_id] = (
                    scores_for_student['scores'])
            else:
                uncached_students.append(student_id)
        if uncached_students:
            # NOTE(review): '<' looks inverted here -- preserved; confirm.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activity_parser.params = (
                activity_parser.build_additional_mapper_params(
                    course.app_context))
            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id]),
                    batch_size=500, report_every=1000)

                def map_fn(activity_attempt):
                    activity_parser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)
            activity_parser.build_missing_scores()
            # Cache results for each freshly-queried student (single
            # Student lookup per iteration, as above).
            for user_id in uncached_students:
                email = Student.get_student_by_user_id(user_id).email
                cached_student_data = {
                    'date': cached_date,
                    'scores': activity_parser.activity_scores.get(email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(email),
                    cached_student_data)
    return {
        'date': cached_date,
        'scores': activity_parser.activity_scores,
    }
def get_sections(cls, allow_cached=True):
    """Fetch course sections (most recent first), optionally from cache."""
    sections = MemcacheManager.get(cls.memcache_key)
    needs_fetch = (not allow_cached) or (sections is None)
    if needs_fetch:
        sections = CourseSectionEntity.all().order('-date').fetch(1000)
        MemcacheManager.set(cls.memcache_key, sections)
    return sections
def get(self, *args, **kwargs):
    """Delegate a cache read straight to MemcacheManager.get."""
    return MemcacheManager.get(*args, **kwargs)
def get(self, *args, **kwargs):
    """Thin pass-through wrapper around MemcacheManager.get."""
    return MemcacheManager.get(*args, **kwargs)