def get_page(cls, page_name, content_lambda):
    """Get page from cache or create page on demand.

    Args:
        page_name: memcache key under which the page is stored.
        content_lambda: zero-argument callable that builds the page
            content on a cache miss.

    Returns:
        The cached content, or freshly built content on a miss.
    """
    content = MemcacheManager.get(page_name)
    # BUG FIX: only an actual cache miss (None) triggers a rebuild.
    # The previous truthiness test re-built and re-cached any falsy
    # cached value (e.g. an empty page) on every single request.
    if content is None:
        logging.info('Cache miss: ' + page_name)
        content = content_lambda()
        MemcacheManager.set(page_name, content)
    return content
def get_announcements(cls, allow_cached=True):
    """Return up to 1000 announcements, newest first, via memcache."""
    cached = MemcacheManager.get(cls.memcache_key)
    if allow_cached and cached is not None:
        return cached
    # TODO(psimakov): prepare to exceed 1MB max item size
    # read more here: http://stackoverflow.com
    # /questions/5081502/memcache-1-mb-limit-in-google-app-engine
    fresh = AnnouncementEntity.all().order('-date').fetch(1000)
    MemcacheManager.set(cls.memcache_key, fresh)
    return fresh
def get_announcements(cls, allow_cached=True):
    """Fetch announcements ordered by date descending, caching the list."""
    items = MemcacheManager.get(cls.memcache_key)
    refetch = items is None or not allow_cached
    if refetch:
        items = AnnouncementEntity.all().order('-date').fetch(1000)
        # TODO(psimakov): prepare to exceed 1MB max item size
        # read more here: http://stackoverflow.com
        # /questions/5081502/memcache-1-mb-limit-in-google-app-engine
        MemcacheManager.set(cls.memcache_key, items)
    return items
def get(cls, student, unit_id):
    """Loads programming answer entity."""
    key = cls._create_key(student.user_id, unit_id)
    cache_key = cls._memcache_key(key)
    value = MemcacheManager.get(cache_key)
    if NO_OBJECT == value:
        # Negative-cache hit: a previous lookup already found nothing.
        return None
    if value:
        return value
    value = cls.get_by_key_name(key)
    # Cache the entity, or a NO_OBJECT marker so repeated misses
    # skip the datastore.
    MemcacheManager.set(cache_key, value if value else NO_OBJECT)
    return value
def get_forum_by_url(forumurl):
    """Return the forum whose url equals forumurl, or None."""
    # number of forums is small, so we cache all of them
    forums = memcache.get(FORUMS_MEMCACHE_KEY)
    if not forums:
        # this effectively limits number of forums to 200
        forums = Forum.all().fetch(200)
        if not forums:
            return None
        memcache.set(FORUMS_MEMCACHE_KEY, forums)
    return next((f for f in forums if forumurl == f.url), None)
def get_layout(cls):
    """Return the explorer layout as a DTO, from memcache or datastore."""
    with Namespace(cls.TARGET_NAMESPACE):
        cached = MemcacheManager.get(
            cls._memcache_key(), namespace=cls.TARGET_NAMESPACE)
        if cached:
            return ExplorerLayoutDTO(cached)
        entity = ExplorerLayout.get_by_key_name(cls.KEY)
        if not entity:
            # Nothing stored yet: hand back an empty DTO.
            return ExplorerLayoutDTO(None)
        MemcacheManager.set(
            cls._memcache_key(), entity, namespace=cls.TARGET_NAMESPACE)
        return ExplorerLayoutDTO(entity)
def update_layout(cls, top_text=None, right_video_id=None, category_order=None):
    """Update the stored explorer layout and refresh its memcache entry.

    Only arguments that are not None are applied to the entity.
    """
    with Namespace(cls.TARGET_NAMESPACE):
        layout = ExplorerLayout.get_by_key_name(cls.KEY)
        if not layout:
            layout = ExplorerLayout(key_name=cls.KEY)
        if top_text is not None:
            layout.top_text = top_text
        if right_video_id is not None:
            layout.right_video_id = right_video_id
        if category_order is not None:
            # Stored serialized; readers are expected to parse it back.
            layout.category_order = yaml.safe_dump(category_order)
        layout.put()
        MemcacheManager.set(
            cls._memcache_key(), layout, namespace=cls.TARGET_NAMESPACE)
def get_teacher_by_email(cls, email):
    """Returns enrolled teacher or None."""
    # ehiller - not sure if memcache check is in the right place; we might
    # want it after checking the datastore, depending on what
    # MemcacheManager returns when a teacher exists but was never cached.
    cache_key = cls._memcache_key(email)
    teacher = MemcacheManager.get(cache_key)
    if NO_OBJECT == teacher:
        # Negative-cache marker: a previous lookup found no teacher.
        return None
    if not teacher:
        teacher = Teacher.get_by_email(email)
        MemcacheManager.set(cache_key, teacher if teacher else NO_OBJECT)
    # ehiller - removed isEnrolled check; a teacher no longer needs to be
    # enrolled for their data to be returned.
    return teacher if teacher else None
def get_activity_scores(cls, student_user_ids, course, force_refresh=True):
    """Retrieve activity data for student using EventEntity.

    For each student, launch a Query of EventEntities to retrieve
    student scores.  The Query is launched as a map-reduce background
    process, reporting back via the map_fn callback which calls
    parse_activity_scores.  Once all queries have run, the foreground
    calls build_missing_scores() to fill in gaps.

    Event data dicts carry e.g. 'quid', 'score', 'answer', 'type' and a
    'location' URL that embeds the unit_id and lesson_id.

    Args:
        student_user_ids: iterable of student user-id strings.
        course: course whose app_context feeds the mapper params.
        force_refresh: when True, bypass memcache and re-query events.

    Returns:
        dict with 'date', 'scores' and 'attempts' keys.
    """
    # Instantiate parser object
    cached_date = datetime.datetime.now()
    activityParser = ActivityScoreParser()

    if force_refresh:
        activityParser.params = activityParser.build_additional_mapper_params(
            course.app_context)

        # Launch a background Query for each student's activity data.
        # This is expensive.
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id])
                .filter('recorded_on >= ', cls.CUTOFF_DATE),
                batch_size=1000, report_every=1000)

            # Callback function -- e.g., 45-50 callbacks per query
            def map_fn(activity_attempt):
                activityParser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)

        # In the foreground create the student_answer_dict, stored at
        # activity_scores[student][unit][lesson][sequence] where sequence
        # is the question's position within the lesson.
        activityParser.build_missing_scores()

        # Cache results for each student.
        for user_id in student_user_ids:
            student = Student.get_by_user_id(user_id)
            cached_student_data = {
                'date': cached_date,
                'scores': activityParser.activity_scores.get(
                    student.email, {}),
                'attempts': activityParser.num_attempts_dict.get(
                    student.email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(student.email),
                cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            if student_id != '':
                student = Student.get_by_user_id(student_id)
                scores_for_student = MemcacheManager.get(
                    cls._memcache_key_for_student(student.email))
                if scores_for_student:
                    cached_date = scores_for_student['date']
                    activityParser.activity_scores[student_id] = (
                        scores_for_student['scores'])
                    # BUG FIX: previously copied 'scores' into the
                    # attempts dict.  Use the cached 'attempts' data;
                    # .get() tolerates older cache entries without it.
                    activityParser.num_attempts_dict[student_id] = (
                        scores_for_student.get('attempts', {}))
                else:
                    uncached_students.append(student_id)

        if len(uncached_students) > 0:
            # NOTE(review): now() < cached_date is essentially never true
            # for a timestamp cached in the past; this looks like an
            # inverted staleness check -- behavior preserved, confirm.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activityParser.params = (
                activityParser.build_additional_mapper_params(
                    course.app_context))

            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id])
                    .filter('recorded_on >= ', cls.CUTOFF_DATE),
                    batch_size=1000, report_every=1000)

                def map_fn(activity_attempt):
                    activityParser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)

            activityParser.build_missing_scores()

            # Cache results for each newly computed student.  Include
            # 'attempts' so entries match the force_refresh branch schema.
            for user_id in uncached_students:
                student = Student.get_by_user_id(user_id)
                cached_student_data = {
                    'date': cached_date,
                    'scores': activityParser.activity_scores.get(
                        student.email, {}),
                    'attempts': activityParser.num_attempts_dict.get(
                        student.email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(student.email),
                    cached_student_data)

    score_data = {
        'date': cached_date,
        'scores': activityParser.activity_scores,
        'attempts': activityParser.num_attempts_dict,
    }
    if GLOBAL_DEBUG:
        logging.debug('***RAM*** get_activity_scores returning scores: ' +
                      str(score_data['scores']))
    return score_data
def get_sections(cls, allow_cached=True):
    """Return up to 1000 course sections, newest first, via memcache."""
    sections = MemcacheManager.get(cls.memcache_key)
    if sections is None or not allow_cached:
        sections = CourseSectionEntity.all().order('-date').fetch(1000)
        MemcacheManager.set(cls.memcache_key, sections)
    return sections
def get_activity_scores(cls, student_user_ids, course, force_refresh=False):
    """Retrieve activity data for student using EventEntity.

    Args:
        student_user_ids: iterable of student user-id strings.
        course: course whose app_context feeds the mapper params.
        force_refresh: when True, bypass memcache and re-query events.

    Returns:
        dict with 'date' and 'scores' keys.
    """
    # Instantiate parser object
    cached_date = datetime.datetime.now()
    activityParser = ActivityScoreParser()

    if force_refresh:
        activityParser.params = activityParser.build_additional_mapper_params(
            course.app_context)

        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id]),
                batch_size=500, report_every=1000)

            def map_fn(activity_attempt):
                activityParser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)

        activityParser.build_missing_scores()

        # Cache results for each student.  PERF FIX: look the student up
        # once per loop instead of twice (each lookup is a datastore read).
        for user_id in student_user_ids:
            student = Student.get_student_by_user_id(user_id)
            cached_student_data = {
                'date': cached_date,
                'scores': activityParser.activity_scores.get(
                    student.email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(student.email),
                cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            student = Student.get_student_by_user_id(student_id)
            scores_for_student = MemcacheManager.get(
                cls._memcache_key_for_student(student.email))
            if scores_for_student:
                cached_date = scores_for_student['date']
                activityParser.activity_scores[student_id] = (
                    scores_for_student['scores'])
            else:
                uncached_students.append(student_id)

        if len(uncached_students) > 0:
            # NOTE(review): now() < cached_date is essentially never true
            # for a past timestamp; looks like an inverted staleness
            # check -- behavior preserved, confirm intent.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activityParser.params = (
                activityParser.build_additional_mapper_params(
                    course.app_context))

            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id]),
                    batch_size=500, report_every=1000)

                def map_fn(activity_attempt):
                    activityParser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)

            activityParser.build_missing_scores()

            # Cache results for each newly computed student.
            for user_id in uncached_students:
                student = Student.get_student_by_user_id(user_id)
                cached_student_data = {
                    'date': cached_date,
                    'scores': activityParser.activity_scores.get(
                        student.email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(student.email),
                    cached_student_data)

    score_data = {
        'date': cached_date,
        'scores': activityParser.activity_scores,
    }
    return score_data
def get_activity_scores(cls, student_user_ids, course, force_refresh=True):
    """Retrieve activity data for student using EventEntity.

    For each student, launch a Query of EventEntities to retrieve
    student scores.  The Query is run as a map-reduce background process
    reporting back via the map_fn callback, which in turn calls
    parse_activity_scores.  Afterwards the foreground calls
    build_missing_scores() to fill in missing entries.

    Event data dicts carry e.g. 'quid', 'score', 'answer', 'type' and a
    'location' URL that embeds the unit_id and lesson_id.

    Args:
        student_user_ids: iterable of student user-id strings.
        course: course whose app_context feeds the mapper params.
        force_refresh: when True, bypass memcache and re-query events.

    Returns:
        dict with 'date', 'scores' and 'attempts' keys.
    """
    # Instantiate parser object
    cached_date = datetime.datetime.now()
    activityParser = ActivityScoreParser()

    if force_refresh:
        activityParser.params = activityParser.build_additional_mapper_params(
            course.app_context)

        # Launch a background Query for each student's activity data.
        # This is expensive.
        for user_id in student_user_ids:
            mapper = models_utils.QueryMapper(
                EventEntity.all().filter('user_id in', [user_id])
                .filter('recorded_on >= ', cls.CUTOFF_DATE),
                batch_size=1000, report_every=1000)

            # Callback function -- e.g., 45-50 callbacks per query
            def map_fn(activity_attempt):
                activityParser.parse_activity_scores(activity_attempt)

            mapper.run(map_fn)

        # In the foreground create the student_answer_dict, stored at
        # activity_scores[student][unit][lesson][sequence].
        activityParser.build_missing_scores()

        # Cache results for each student.
        for user_id in student_user_ids:
            student = Student.get_by_user_id(user_id)
            cached_student_data = {
                'date': cached_date,
                'scores': activityParser.activity_scores.get(
                    student.email, {}),
                'attempts': activityParser.num_attempts_dict.get(
                    student.email, {}),
            }
            MemcacheManager.set(
                cls._memcache_key_for_student(student.email),
                cached_student_data)
    else:
        uncached_students = []
        for student_id in student_user_ids:
            if student_id != '':
                student = Student.get_by_user_id(student_id)
                scores_for_student = MemcacheManager.get(
                    cls._memcache_key_for_student(student.email))
                if scores_for_student:
                    cached_date = scores_for_student['date']
                    activityParser.activity_scores[student_id] = (
                        scores_for_student['scores'])
                    # BUG FIX: previously copied 'scores' into the
                    # attempts dict.  Use the cached 'attempts' data;
                    # .get() tolerates older cache entries without it.
                    activityParser.num_attempts_dict[student_id] = (
                        scores_for_student.get('attempts', {}))
                else:
                    uncached_students.append(student_id)

        if len(uncached_students) > 0:
            # NOTE(review): now() < cached_date is essentially never true
            # for a past timestamp; looks like an inverted staleness
            # check -- behavior preserved, confirm intent.
            if cached_date is None or datetime.datetime.now() < cached_date:
                cached_date = datetime.datetime.now()
            activityParser.params = (
                activityParser.build_additional_mapper_params(
                    course.app_context))

            for user_id in uncached_students:
                mapper = models_utils.QueryMapper(
                    EventEntity.all().filter('user_id in', [user_id])
                    .filter('recorded_on >= ', cls.CUTOFF_DATE),
                    batch_size=1000, report_every=1000)

                def map_fn(activity_attempt):
                    activityParser.parse_activity_scores(activity_attempt)

                mapper.run(map_fn)

            activityParser.build_missing_scores()

            # Cache results for each newly computed student.  Include
            # 'attempts' so entries match the force_refresh branch schema.
            for user_id in uncached_students:
                student = Student.get_by_user_id(user_id)
                cached_student_data = {
                    'date': cached_date,
                    'scores': activityParser.activity_scores.get(
                        student.email, {}),
                    'attempts': activityParser.num_attempts_dict.get(
                        student.email, {}),
                }
                MemcacheManager.set(
                    cls._memcache_key_for_student(student.email),
                    cached_student_data)

    score_data = {
        'date': cached_date,
        'scores': activityParser.activity_scores,
        'attempts': activityParser.num_attempts_dict,
    }
    if GLOBAL_DEBUG:
        logging.debug('***RAM*** get_activity_scores returning scores: ' +
                      str(score_data['scores']))
    return score_data
def put(self):
    """Persist the entity and write it through to memcache."""
    result = super(ProgrammingAnswersEntity, self).put()
    cache_key = self._memcache_key(self.key().name())
    MemcacheManager.set(cache_key, self)
    return result
def set(self, *args, **kwargs):
    """Delegate to MemcacheManager.set, supplying this cache's default ttl."""
    if 'ttl' not in kwargs:
        kwargs['ttl'] = self.ttl
    return MemcacheManager.set(*args, **kwargs)
def done(self):
    """Persist the collection's key list if new keys were fetched."""
    if not self.new_keys_fetched:
        return
    key_strings = [str(key) for key in self.cache.keys()]
    # Keep the key list around for two days.
    MemcacheManager.set(self._collection_key, key_strings, ttl=60 * 60 * 48)
def set(self, *args, **kwargs):
    """Forward to MemcacheManager.set, defaulting ttl to self.ttl."""
    ttl = kwargs.pop('ttl', self.ttl)
    return MemcacheManager.set(*args, ttl=ttl, **kwargs)
def done(self):
    """Store the list of cached keys when new keys were fetched this run."""
    if self.new_keys_fetched:
        keys_as_strings = [str(k) for k in self.cache.keys()]
        # 48-hour retention for the collection's key list.
        MemcacheManager.set(
            self._collection_key, keys_as_strings, ttl=48 * 60 * 60)
def put(self):
    """Save the Teacher entity and refresh its memcache entry."""
    saved_key = super(Teacher, self).put()
    MemcacheManager.set(self._memcache_key(self.key().name()), self)
    return saved_key
def get_sections(cls, allow_cached=True):
    """Fetch course sections (newest first), preferring the memcache copy."""
    cached = MemcacheManager.get(cls.memcache_key)
    if allow_cached and cached is not None:
        return cached
    sections = CourseSectionEntity.all().order('-date').fetch(1000)
    MemcacheManager.set(cls.memcache_key, sections)
    return sections