def post(self):
    """Blobstore upload handler: attach an uploaded cover image to a TrainingCentre.

    Reads the uploaded "cover_image" blob and the "tc_id" form field, stores
    the blob key on the training centre, invalidates its cache entry, and
    redirects to the "continue" URL.
    """
    upload_files = self.get_uploads("cover_image")
    # Renamed from `id` to avoid shadowing the builtin.
    tc_id = self.request.get("tc_id")
    tc = TrainingCentre.get_by_id(long(tc_id))
    redirect_url = self.request.get("continue").encode('ascii', 'ignore')
    if upload_files:  # idiomatic truthiness: covers both None and empty list
        blob_info = upload_files[0]
        tc.cover = blob_info.key()
        tc.put()
        # Drop the stale cached copy now that the cover changed.
        mc_delete(cache_keys.get_trainingcentre_cache_key(long(tc_id)))
        logger.info('Cover image link: ' + images.get_serving_url(tc.cover))
    return self.redirect(redirect_url)
def post(self):
    """Blobstore upload handler: attach an uploaded cover image to a Playground.

    Reads the uploaded "cover_image" blob and the "pg_id" form field, stores
    the blob key on the playground, invalidates its cache entry, and
    redirects to the "continue" URL.
    """
    upload_files = self.get_uploads("cover_image")
    # Renamed from `id` to avoid shadowing the builtin.
    pg_id = self.request.get("pg_id")
    pg = Playground.get_by_id(long(pg_id))
    redirect_url = self.request.get("continue").encode('ascii', 'ignore')
    if upload_files:  # idiomatic truthiness: covers both None and empty list
        blob_info = upload_files[0]
        pg.cover = blob_info.key()
        pg.put()
        # Drop the stale cached copy now that the cover changed.
        mc_delete(cache_keys.get_playground_cache_key(long(pg_id)))
        logger.info('Cover image link: ' + images.get_serving_url(pg.cover))
    return self.redirect(redirect_url)
def status_change(self, match, user_info):
    """Toggle a match between active (2) and deactive (1) status.

    Stamps the updater, flips the status, persists the match, and
    invalidates its cache entry.

    Returns the datastore key of the saved match, or None when no
    match (or no match key) was supplied.
    """
    key = None
    if match is not None and match.key is not None:
        match.updated_by = user_info.key
        # Active (2) becomes deactive (1); anything else becomes active.
        match.status = 1 if match.status == 2 else 2
        key = match.put()
    if key is not None:
        mc_delete(cache_keys.get_match_cache_key(key.id()))
    return key
def split_keys(self, arr, size):
    """Split `arr` into chunks of `size` keys, caching each full chunk as a search page.

    Every full chunk is stored under a 'pageN' cache key (any previous entry
    for that page is deleted first, then the cache is re-populated). The
    final remainder — possibly shorter than `size`, possibly empty — is
    appended uncached, preserving the original behavior.

    Args:
        arr: list of search keys to paginate.
        size: maximum number of keys per page.

    Returns:
        List of chunks; the last element is the (uncached) remainder.
    """
    chunks = []
    page_num = 0
    while len(arr) > size:
        chunk = arr[:size]
        page_num += 1
        # Renamed from `id` to avoid shadowing the builtin.
        page_id = 'page' + str(page_num)
        logger.info("Search Key Id: %s " % page_id)
        logger.debug("Search Keys: " + str(chunk))
        prev_data = self.get_search_page(page_id)
        logger.debug('mc_prev_data: ' + str(prev_data))
        del_data = mc_delete(cache_keys.get_search_page_cache_key(page_id))
        logger.debug('mc_del_data: ' + str(del_data))
        # Bind `chunk` as a default argument so the callback is safe even if
        # mc_wrap ever defers the call (the original `lambda x: pice` would
        # late-bind to the last chunk in that case).
        final_data = mc_wrap(cache_keys.get_search_page_cache_key(page_id),
                             ENTITY_CACHE_EXPIRATION,
                             lambda x, chunk=chunk: chunk)
        logger.debug('mc_final_data: ' + str(final_data))
        chunks.append(chunk)
        arr = arr[size:]
    chunks.append(arr)
    return chunks
def persist(self, user, user_info):
    """Create or update a user record and invalidate its cache entry.

    If a record already exists for the user's key, the stored copy is
    updated from `user`; otherwise a new record is created.

    Returns the datastore key of the saved user, or None when `user` is None.
    """
    key = None
    if user is not None:
        curr_user = None
        if user.key is not None:
            curr_user = self.get_record(user.key.id())
        if curr_user is not None:
            # Entry exists for the same user: update the stored copy.
            self.copy_user_model(curr_user, user)
            curr_user.updated_by = user_info.key
            key = curr_user.put()
        else:
            # NOTE(review): the original had no create branch, so brand-new
            # users were silently dropped (returned None). Added for parity
            # with the sibling persist() methods (locality/register) —
            # confirm users are not meant to be created elsewhere.
            user.created_by = user_info.key
            user.updated_by = user_info.key
            key = user.put()
        logger.debug("User persisted in datastore, %s " % key)
    if key is not None:
        # TODO: Make sure all the caches that hold this entity are deleted here
        mc_delete(cache_keys.get_user_cache_key(key.id()))
    return key
def persist(self, locality, user_info):
    """Create or update a locality record and invalidate its cache entry.

    An existing record (looked up by the locality's key) is updated in
    place; otherwise a brand-new locality is created with ownership stamps.

    Returns the datastore key of the saved locality, or None when
    `locality` is None.
    """
    if locality is None:
        return None
    existing = None
    if locality.key is not None:
        existing = self.get_record(locality.key.id())
    if existing is not None:
        # A record already exists: copy the incoming data onto it.
        self.copy_locality_model(existing, locality)
        existing.updated_by = user_info.key
        key = existing.put()
    else:
        # First time we see this locality: stamp ownership and create it.
        locality.created_by = user_info.key
        locality.updated_by = user_info.key
        key = locality.put()
    if key is not None:
        mc_delete(cache_keys.get_locality_cache_key(key.id()))
    return key
def search_index_suggest(self, user_ip, status, **params):
    """Query the playground search index for suggestions.

    Builds a full-text query from the status and the supplied params
    (a 5 km radius around 'latlong', excluding the user's own
    'address.locality'), runs it sorted by name, and returns the matching
    datastore keys. The per-IP suggestion cache is refreshed when it
    already holds an entry.

    Returns:
        List of ndb.Key objects for Playground, or [] if the search fails.
    """
    logger.debug('Suggest Search Param Results: ' + str(params))
    # Initialize so the query is well-defined even for status == 'all'
    # (the original left query_str unbound in that case -> NameError).
    query_str = ''
    if status != 'all':
        status_value = STATUS_DICT.get(status)
        logger.debug('Status %d ' % (status_value))
        query_str = 'status:' + str(status_value)
    for key, value in params.items():
        if value is None:
            continue
        if key == 'latlong':
            # Restrict to a 5 km radius around the supplied point.
            query_str += ' AND distance(latlong, geopoint(' + str(value) + ')) < 5000'
        elif '.' in key:
            struct, attr = key.split('.')
            if struct == 'address' and attr == 'locality':
                # Suggest playgrounds outside the user's own locality.
                query_str += ' NOT locality:' + str(value)
    try:
        index = search.Index(PLAYGROUND)
        sortopts = search.SortOptions(expressions=[
            search.SortExpression(expression='name', direction='ASCENDING')])
        search_query = search.Query(
            query_string=query_str,
            options=search.QueryOptions(
                limit=PAGE_SIZE,
                sort_options=sortopts))
        search_results = index.search(search_query)
    except search.Error:
        # The original fell through with search_results unbound (NameError);
        # return no suggestions instead.
        logger.exception("NdbPlaygroundDao:: Search query failed for suggest playgrounds")
        return []
    # Map the search documents back to datastore keys.
    keys = [ndb.Key(Playground, long(doc.doc_id)) for doc in search_results]
    cache_id = 'suggest_' + str(PLAYGROUND) + '_' + str(user_ip)
    get_keys = mc_get(cache_keys.get_suggest_keys_cache_key(cache_id))
    if get_keys is not None:
        # NOTE(review): the cache is only refreshed when an entry already
        # exists; a cold cache is never populated here — confirm intended.
        mc_delete(cache_keys.get_suggest_keys_cache_key(cache_id))
        add_keys = mc_wrap(cache_keys.get_suggest_keys_cache_key(cache_id),
                           ENTITY_CACHE_EXPIRATION, lambda x: keys)
        logger.info('No of Suggest Playground Added to cache : %s' % len(add_keys))
    return keys
def status_change(self, playground, user_info):
    """Flip a playground's status according to the caller's role.

    Admins toggle between active (2) and deactive (1); other statuses are
    left untouched. Non-admins toggle between disabled (0) and enabled (1):
    0 becomes 1, anything else becomes 0. The change is persisted, the
    search index is updated, and the cache entry is invalidated.

    Returns the datastore key of the saved playground, or None when no
    playground (or no playground key) was supplied.
    """
    key = None
    if playground is not None and playground.key is not None:
        playground.updated_by = user_info.key
        if user_has_role(user_info, 'admin'):
            if playground.status == 1:
                playground.status = 2  # active status
            elif playground.status == 2:
                playground.status = 1  # deactive status
        else:
            # Non-admins: 0 -> 1 (enable), anything else -> 0 (disable).
            playground.status = 1 if playground.status == 0 else 0
        key = playground.put()
    if key is not None:
        # Keep the search index in sync before dropping the cached copy.
        self.update_search_index(key.id(), playground)
        mc_delete(cache_keys.get_playground_cache_key(key.id()))
    return key
def persist(self, register, user_info):
    """Create or update a registration record and invalidate its cache entry.

    An existing record (looked up by the register's key) is updated in
    place; otherwise a brand-new registration is created in the
    pending_creation state with ownership stamps.

    Returns the datastore key of the saved registration, or None when
    `register` is None.
    """
    if register is None:
        return None
    existing = None
    if register.key is not None:
        existing = self.get_record(register.key.id())
    if existing is not None:
        # A record already exists: copy the incoming data onto it.
        self.copy_register_model(existing, register)
        existing.updated_by = user_info.key
        key = existing.put()
    else:
        # New registration: stamp ownership and start in pending_creation.
        register.created_by = user_info.key
        register.updated_by = user_info.key
        register.status = 0  # pending_creation status
        key = register.put()
    logger.debug("Register persisted in datastore, %s " % key)
    if key is not None:
        # TODO: Make sure all the caches that hold this entity are deleted here
        mc_delete(cache_keys.get_register_cache_key(key.id()))
    return key
def persist(self, event, user_info):
    """Create or update an event and invalidate the event caches.

    Update path: copies the incoming data onto the stored record; unless
    the caller is an admin, the event is forced back to pending_approval
    and the caller is added to the owners list. Create path: stamps
    ownership; non-admin creations start as pending_creation with the
    caller as sole owner, admin creations start as pending_approval.
    On success the search index is updated and every event-list cache
    (recent/ongoing/future/upcoming, plus featured when applicable) is
    invalidated.

    Returns the datastore key of the saved event, or None when `event`
    is None.
    """
    key = None
    if event is not None:
        curr_event = None
        if event.key is not None:
            curr_event = self.get_record(event.key.id())
        # If entry exists for the same event, update the data
        if curr_event is not None:
            self.copy_event_model(curr_event, event)
            # NOTE(review): this clears parent_event_id on the INCOMING
            # event AFTER its data was copied into curr_event, so the
            # stored record's parent id is untouched — possibly meant to
            # be curr_event.parent_event_id; confirm.
            event.parent_event_id = None
            curr_event.updated_by = user_info.key
            # change the status to pending approval after every edit,
            # unless the current user is an admin
            if not user_has_role(user_info, 'admin'):
                curr_event.status = 1 #pending_approval status
                if not user_info.key in curr_event.owners:
                    curr_event.owners.append(user_info.key)
            key = curr_event.put()
        else:
            # create a new event
            event.created_by = user_info.key
            event.updated_by = user_info.key
            event.parent_event_id = None
            if not user_has_role(user_info, 'admin'):
                event.owners = []
                event.owners.append(user_info.key)
                event.status = 0 #pending_creation status
            else:
                event.status = 1 #pending_approval status
            key = event.put()
    if key is not None:
        #update the search index
        self.update_search_index(key.id(), event)
        #TODO: Make sure all the caches that has this entity is deleted here
        mc_delete(cache_keys.get_event_cache_key(key.id()))
        mc_delete(cache_keys.get_recent_event_cache_key())
        mc_delete(cache_keys.get_ongoing_event_cache_key())
        mc_delete(cache_keys.get_ongoing_future_event_cache_key())
        mc_delete(cache_keys.get_upcoming_event_cache_key())
        if event.featured:
            mc_delete(cache_keys.get_featured_event_cache_key())
    return key
def status_change(self, event, user_info):
    """Flip an event's status according to the caller's role.

    Admins toggle between active (2) and deactive (1); other statuses are
    left untouched. Non-admins toggle between disabled (0) and enabled (1):
    0 becomes 1, anything else becomes 0. The change is persisted, the
    search index is updated, and every event-list cache is invalidated.

    Returns the datastore key of the saved event, or None when no event
    (or no event key) was supplied.
    """
    key = None
    if event is not None and event.key is not None:
        event.updated_by = user_info.key
        if user_has_role(user_info, 'admin'):
            if event.status == 1:
                event.status = 2  # active status
            elif event.status == 2:
                event.status = 1  # deactive status
        else:
            # Non-admins: 0 -> 1 (enable), anything else -> 0 (disable).
            event.status = 1 if event.status == 0 else 0
        key = event.put()
    if key is not None:
        # Keep the search index in sync before dropping the cached copies.
        self.update_search_index(key.id(), event)
        # TODO: Make sure all the caches that hold this entity are deleted here
        mc_delete(cache_keys.get_event_cache_key(key.id()))
        mc_delete(cache_keys.get_recent_event_cache_key())
        mc_delete(cache_keys.get_ongoing_event_cache_key())
        mc_delete(cache_keys.get_ongoing_future_event_cache_key())
        mc_delete(cache_keys.get_upcoming_event_cache_key())
        if event.featured:
            mc_delete(cache_keys.get_featured_event_cache_key())
    return key