def get(self):
    """Return one JSON page of publicly-visible "Open" sites for an event.

    Query params:
        shortname: event short name (defaults to "sandy" when absent).
        page: zero-based page number; page size is PAGE_OFFSET.
    """
    event_shortname = self.request.get("shortname")
    page_int = int(self.request.get("page"))
    # webapp's request.get returns "" (not None) for a missing param, so
    # test falsiness instead of comparing against None.
    if not event_shortname:
        event_shortname = "sandy"
    event = None
    for e in event_db.GetAllCached():
        if e.short_name == event_shortname:
            event = e
    if event is None:
        # Unknown event: 404 instead of crashing on event.key() below.
        self.response.set_status(404)
        return

    if event.short_name != 'moore':
        q = db.GqlQuery(
            'SELECT * FROM Site WHERE status >= :1 and event = :2',
            "Open", event.key())
    else:
        q = Query(model_class=site_db.Site)
        q.filter("event =", event.key())
        q.is_keys_only()
        q.filter("status >= ", "Open")

    # The original computed this page of ids twice; once is enough.
    this_offset = page_int * PAGE_OFFSET
    ids = [key.key().id()
           for key in q.fetch(PAGE_OFFSET, offset=this_offset)]

    def public_site_filter(site):
        # site is a dict; expose only fields safe for public consumption
        # (note: coordinates are the blurred ones, not the exact ones).
        return {
            'event': site['event'],
            'id': site['id'],
            'case_number': site['case_number'],
            'work_type': site['work_type'],
            'claimed_by': site['claimed_by'],
            'status': site['status'],
            'floors_affected': site.get('floors_affected'),
            'blurred_latitude': site.get('blurred_latitude'),
            'blurred_longitude': site.get('blurred_longitude'),
        }

    output = json.dumps(
        [public_site_filter(s[1]) for s in site_db.GetAllCached(event, ids)],
        default=dthandler)
    self.response.out.write(output)
def get(self):
    """Return one JSON page of publicly-visible "Open" sites for an event.

    Query params:
        shortname: event short name (defaults to "sandy" when absent).
        page: zero-based page number; page size is PAGE_OFFSET.
    """
    event_shortname = self.request.get("shortname")
    page_int = int(self.request.get("page"))
    # webapp's request.get returns "" (not None) for a missing param, so
    # test falsiness instead of comparing against None.
    if not event_shortname:
        event_shortname = "sandy"
    event = None
    for e in event_db.GetAllCached():
        if e.short_name == event_shortname:
            event = e
    if event is None:
        # Unknown event: 404 instead of crashing on event.key() below.
        self.response.set_status(404)
        return

    if event.short_name != 'moore':
        q = db.GqlQuery(
            'SELECT * FROM Site WHERE status >= :1 and event = :2',
            "Open", event.key())
    else:
        q = Query(model_class=site_db.Site)
        q.filter("event =", event.key())
        q.is_keys_only()
        q.filter("status >= ", "Open")

    # The original computed this page of ids twice; once is enough.
    this_offset = page_int * PAGE_OFFSET
    ids = [key.key().id()
           for key in q.fetch(PAGE_OFFSET, offset=this_offset)]

    def public_site_filter(site):
        # site is a dict; expose only fields safe for public consumption
        # (note: coordinates are the blurred ones, not the exact ones).
        return {
            'event': site['event'],
            'id': site['id'],
            'case_number': site['case_number'],
            'work_type': site['work_type'],
            'claimed_by': site['claimed_by'],
            'status': site['status'],
            'floors_affected': site.get('floors_affected'),
            'blurred_latitude': site.get('blurred_latitude'),
            'blurred_longitude': site.get('blurred_longitude'),
        }

    output = json.dumps(
        [public_site_filter(s[1]) for s in site_db.GetAllCached(event, ids)],
        default=dthandler)
    self.response.out.write(output)
def mail_queue_expander(request):
    """Fan out one newsletter edition to its subscribers, a batch at a time.

    Fetches up to BATCH_SIZE active subscription keys, enqueues a
    follow-up task carrying the query cursor, then queues one
    PendingEmail + SendNewsletter task per subscriber inside a
    transaction. Marks the edition 'complete' when no subscribers remain.
    """
    BATCH_SIZE = 5
    edition = db.get(request.form['edition'])
    if not edition:
        # Original did `pass` here and then dereferenced edition.site,
        # crashing on None. Nothing to expand: bail out.
        return
    page = int(request.form.get('page', 0))
    subscriber_q = Query(subscriptions.models.Subscription, keys_only=True)
    subscriber_q = subscriber_q.filter('site =', edition.site)
    subscriber_q = subscriber_q.filter('active =', True)
    # has_key() is deprecated; `in` works on Py2 and Py3.
    if 'cursor' in request.form:
        subscriber_q = subscriber_q.with_cursor(request.form['cursor'])
    subscribers = subscriber_q.fetch(BATCH_SIZE)
    if not subscribers:
        edition.status = 'complete'
        edition.put()
        return
    # Named task: the per-page name makes the fan-out idempotent.
    task = Task(
        params={'edition': edition.key(),
                'cursor': subscriber_q.cursor(),
                'page': page + 1},
        name="%s-%s-%s-%s" % (edition.site.slug,
                              edition.issue_num,
                              edition.publish_after.strftime("%Y%j%H%M-%S"),
                              page + 1))
    try:
        MailQueueExpander.add(task)
    except (TaskAlreadyExistsError, TombstonedTaskError):
        # NOTE(review): re-raising here defeats the named-task dedupe
        # idiom; swallowing may have been intended — confirm before
        # changing, so the original behavior is preserved.
        raise
    for sub in subscribers:
        def _tx(sub=sub):  # bind the loop var now (late-binding safety)
            pending_email = PendingEmail(subscription=sub, edition=edition)
            db.put(pending_email)
            SendNewsletter.add(
                Task(params={'pending_email': pending_email.key()}),
                transactional=True)
        db.run_in_transaction_custom_retries(10, _tx)
def migrate_sources(request):
    """Copy ICalendarSource entities from the default namespace into a new
    namespace, re-linking Profile references by slug, then chain to the
    event-migration task.
    """
    try:
        set_namespace('')
        old_site = db.get(db.Key(request.POST.get('old_site')))
        q = Query(ICalendarSource, namespace='').filter('site = ', old_site)
        old_sources = q.fetch(1000)
        set_namespace(request.POST.get('new_namespace'))
        new_site = db.get(db.Key(request.POST.get('new_site')))
        for old_source in old_sources:
            # Profiles live in the new namespace already; re-link by slug.
            if old_source.submitted_by:
                old_source.submitted_by = Profile.all().filter(
                    'slug =', old_source.submitted_by.slug).get()
            if old_source.approved_by:
                old_source.approved_by = Profile.all().filter(
                    'slug =', old_source.approved_by.slug).get()
            new_source = clone_source(old_source, key_name=old_source.slug)
            new_source.site = new_site
            new_source.put()
            # NOTE(review): deletion of the originals was commented out in
            # the original code — presumably to keep re-runs safe; confirm.
        taskqueue.add(
            url='/admin/migrate-events/',
            params={
                'new_namespace': request.POST.get('new_namespace'),
                'old_site': old_site.key(),
                'new_site': new_site.key(),
            },
        )
    except Exception:
        # Py3-compatible except syntax (was Py2-only "except Exception, e");
        # the bound exception was unused, so the binding is dropped.
        logging.error("%s in \n%s" % (traceback.format_exc(),
                                      str(request.POST)))
def migrate_profiles(request):
    """Copy Profile entities from the default namespace into a new
    namespace, then chain to the source-migration task.
    """
    try:
        set_namespace('')
        old_site = db.get(db.Key(request.POST.get('old_site')))
        q = Query(Profile, namespace='').filter('site = ', old_site)
        old_profiles = q.fetch(1000)
        set_namespace(request.POST.get('new_namespace'))
        new_site = db.get(db.Key(request.POST.get('new_site')))
        for old_profile in old_profiles:
            # Clone under the same key name so later slug lookups resolve.
            new_profile = clone_entity(old_profile,
                                       key_name=old_profile.key().name())
            new_profile.site = new_site
            new_profile.put()
            # NOTE(review): deletion of the originals was commented out in
            # the original code — presumably to keep re-runs safe; confirm.
        taskqueue.add(
            url='/admin/migrate-sources/',
            params={
                'new_namespace': request.POST.get('new_namespace'),
                'old_site': old_site.key(),
                'new_site': new_site.key(),
            },
        )
    except Exception:
        # Py3-compatible except syntax (was Py2-only "except Exception, e");
        # the bound exception was unused, so the binding is dropped.
        logging.error("%s in \n%s" % (traceback.format_exc(),
                                      str(request.POST)))
def migrate_profiles(request):
    """Copy Profile entities from the default namespace into a new
    namespace, then chain to the source-migration task.
    """
    try:
        set_namespace('')
        old_site = db.get(db.Key(request.POST.get('old_site')))
        q = Query(Profile, namespace='').filter('site = ', old_site)
        old_profiles = q.fetch(1000)
        set_namespace(request.POST.get('new_namespace'))
        new_site = db.get(db.Key(request.POST.get('new_site')))
        for old_profile in old_profiles:
            # Clone under the same key name so later slug lookups resolve.
            new_profile = clone_entity(old_profile,
                                       key_name=old_profile.key().name())
            new_profile.site = new_site
            new_profile.put()
            # NOTE(review): deletion of the originals was commented out in
            # the original code — presumably to keep re-runs safe; confirm.
        taskqueue.add(
            url='/admin/migrate-sources/',
            params={
                'new_namespace': request.POST.get('new_namespace'),
                'old_site': old_site.key(),
                'new_site': new_site.key(),
            },
        )
    except Exception:
        # Py3-compatible except syntax (was Py2-only "except Exception, e");
        # the bound exception was unused, so the binding is dropped.
        logging.error("%s in \n%s" % (traceback.format_exc(),
                                      str(request.POST)))
def migrate_events(request):
    """Copy Event entities, one per invocation, from the default namespace
    into a new namespace, re-queueing itself with a datastore cursor until
    the old site's events are exhausted.

    References (source, submitted_by, approved_by) are re-linked by slug
    to the copies already migrated into the new namespace.
    """
    try:
        if request.method == 'POST':
            set_namespace('')
            logging.warning("namespace: %s" % get_namespace())
            cursor = request.POST.get('cursor')
            old_site = db.get(db.Key(request.POST.get('old_site')))
            logging.warning("old site: %s" % old_site)
            q = Query(Event, namespace='').filter('site = ', old_site)
            if cursor:
                q = q.with_cursor(cursor)
            events = q.fetch(1)
            logging.warning(events)
            set_namespace(request.POST.get('new_namespace'))
            new_site = db.get(db.Key(request.POST.get('new_site')))
            if events:
                # Enqueue the next page before processing this one.
                taskqueue.add(
                    url='/admin/migrate-events/',
                    params={
                        'new_namespace': request.POST.get('new_namespace'),
                        'old_site': old_site.key(),
                        'new_site': new_site.key(),
                        'cursor': q.cursor(),
                    },
                )
            for event in events:
                event.site = new_site
                if event.source:
                    event.source = ICalendarSource.all().filter(
                        'slug =', event.source.slug).get()
                if event.submitted_by:
                    event.submitted_by = Profile.all().filter(
                        'slug =', event.submitted_by.slug).get()
                if event.approved_by:
                    event.approved_by = Profile.all().filter(
                        'slug =', event.approved_by.slug).get()
                new_event = clone_event(event, key_name=event.key().name())
                new_event.put()
    except Exception:
        # Py3-compatible except syntax (was Py2-only "except Exception, e");
        # the bound exception was unused, so the binding is dropped.
        logging.error("%s in \n%s" % (traceback.format_exc(),
                                      str(request.POST)))
def get(self):
    """Render the city list (no/invalid `city` param) or the per-station
    AQI time series for the requested city, via the same template.
    """
    try:
        city_id = int(self.request.get('city', ''))
    except ValueError:
        # Narrowed from a bare except: only a non-numeric param should
        # fall back to the city list.
        city_id = 0
    # Template path is identical for both branches; compute it once.
    path = os.path.join(os.path.dirname(__file__),
                        './/template//citylist_all.html')
    if city_id == 0:
        # No city selected --> show the city list.
        citylist = {}
        for city in City.all():
            citylist[city.Name] = [city.Code, abs(hash(city.Name))]
        template_values = {
            'citylist': citylist,
            'stationlist': None
        }
    else:
        # Show station plots for the selected city.
        stationlist = {}
        city_query = Query(City)
        city_query.filter('Code =', city_id)
        # The original called query.run() and discarded the result before
        # get()/fetch(); that redundant pass is removed.
        city = city_query.get()
        for station in city.station_set:
            data = []
            aqi_query = Query(AQIData)
            aqi_query.filter('Station =', station.Code)
            aqi_query.order('-Date')
            for entry in aqi_query.fetch(None):
                data.append("['%s',%d]" % (str(entry.Date), entry.AQI))
            stationlist[station.Name] = ','.join(data)
        template_values = {
            'citylist': None,
            'stationlist': stationlist
        }
    self.response.out.write(template.render(path, template_values))
def migrate_events(request):
    """Copy Event entities, one per invocation, from the default namespace
    into a new namespace, re-queueing itself with a datastore cursor until
    the old site's events are exhausted.

    References (source, submitted_by, approved_by) are re-linked by slug
    to the copies already migrated into the new namespace.
    """
    try:
        if request.method == 'POST':
            set_namespace('')
            logging.warning("namespace: %s" % get_namespace())
            cursor = request.POST.get('cursor')
            old_site = db.get(db.Key(request.POST.get('old_site')))
            logging.warning("old site: %s" % old_site)
            q = Query(Event, namespace='').filter('site = ', old_site)
            if cursor:
                q = q.with_cursor(cursor)
            events = q.fetch(1)
            logging.warning(events)
            set_namespace(request.POST.get('new_namespace'))
            new_site = db.get(db.Key(request.POST.get('new_site')))
            if events:
                # Enqueue the next page before processing this one.
                taskqueue.add(
                    url='/admin/migrate-events/',
                    params={
                        'new_namespace': request.POST.get('new_namespace'),
                        'old_site': old_site.key(),
                        'new_site': new_site.key(),
                        'cursor': q.cursor(),
                    },
                )
            for event in events:
                event.site = new_site
                if event.source:
                    event.source = ICalendarSource.all().filter(
                        'slug =', event.source.slug).get()
                if event.submitted_by:
                    event.submitted_by = Profile.all().filter(
                        'slug =', event.submitted_by.slug).get()
                if event.approved_by:
                    event.approved_by = Profile.all().filter(
                        'slug =', event.approved_by.slug).get()
                new_event = clone_event(event, key_name=event.key().name())
                new_event.put()
    except Exception:
        # Py3-compatible except syntax (was Py2-only "except Exception, e");
        # the bound exception was unused, so the binding is dropped.
        logging.error("%s in \n%s" % (traceback.format_exc(),
                                      str(request.POST)))
def mail_queue_expander(request):
    """Fan out one newsletter edition to its subscribers, a batch at a time.

    Fetches up to BATCH_SIZE active subscription keys, enqueues a
    follow-up task carrying the query cursor, then queues one
    PendingEmail + SendNewsletter task per subscriber inside a
    transaction. Marks the edition 'complete' when no subscribers remain.
    """
    BATCH_SIZE = 5
    edition = db.get(request.form['edition'])
    if not edition:
        # Original did `pass` here and then dereferenced edition.site,
        # crashing on None. Nothing to expand: bail out.
        return
    page = int(request.form.get('page', 0))
    subscriber_q = Query(subscriptions.models.Subscription, keys_only=True)
    subscriber_q = subscriber_q.filter('site =', edition.site)
    subscriber_q = subscriber_q.filter('active =', True)
    # has_key() is deprecated; `in` works on Py2 and Py3.
    if 'cursor' in request.form:
        subscriber_q = subscriber_q.with_cursor(request.form['cursor'])
    subscribers = subscriber_q.fetch(BATCH_SIZE)
    if not subscribers:
        edition.status = 'complete'
        edition.put()
        return
    # Named task: the per-page name makes the fan-out idempotent.
    task = Task(
        params={'edition': edition.key(),
                'cursor': subscriber_q.cursor(),
                'page': page + 1},
        name="%s-%s-%s-%s" % (edition.site.slug,
                              edition.issue_num,
                              edition.publish_after.strftime("%Y%j%H%M-%S"),
                              page + 1))
    try:
        MailQueueExpander.add(task)
    except (TaskAlreadyExistsError, TombstonedTaskError):
        # NOTE(review): re-raising here defeats the named-task dedupe
        # idiom; swallowing may have been intended — confirm before
        # changing, so the original behavior is preserved.
        raise
    for sub in subscribers:
        def _tx(sub=sub):  # bind the loop var now (late-binding safety)
            pending_email = PendingEmail(subscription=sub, edition=edition)
            db.put(pending_email)
            SendNewsletter.add(
                Task(params={'pending_email': pending_email.key()}),
                transactional=True)
        db.run_in_transaction_custom_retries(10, _tx)
def migrate_sources(request):
    """Copy ICalendarSource entities from the default namespace into a new
    namespace, re-linking Profile references by slug, then chain to the
    event-migration task.
    """
    try:
        set_namespace('')
        old_site = db.get(db.Key(request.POST.get('old_site')))
        q = Query(ICalendarSource, namespace='').filter('site = ', old_site)
        old_sources = q.fetch(1000)
        set_namespace(request.POST.get('new_namespace'))
        new_site = db.get(db.Key(request.POST.get('new_site')))
        for old_source in old_sources:
            # Profiles live in the new namespace already; re-link by slug.
            if old_source.submitted_by:
                old_source.submitted_by = Profile.all().filter(
                    'slug =', old_source.submitted_by.slug).get()
            if old_source.approved_by:
                old_source.approved_by = Profile.all().filter(
                    'slug =', old_source.approved_by.slug).get()
            new_source = clone_source(old_source, key_name=old_source.slug)
            new_source.site = new_site
            new_source.put()
            # NOTE(review): deletion of the originals was commented out in
            # the original code — presumably to keep re-runs safe; confirm.
        taskqueue.add(
            url='/admin/migrate-events/',
            params={
                'new_namespace': request.POST.get('new_namespace'),
                'old_site': old_site.key(),
                'new_site': new_site.key(),
            },
        )
    except Exception:
        # Py3-compatible except syntax (was Py2-only "except Exception, e");
        # the bound exception was unused, so the binding is dropped.
        logging.error("%s in \n%s" % (traceback.format_exc(),
                                      str(request.POST)))
def get_resume_state(self, key_value):
    """Fetch up to 100 entities whose datastore key equals *key_value*.

    The GAE db filter string is "<property> <operator>" separated by a
    space; the original '__key__=' (no space) is parsed as a property
    named '__key__=' and never matches.
    """
    my_query = Query()
    my_query.filter('__key__ =', key_value)
    return my_query.fetch(100)
def profile_for_user(user, keys_only=False):
    """Return the first Profile for *user* (its key when *keys_only*).

    Raises IndexError if the user has no profile.
    """
    query = Query(Profile, keys_only=keys_only)
    query.filter('user =', user)
    return query.fetch(1)[0]
def profile_for_user(user, keys_only=False):
    """Return the first Profile for *user* (its key when *keys_only*).

    Raises IndexError if the user has no profile.
    """
    query = Query(Profile, keys_only=keys_only)
    query.filter('user =', user)
    return query.fetch(1)[0]
def AuthenticatedGet(self, org, event):
    """Serve site data for *event* as JSON.

    Dispatch, in order:
      * latitude + longitude params: list of {id, address} JSON strings
        for sites at exactly that point in this event.
      * id=all: one page (PAGE_OFFSET entries) of cached site dicts,
        optionally filtered by status=open|closed.
      * numeric id: a single site dict; 404 when absent.
    """
    id_param = self.request.get('id')
    latitude_param = self.request.get("latitude")
    longitude_param = self.request.get("longitude")
    if latitude_param and longitude_param:
        try:
            latitude = float(latitude_param)
            longitude = float(longitude_param)
        except ValueError:
            # Bad coordinates: 404 and stop. The original fell through
            # here and crashed (NameError) on the undefined latitude.
            self.response.set_status(404)
            return
        json_array = []
        for site in site_db.Site.gql(
                'Where latitude = :1 and longitude = :2 and event = :3',
                latitude, longitude, event.key()):
            json_string = json.dumps({
                "id": site.key().id(),
                "address": site.address,
            })
            json_array.append(json_string)
        self.response.out.write(json.dumps(json_array, default=dthandler))
        return

    if id_param == "all":
        status = self.request.get("status", default_value="")
        page = self.request.get("page", default_value="0")
        page_int = int(page)
        logging.debug("page = " + page)
        q = Query(model_class=site_db.Site)
        # Filter by event, keys only; status filter is optional.
        q.filter("event =", event.key())
        q.is_keys_only()
        if status == "open":
            logging.debug("status == open")
            q.filter("status >= ", "Open")
        elif status == "closed":
            q.filter("status < ", "Open")
            logging.debug("status == closed")
        logging.debug("status = " + status)
        this_offset = page_int * PAGE_OFFSET
        logging.debug("this_offset = " + str(this_offset))
        ids = [key.key().id()
               for key in q.fetch(PAGE_OFFSET, offset=this_offset)]
        logging.debug("ids len = " + str(len(ids)))
        output = json.dumps(
            [s[1] for s in site_db.GetAllCached(event, ids)],
            default=dthandler)
        logging.info("after output")
        self.response.out.write(output)
        return

    try:
        site_id = int(id_param)  # renamed: `id` shadowed the builtin
    except ValueError:
        self.response.set_status(404)
        return
    site = site_db.GetAndCache(site_id)
    if not site:
        self.response.set_status(404)
        return
    # TODO(jeremy): Add the various fixes for Flash
    # and other vulnerabilities caused by having user-generated
    # content in JSON strings, by setting this as an attachment
    # and prepending the proper garbage strings.
    # Javascript security is really a pain.
    self.response.out.write(
        json.dumps(site_db.SiteToDict(site), default=dthandler))
def AuthenticatedGet(self, org, event):
    """Serve site data for *event* as JSON.

    Dispatch, in order:
      * latitude + longitude params: list of {id, address} JSON strings
        for sites at exactly that point in this event.
      * id=all: one page (PAGE_OFFSET entries) of cached site dicts,
        optionally filtered by status=open|closed.
      * numeric id: a single site dict; 404 when absent.
    """
    id_param = self.request.get("id")
    latitude_param = self.request.get("latitude")
    longitude_param = self.request.get("longitude")
    if latitude_param and longitude_param:
        try:
            latitude = float(latitude_param)
            longitude = float(longitude_param)
        except ValueError:
            # Bad coordinates: 404 and stop. The original fell through
            # here and crashed (NameError) on the undefined latitude.
            self.response.set_status(404)
            return
        json_array = []
        for site in site_db.Site.gql(
                "Where latitude = :1 and longitude = :2 and event = :3",
                latitude, longitude, event.key()):
            json_string = json.dumps({
                "id": site.key().id(),
                "address": site.address,
            })
            json_array.append(json_string)
        self.response.out.write(json.dumps(json_array, default=dthandler))
        return

    if id_param == "all":
        status = self.request.get("status", default_value="")
        page = self.request.get("page", default_value="0")
        page_int = int(page)
        logging.debug("page = " + page)
        q = Query(model_class=site_db.Site)
        # Filter by event, keys only; status filter is optional.
        q.filter("event =", event.key())
        q.is_keys_only()
        if status == "open":
            logging.debug("status == open")
            q.filter("status >= ", "Open")
        elif status == "closed":
            q.filter("status < ", "Open")
            logging.debug("status == closed")
        logging.debug("status = " + status)
        this_offset = page_int * PAGE_OFFSET
        logging.debug("this_offset = " + str(this_offset))
        ids = [key.key().id()
               for key in q.fetch(PAGE_OFFSET, offset=this_offset)]
        logging.debug("ids len = " + str(len(ids)))
        output = json.dumps(
            [s[1] for s in site_db.GetAllCached(event, ids)],
            default=dthandler)
        self.response.out.write(output)
        return

    try:
        site_id = int(id_param)  # renamed: `id` shadowed the builtin
    except ValueError:
        self.response.set_status(404)
        return
    site = site_db.GetAndCache(site_id)
    if not site:
        self.response.set_status(404)
        return
    # TODO(jeremy): Add the various fixes for Flash
    # and other vulnerabilities caused by having user-generated
    # content in JSON strings, by setting this as an attachment
    # and prepending the proper garbage strings.
    # Javascript security is really a pain.
    self.response.out.write(
        json.dumps(site_db.SiteToDict(site), default=dthandler))