def ordersEnRoute():
    """Return a query over every order whose state is 'enRoute'."""
    enroute_query = Query(Order)
    enroute_query.filter("state =", "enRoute")
    return enroute_query
def idleOrders():
    """Return a query over all orders still waiting for a courier pickup."""
    waiting_query = Query(Order)
    waiting_query.filter("state =", "needPickup")
    return waiting_query
def get(self):
    """Render the city-list page.

    For every City, collect each of its stations' most recent AQIData
    record into a template dict of the form
    {city_name: {0: <stable positive hash of the name>, 1: [station dicts]}}.
    """
    citylist = {}
    cities = City.all()
    for city in cities:
        # 0 -> stable positive hash used by the template as an element id,
        # 1 -> list of per-station AQI summaries.
        citylist[city.Name] = {0: abs(hash(city.Name)), 1: []}
        for station in city.station_set:
            data = {'Name': station.Name}
            query = Query(AQIData)
            query.filter('Station =', station.Code)
            query.order('-Date')
            # get() returns the newest record thanks to the '-Date' sort.
            # (BUG FIX: the original also called query.run() first, which
            # issued a second, discarded datastore fetch.)
            # NOTE(review): aqi is None if a station has no AQIData rows,
            # which would raise AttributeError below -- confirm data always
            # exists for every station.
            aqi = query.get()
            data['AQI'] = aqi.AQI
            data['Level'] = aqi.AQILevel
            data['Assess'] = aqi.AQIAssess
            data['Majority'] = aqi.Majority
            data['Date'] = aqi.Date
            citylist[city.Name][1].append(data)
    template_values = {
        'citylist': citylist,
    }
    path = os.path.join(os.path.dirname(__file__), './/template//citylist.html')
    self.response.out.write(template.render(path, template_values))
def idleOrders():
    """Build and return the query selecting orders that need pickup."""
    pending = Query(Order)
    pending.filter("state =", "needPickup")
    return pending
def get(self):
    """Write a JSON list of public (filtered) site summaries for an event.

    Query params:
      shortname -- event short name; defaults to "sandy" when absent.
      page      -- zero-based page number; PAGE_OFFSET sites per page.
    """
    event_shortname = self.request.get("shortname")
    page = self.request.get("page")
    page_int = int(page)
    # BUG FIX: request.get() returns "" (not None) for a missing param,
    # so the original `== None` check never fired and the "sandy"
    # default was dead code.  Test truthiness instead.
    if not event_shortname:
        event_shortname = "sandy"
    event = None
    for e in event_db.GetAllCached():
        if e.short_name == event_shortname:
            event = e
    # NOTE(review): if no event matches, event stays None and the
    # attribute access below raises -- confirm whether a 404 is wanted.
    ids = []
    where_string = "Open"
    if event.short_name != 'moore':
        gql_string = 'SELECT * FROM Site WHERE status >= :1 and event = :2'
        q = db.GqlQuery(gql_string, where_string, event.key())
    else:
        q = Query(model_class=site_db.Site)
        q.filter("event =", event.key())
        q.is_keys_only()
        q.filter("status >= ", "Open")
    # BUG FIX: the original ran this identical fetch twice in a row,
    # doubling the datastore round trips for the same page of keys.
    this_offset = page_int * PAGE_OFFSET
    ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset=this_offset)]

    def public_site_filter(site):
        # site is a plain dict from site_db; expose only fields that are
        # safe for public consumption (location is pre-blurred).
        return {
            'event': site['event'],
            'id': site['id'],
            'case_number': site['case_number'],
            'work_type': site['work_type'],
            'claimed_by': site['claimed_by'],
            'status': site['status'],
            'floors_affected': site.get('floors_affected'),
            'blurred_latitude': site.get('blurred_latitude'),
            'blurred_longitude': site.get('blurred_longitude'),
        }

    output = json.dumps(
        [public_site_filter(s[1]) for s in site_db.GetAllCached(event, ids)],
        default=dthandler)
    self.response.out.write(output)
def availableCouriers():
    """DEPRECATED.  Return a query over all couriers with online == True."""
    online_query = Query(Courier)
    online_query.filter("online =", True)
    return online_query
def BatchSitesFromIds(event):
    """Return an iterator over up to 1000 Site entities for *event*,
    starting at offset 4000.

    NOTE(review): the original placed this docstring AFTER the first
    statement (so it was never an actual docstring) and described a
    comma-separated-ids parameter this function does not take; both are
    fixed here.  The hard-coded limit/offset pair looks like a one-off
    batch window (e.g. for a migration job) -- confirm before reuse.
    """
    logging.debug("BatchSitesFromIds")
    q = Query(model_class=site_db.Site)
    q.filter('event = ', event)
    return q.run(limit=1000, offset=4000)
def get(self):
    """Respond with a JSON array of publicly visible site data, paged.

    Query params:
      shortname -- event short name (falls back to "sandy").
      page      -- zero-based page index; PAGE_OFFSET rows per page.
    """
    event_shortname = self.request.get("shortname")
    page_int = int(self.request.get("page"))
    # BUG FIX: webapp's request.get() yields "" for a missing parameter,
    # never None, so the original `== None` default never applied.
    if not event_shortname:
        event_shortname = "sandy"
    event = None
    for candidate in event_db.GetAllCached():
        if candidate.short_name == event_shortname:
            event = candidate
    # NOTE(review): an unknown shortname leaves event None and raises
    # below -- confirm whether that should be a 404 instead.
    where_string = "Open"
    if event.short_name != 'moore':
        gql_string = 'SELECT * FROM Site WHERE status >= :1 and event = :2'
        q = db.GqlQuery(gql_string, where_string, event.key())
    else:
        q = Query(model_class=site_db.Site)
        q.filter("event =", event.key())
        q.is_keys_only()
        q.filter("status >= ", "Open")
    # BUG FIX: the identical fetch was executed twice back to back in
    # the original, costing an extra datastore round trip.
    this_offset = page_int * PAGE_OFFSET
    ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset=this_offset)]

    def public_site_filter(site):
        # Whitelist the fields safe to expose publicly; coordinates are
        # the pre-blurred variants.
        return {
            'event': site['event'],
            'id': site['id'],
            'case_number': site['case_number'],
            'work_type': site['work_type'],
            'claimed_by': site['claimed_by'],
            'status': site['status'],
            'floors_affected': site.get('floors_affected'),
            'blurred_latitude': site.get('blurred_latitude'),
            'blurred_longitude': site.get('blurred_longitude'),
        }

    self.response.out.write(json.dumps(
        [public_site_filter(s[1]) for s in site_db.GetAllCached(event, ids)],
        default=dthandler))
def BatchSitesFromIds(event):
    """Fetch a fixed window (limit 1000, offset 4000) of Sites for *event*.

    NOTE(review): the original's docstring followed the logging call, so
    Python never treated it as a docstring, and its text described an
    ids-string parameter that does not exist -- corrected here.  Confirm
    the hard-coded offset window is still intentional.
    """
    logging.debug("BatchSitesFromIds")
    query = Query(model_class=site_db.Site)
    query.filter('event = ', event)
    return query.run(limit=1000, offset=4000)
def couriersIdEnRoute():
    """Return the courierIds of all couriers currently out on a delivery.

    Uses a projection query so only the courierId property is fetched
    for each order whose state is 'enRoute'.
    """
    q = Query(Order, projection=["courierId"])
    q.filter("state =", "enRoute")
    # Idiom: comprehension instead of the original manual append loop.
    return [order.courierId for order in q]
def couriersIdEnRoute():
    """List the courierId of every courier with an 'enRoute' order.

    A projection query keeps the fetch narrow: only courierId comes back
    from the datastore.
    """
    enroute = Query(Order, projection=["courierId"])
    enroute.filter("state =", "enRoute")
    # Idiom: build the list directly rather than appending in a loop.
    return [o.courierId for o in enroute]
def testQuery(self):
    """An online=True filter must never return the offline courier (id 2)."""
    fixtures = (
        (1, 2.0, 3.0, True),
        (2, 12.0, 13.0, False),
        (14, 2.0, 3.0, True),
        (51, 2.0, 3.0, True),
    )
    for courier_id, lat, lon, online in fixtures:
        Courier(courierId=courier_id, lat=lat, lon=lon, online=online).put()
    online_query = Query(Courier)
    online_query.filter("online = ", True)
    for courier in online_query:
        self.assertNotEqual(2, courier.courierId)
def available2():
    """Return a query over couriers that are currently available.

    GQL lacks a 'NOT IN' operator, so rather than excluding the
    en-route couriers directly we first compute the list of available
    courier ids and then filter with 'IN'.
    """
    idle_ids = availableCourierId()
    available_query = Query(Courier)
    available_query.filter("courierId in ", idle_ids)
    return available_query
def get(self):
    """Render either the full city list or AQI history plots for one city.

    Query param 'city' carries a numeric city Code; missing or invalid
    values fall back to the city-list view.
    """
    city_id = self.request.get('city', '')
    try:
        city_id = int(city_id)
    except (ValueError, TypeError):
        # Narrowed from the original bare except: a non-numeric or empty
        # parameter means "show the city list".
        city_id = 0
    if city_id == 0:
        citylist = {}
        for city in City.all():
            # [Code, stable positive hash] -- the hash is used by the
            # template as an element id.
            citylist[city.Name] = [city.Code, abs(hash(city.Name))]
        template_values = {
            'citylist': citylist,
            'stationlist': None,
        }
        path = os.path.join(os.path.dirname(__file__), './/template//citylist_all.html')
        self.response.out.write(template.render(path, template_values))
    else:
        stationlist = {}
        query = Query(City)
        query.filter('Code =', city_id)
        # BUG FIX: the original called query.run() and discarded the
        # result before get()/fetch(), issuing a wasted datastore fetch.
        # NOTE(review): an unknown Code leaves city None and raises
        # below -- confirm codes are always valid here.
        city = query.get()
        for station in city.station_set:
            data = []
            query = Query(AQIData)
            query.filter('Station =', station.Code)
            query.order('-Date')
            aqi = query.fetch(None)  # None limit -> fetch all matching rows
            for entry in aqi:
                data.append("['%s',%d]" % (str(entry.Date), entry.AQI))
            stationlist[station.Name] = ','.join(data)
        template_values = {
            'citylist': None,
            'stationlist': stationlist,
        }
        path = os.path.join(os.path.dirname(__file__), './/template//citylist_all.html')
        self.response.out.write(template.render(path, template_values))
def get_children(self, child_class, **kwargs):
    """
    Get all of the children of this model

    @param child_class: The child class to look for children in
    @type child_class: Any model instance
    @param kwargs: Optional equality filters applied as property = value
    @return: Iterator over this model's children
    @rtype: Iterator
    """
    query = Query(child_class)
    query.ancestor(self)
    # BUG FIX: the original wrote arg(0)/arg(1) -- a (key, value) tuple
    # is not callable, so any keyword filter raised TypeError.  Unpack
    # the pair instead.
    for name, value in kwargs.items():
        query.filter(name + " =", value)
    # TODO: Ensure only immediate children
    return query
def GetAllCached(event, ids=None):
    """Return (Site, dict) pairs for *event*, served from memcache when warm.

    ids -- optional list of Site key ids; when None, all of the event's
           site ids are discovered with a keys-only query.  Entries
           missing from memcache are loaded from the datastore, cached,
           and appended to the result.
    """
    if ids is None:  # idiom fix: identity test for None, not ==
        q = Query(model_class=Site, keys_only=True)
        q.filter("event =", event)
        ids = [key.id() for key in q.run(batch_size=2000)]
    lookup_ids = [str(id) for id in ids]
    cache_results = memcache.get_multi(lookup_ids, key_prefix=cache_prefix)
    # Anything not already cached must come from the datastore.
    not_found = [id for id in ids if str(id) not in cache_results]
    data_store_results = []
    orgs = dict([(o.key(), o) for o in organization.GetAllCached()])
    events = dict([(e.key(), e) for e in event_db.GetAllCached()])
    if not_found:
        data_store_results = [
            (site, SiteToDict(site))
            for site in GetSitesAndSetReferences(not_found, events, orgs)]
        memcache.set_multi(
            dict([(str(site[0].key().id()), site)
                  for site in data_store_results]),
            key_prefix=cache_prefix, time=cache_time)
    # Python 2: dict.values() is a list, so '+' concatenation is valid.
    sites = cache_results.values() + data_store_results
    return sites
def GetAllCached(event, ids=None):
    """Return (Site, dict) pairs for *event*, with an optional cached id list.

    ids -- explicit Site key ids; when None, the id list itself may be
           served from memcache (when the cache_ids flag is set) before
           falling back to a keys-only datastore scan.
    """
    if ids is None:  # idiom fix: identity test for None
        if cache_ids:
            # BUG FIX: key().id() returns an int; the original
            # concatenated it directly onto a str, raising TypeError.
            # NOTE(review): `county` is not defined anywhere in this
            # function -- presumably a module-level value; confirm.
            cache_key_for_ids = ("SiteDictIds:" + str(event.key().id())
                                 + ":" + county)
            ids = memcache.get(cache_key_for_ids)
            if not ids:
                # Retrieve all matching keys. As a keys_only scan,
                # this should be more efficient than a full data scan.
                q = Query(model_class=Site, keys_only=True)
                q.filter("event =", event)
                ids = [key.id() for key in q]
                # Cache these for up to six minutes.
                # TODO(Jeremy): This may do more harm than good,
                # depending on how often people reload the map.
                memcache.set(cache_key_for_ids, ids, time=360)
        else:
            q = Query(model_class=Site, keys_only=True)
            q.filter("event =", event)
            ids = [key.id() for key in q.run(batch_size=2000)]
    lookup_ids = [str(id) for id in ids]
    cache_results = memcache.get_multi(lookup_ids, key_prefix=cache_prefix)
    # Anything not already in memcache is loaded from the datastore.
    not_found = [id for id in ids if str(id) not in cache_results]
    data_store_results = []
    orgs = dict([(o.key(), o) for o in organization.GetAllCached()])
    events = dict([(e.key(), e) for e in event_db.GetAllCached()])
    if not_found:
        data_store_results = [
            (site, SiteToDict(site))
            for site in GetSitesAndSetReferences(not_found, events, orgs)]
        memcache.set_multi(
            dict([(str(site[0].key().id()), site)
                  for site in data_store_results]),
            key_prefix=cache_prefix, time=cache_time)
    # Python 2: dict.values() is a list, so '+' concatenation is valid.
    sites = cache_results.values() + data_store_results
    return sites
def GetAllCached(event, ids=None):
    """Return cached (Site, dict) pairs for *event*.

    When ids is None, all site ids for the event are found via a
    keys-only query.  Cache misses are loaded from the datastore,
    written back to memcache, and merged into the result.
    """
    if ids is None:  # idiom fix: identity comparison for None
        q = Query(model_class=Site, keys_only=True)
        q.filter("event =", event)
        ids = [key.id() for key in q.run(batch_size=2000)]
    lookup_ids = [str(id) for id in ids]
    cache_results = memcache.get_multi(lookup_ids, key_prefix=cache_prefix)
    # Ids absent from the cache need a datastore round trip.
    not_found = [id for id in ids if str(id) not in cache_results]
    data_store_results = []
    orgs = dict([(o.key(), o) for o in organization.GetAllCached()])
    events = dict([(e.key(), e) for e in event_db.GetAllCached()])
    if not_found:
        data_store_results = [
            (site, SiteToDict(site))
            for site in GetSitesAndSetReferences(not_found, events, orgs)
        ]
        memcache.set_multi(
            dict([(str(site[0].key().id()), site) for site in data_store_results]),
            key_prefix=cache_prefix, time=cache_time)
    # Python 2 semantics: dict.values() returns a list, so '+' works.
    sites = cache_results.values() + data_store_results
    return sites
def check_and_write_row(row_d):
    """Check and save *row_d*; return True on success and False on failure.

    A row is acceptable only when 'Date Password Provided' is non-empty
    and its 'ORGANIZATION' matches an existing Organization by name.

    BUG FIX: the original returned None (falling off the end) on every
    failure path even though its docstring promised False; it also used
    the Python-2-only `except BadValueError, e` syntax and never used e.
    """
    if not row_d['Date Password Provided']:
        return False
    # Look up the organization referenced by the row.
    query = Query(model_class=Organization)
    query.filter('name = ', row_d['ORGANIZATION'])
    org = query.get()
    if not org:
        return False
    try:
        new_contact = Contact(
            first_name=row_d['First Name'],
            last_name=row_d['Last Name'],
            email=row_d['E-MAIL'],
            phone=row_d['PHONE #'],
            organization=org,
        )
        new_contact.save()
        return True
    except BadValueError:
        # Invalid property value (e.g. malformed email) -> row rejected.
        return False
def __do_get(self): arguments = self.arguments() # TODO: Limit query by __limit__ argument # Try to determine the project_id project = self.__get_project() # See if we can short-cut by looking up using an ID if GAEController.INSTANCE_ID_PARAM in arguments: instance = self.__get_instance_by_id() if instance == None: self.error(BaseHandler.METHOD_NOT_ALLOWED) return instances = [instance] # If not, build a query from the given parameters else: fields = self.__target_class_defn.get_fields() # NOTE: Query class handles sql injection query = Query(self.__target_class) # Build query with filters for field_name in filter(lambda x: x.is_exposed(), fields.keys()): if field_name in arguments: query.filter(field_name + " =", self.get(field_name)) instances = list(query) # Write out response for check_security = lambda x: self.__is_authorized(x) self.__write_seralized_response(filter(check_security, instances)) self.set_status(BaseHandler.OK)
def query_from_form(org, event, form, projection=None, distinct=None):
    """Build a Site query from an admin's filter form.

    org        -- requesting organization; must be a global or local admin.
    event      -- the event the caller is logged in to; local admins are
                  always pinned to it.
    form       -- filter form with event / reporting_org / claiming_org /
                  work_type / status / order fields.
    projection, distinct -- passed through to Query for global admins;
                  raises for local admins (unsupported with their 'IN'
                  filter).
    Raises Exception when org is not an admin of either kind.
    """
    # start query based on admin type
    if org.is_global_admin:
        query = Query(Site, projection=projection, distinct=distinct)
    elif org.is_local_admin:
        if projection is not None or distinct is not None:
            raise Exception("Not currently supported for local admin")
        # Restrict to the incidents this local admin belongs to.
        query = Query(Site).filter('event in', [
            incident.key() for incident in org.incidents
        ])
    else:
        raise Exception("Not an admin")
    # if a local admin, additionally pin the query to the logged-in
    # event (narrows the 'event in incidents' filter above to one event)
    if org.is_local_admin:
        query.filter('event', event.key())
    # apply filters if set
    if form.event.data:
        query.filter('event', Key(form.event.data))
    if form.reporting_org.data:
        query.filter('reported_by', Key(form.reporting_org.data))
    if form.claiming_org.data:
        query.filter('claimed_by', Key(form.claiming_org.data))
    if form.work_type.data:
        query.filter('work_type', form.work_type.data)
    if form.status.data:
        query.filter('status', form.status.data)
    # apply order
    if form.order.data:
        query.order(form.order.data)
    return query
def get_resume_state(self, key_value):
    """Fetch up to 100 entities whose key equals *key_value*.

    BUG FIX: the filter string was '__key__=' with no space before the
    operator; every other query in this module separates property and
    operator (e.g. "state ="), which is the form the datastore filter
    parser expects.  NOTE(review): Query() is constructed without a
    model class here -- confirm a kindless query is intended.
    """
    my_query = Query()
    my_query.filter('__key__ =', key_value)
    return my_query.fetch(100)
def unavailableCouriers():
    """Return a query over couriers that are currently out on a delivery."""
    busy_ids = couriersIdEnRoute()
    busy_query = Query(Courier)
    busy_query.filter("courierId in ", busy_ids)
    return busy_query
def AuthenticatedGet(self, org, event):
    """Serve site data as JSON for an authenticated organization.

    Three modes, chosen from query params:
      latitude+longitude -- all sites at exactly that location for *event*;
      id=all             -- one page of site dicts (PAGE_OFFSET per page),
                            optionally narrowed by status=open|closed;
      id=<int>           -- a single site dict, 404 when absent.
    """
    id_param = self.request.get('id')
    latitude_param = self.request.get("latitude")
    longitude_param = self.request.get("longitude")
    if latitude_param and longitude_param:
        try:
            latitude = float(latitude_param)
            longitude = float(longitude_param)
        except (ValueError, TypeError):
            # BUG FIX: the original set 404 but fell through, then
            # referenced the unbound latitude/longitude names and raised
            # NameError.  Bail out after flagging the bad input.
            self.response.set_status(404)
            return
        json_array = []
        for site in site_db.Site.gql(
                'Where latitude = :1 and longitude = :2 and event = :3',
                latitude, longitude, event.key()):
            json_array.append(json.dumps({
                "id": site.key().id(),
                "address": site.address,
            }))
        self.response.out.write(json.dumps(json_array, default=dthandler))
        return
    if id_param == "all":
        status = self.request.get("status", default_value="")
        page = self.request.get("page", default_value="0")
        page_int = int(page)
        logging.debug("page = " + page)
        q = Query(model_class=site_db.Site)
        # Keys only: the dict payloads come from the site cache below.
        q.filter("event =", event.key())
        q.is_keys_only()
        if status == "open":
            # "Open" and later statuses sort >= "Open" lexically.
            q.filter("status >= ", "Open")
        elif status == "closed":
            q.filter("status < ", "Open")
        logging.debug("status = " + status)
        this_offset = page_int * PAGE_OFFSET
        logging.debug("this_offset = " + str(this_offset))
        ids = [key.key().id()
               for key in q.fetch(PAGE_OFFSET, offset=this_offset)]
        logging.debug("ids len = " + str(len(ids)))
        output = json.dumps(
            [s[1] for s in site_db.GetAllCached(event, ids)],
            default=dthandler)
        self.response.out.write(output)
        return
    try:
        id = int(id_param)
    except (ValueError, TypeError):
        self.response.set_status(404)
        return
    site = site_db.GetAndCache(id)
    if not site:
        self.response.set_status(404)
        return
    # TODO(jeremy): Add the various fixes for Flash and other
    # vulnerabilities caused by having user-generated content in JSON
    # strings (set as attachment, prepend the proper garbage strings).
    self.response.out.write(
        json.dumps(site_db.SiteToDict(site), default=dthandler))
def AuthenticatedGet(self, org, event):
    """Return site JSON for an authenticated org.

    Dispatches on query params:
      latitude & longitude -- exact-location lookup within *event*;
      id=all               -- paged list of site dicts (PAGE_OFFSET/page)
                              with optional status=open|closed filter;
      id=<int>             -- one site dict; 404 when not found.
    """
    id_param = self.request.get("id")
    latitude_param = self.request.get("latitude")
    longitude_param = self.request.get("longitude")
    if latitude_param and longitude_param:
        try:
            latitude = float(latitude_param)
            longitude = float(longitude_param)
        except (ValueError, TypeError):
            # BUG FIX: the original continued after setting 404 and then
            # used the never-assigned latitude/longitude, raising
            # NameError; return immediately instead.
            self.response.set_status(404)
            return
        json_array = []
        for site in site_db.Site.gql(
            "Where latitude = :1 and longitude = :2 and event = :3",
            latitude, longitude, event.key()
        ):
            json_array.append(
                json.dumps({"id": site.key().id(), "address": site.address}))
        self.response.out.write(json.dumps(json_array, default=dthandler))
        return
    if id_param == "all":
        status = self.request.get("status", default_value="")
        page = self.request.get("page", default_value="0")
        page_int = int(page)
        logging.debug("page = " + page)
        q = Query(model_class=site_db.Site)
        # filter by event; keys only -- dict bodies come from the cache
        q.filter("event =", event.key())
        q.is_keys_only()
        if status == "open":
            q.filter("status >= ", "Open")
        elif status == "closed":
            q.filter("status < ", "Open")
        logging.debug("status = " + status)
        this_offset = page_int * PAGE_OFFSET
        ids = [key.key().id()
               for key in q.fetch(PAGE_OFFSET, offset=this_offset)]
        logging.debug("ids len = " + str(len(ids)))
        self.response.out.write(json.dumps(
            [s[1] for s in site_db.GetAllCached(event, ids)],
            default=dthandler))
        return
    try:
        id = int(id_param)
    except (ValueError, TypeError):
        self.response.set_status(404)
        return
    site = site_db.GetAndCache(id)
    if not site:
        self.response.set_status(404)
        return
    # TODO(jeremy): Add the various fixes for Flash and other
    # vulnerabilities caused by user-generated content in JSON strings
    # (serve as attachment with the proper prefix strings prepended).
    self.response.out.write(json.dumps(site_db.SiteToDict(site), default=dthandler))