Code example #1
def ordersEnRoute():
    """
    return all the orders that are currently enroute
    """
    q = Query(Order)
    q.filter("state =", "enRoute")
    return q
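Note: ordersEnRoute() returns an unfetched db.Query, so the caller decides how to materialize it. A minimal, hypothetical call site (not part of the snippet above; assumes logging is imported):

enroute = ordersEnRoute()
first_batch = enroute.fetch(10)   # materialize up to ten Order entities
for order in enroute:             # or iterate the query lazily
    logging.info("order %s is en route", order.key().id())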
Code example #2
File: tasks.py Project: Hubble1/eventgrinder
def mail_queue_expander(request):
    BATCH_SIZE=5
    edition=db.get(request.form['edition'])
    if not edition: return  # nothing to do if the edition no longer exists
    page=int(request.form.get('page',0))
    subscriber_q=Query(subscriptions.models.Subscription, keys_only=True).filter('site =', edition.site).filter('active =', True)
    if 'cursor' in request.form:
        subscriber_q=subscriber_q.with_cursor(request.form['cursor'])
    subscribers=subscriber_q.fetch(BATCH_SIZE)
    if not subscribers:
        edition.status='complete'
        edition.put()
        return
    task=Task(params={'edition':edition.key(),
                      'cursor': subscriber_q.cursor(),
                      'page':page+1},
              name="%s-%s-%s-%s" % (edition.site.slug, edition.issue_num,
                                    edition.publish_after.strftime("%Y%j%H%M-%S"),
                                    page+1))
    try:
        MailQueueExpander.add(task)
    except (TaskAlreadyExistsError, TombstonedTaskError):
        pass  # the named task was already enqueued; safe to continue
    for sub in subscribers:
        def _tx():
            pending_email=PendingEmail(subscription=sub, edition=edition)
            db.put(pending_email)
            SendNewsletter.add(Task(params={'pending_email':pending_email.key()}),
                               transactional=True)
        db.run_in_transaction_custom_retries(10,_tx)
Code example #3
File: tasks.py Project: Hubble1/eventgrinder
def migrate_sources(request):
    try:
        set_namespace('')
        old_site=db.get(db.Key(request.POST.get('old_site')))
        q=Query(ICalendarSource, namespace='').filter('site = ',old_site)
        old_sources=q.fetch(1000)

        set_namespace(request.POST.get('new_namespace'))
        new_site=db.get(db.Key(request.POST.get('new_site')))

        for old_source in old_sources:

            if old_source.submitted_by:
                old_source.submitted_by=Profile.all().filter('slug =', old_source.submitted_by.slug).get()
            if old_source.approved_by:
                old_source.approved_by=Profile.all().filter('slug =', old_source.approved_by.slug).get()

            new_source=clone_source(old_source, key_name=old_source.slug)
            new_source.site=new_site
            new_source.put()
            #old_source.delete()

        taskqueue.add(url='/admin/migrate-events/', params={'new_namespace':request.POST.get('new_namespace'),
                                                            'old_site':old_site.key(),
                                                            'new_site':new_site.key(),
                                                            },)

    except Exception,e:
        logging.error("%s in \n%s"% (traceback.format_exc(),str(request.POST)))
Code example #4
File: tasks.py Project: Hubble1/eventgrinder
def migrate_profiles(request):
    try:
        set_namespace('')
        old_site=db.get(db.Key(request.POST.get('old_site')))
        q=Query(Profile, namespace='').filter('site = ',old_site)
        old_profiles=q.fetch(1000)
    
    
        set_namespace(request.POST.get('new_namespace'))
        new_site=db.get(db.Key(request.POST.get('new_site')))
    
        for old_profile in old_profiles:
            new_profile=clone_entity(old_profile, key_name=old_profile.key().name())
            new_profile.site=new_site
            new_profile.put()
            #old_profile.delete()
    
  
        taskqueue.add(url='/admin/migrate-sources/', params={'new_namespace':request.POST.get('new_namespace'),
                                                             'old_site':old_site.key(),
                                                             'new_site':new_site.key(),
                                                             },)

    except Exception,e:
        logging.error("%s in \n%s"% (traceback.format_exc(),str(request.POST)))
Code example #5
File: assign.py Project: mingxiao/ebay-now-trial
def idleOrders():
    """
    Returns a list of all orders waiting for couriers
    """
    q = Query(Order)
    q.filter("state =",'needPickup')
    return q
Code example #6
File: assign.py Project: mingxiao/ebay-now-trial
def ordersEnRoute():
    """
    return all the orders that are currently enroute
    """
    q = Query(Order)
    q.filter("state =", "enRoute")
    return q
Code example #7
def form_and_query_from_params(org, event, limiting_event, form_data, projection=None, distinct=None):
    # get relevant values for search form 
    if org.is_global_admin:
        events = event_db.Event.all()
        work_types = [
            site.work_type for site 
            in Query(Site, projection=['work_type'], distinct=True)
        ]
    elif org.is_local_admin:
        events = org.incidents
        work_types = [
            site.work_type for site
            in Query(Site, projection=['work_type'], distinct=True) \
                .filter('event in', [incident.key() for incident in org.incidents])
        ]
    else:
        raise Exception("Not an admin")

    # construct search form, limiting by event if supplied
    WorkOrderSearchForm = create_work_order_search_form(
        events=events,
        work_types=work_types,
        limiting_event=limiting_event
    )
    form = WorkOrderSearchForm(form_data)
    query = query_from_form(org, event, form, projection=projection, distinct=distinct)
    return form, query
Code example #8
def idleOrders():
    """
    Returns a list of all orders waiting for couriers
    """
    q = Query(Order)
    q.filter("state =", 'needPickup')
    return q
Code example #9
File: tasks.py Project: rosskarchner/eventgrinder
def migrate_profiles(request):
    try:
        set_namespace('')
        old_site = db.get(db.Key(request.POST.get('old_site')))
        q = Query(Profile, namespace='').filter('site = ', old_site)
        old_profiles = q.fetch(1000)

        set_namespace(request.POST.get('new_namespace'))
        new_site = db.get(db.Key(request.POST.get('new_site')))

        for old_profile in old_profiles:
            new_profile = clone_entity(old_profile,
                                       key_name=old_profile.key().name())
            new_profile.site = new_site
            new_profile.put()
            #old_profile.delete()

        taskqueue.add(
            url='/admin/migrate-sources/',
            params={
                'new_namespace': request.POST.get('new_namespace'),
                'old_site': old_site.key(),
                'new_site': new_site.key(),
            },
        )

    except Exception, e:
        logging.error("%s in \n%s" %
                      (traceback.format_exc(), str(request.POST)))
Code example #10
    def get(self):
        event_shortname = self.request.get("shortname")
        page = self.request.get("page")
        page_int = int(page)

        if not event_shortname:  # request.get returns "" when the param is missing
            event_shortname = "sandy"
        event = None
        events = event_db.GetAllCached()
        for e in events:
            if e.short_name == event_shortname:
                event = e

        ids = []
        where_string = "Open"
        q = None
        if event.short_name != 'moore':
            gql_string = 'SELECT * FROM Site WHERE status >= :1 and event = :2'
            q = db.GqlQuery(gql_string, where_string, event.key())

        else:
            q = Query(model_class=site_db.Site)

            q.filter("event =", event.key())
            # NOTE: is_keys_only() is only a predicate; it does not make this a keys-only query.
            q.filter("status >= ", "Open")

        this_offset = page_int * PAGE_OFFSET

        ids = [
            key.key().id() for key in q.fetch(PAGE_OFFSET, offset=this_offset)
        ]

        def public_site_filter(site):
            # site as dict
            return {
                'event': site['event'],
                'id': site['id'],
                'case_number': site['case_number'],
                'work_type': site['work_type'],
                'claimed_by': site['claimed_by'],
                'status': site['status'],
                'floors_affected': site.get('floors_affected'),
                'blurred_latitude': site.get('blurred_latitude'),
                'blurred_longitude': site.get('blurred_longitude'),
            }

        output = json.dumps([
            public_site_filter(s[1]) for s in site_db.GetAllCached(event, ids)
        ],
                            default=dthandler)
        self.response.out.write(output)
Code example #11
def availableCouriers():
    """
    DEPRECATED
    Returns all available couriers. Search for all courier whose online =True
    """
    q = Query(Courier)
    q.filter("online =", True)
    return q
Code example #12
File: assign.py Project: mingxiao/ebay-now-trial
def availableCouriers():
    """
    DEPRECATED
    Returns all available couriers. Search for all courier whose online =True
    """
    q = Query(Courier)
    q.filter("online =", True)
    return q
Code example #13
def BatchSitesFromIds(event):
    """Return a batch of Site objects for the given event."""
    logging.debug("BatchSitesFromIds")
    q = Query(model_class=site_db.Site)
    q.filter('event = ', event)
    sites = q.run(limit=1000, offset=4000)

    return sites
Code example #14
File: Query.py Project: kemege/pythonToolkit
 def get(self):
     # List Cities
     citylist = {}
     cities = City.all()
     for city in cities:
         citylist[city.Name] = {0:abs(hash(city.Name)),1:[]}
         for station in city.station_set:
             data = {'Name':station.Name}
             query = Query(AQIData)
             query.filter('Station =', station.Code)
             query.order('-Date')
             aqi = query.get()  # newest AQIData entry for this station
             data['AQI'] = aqi.AQI
             data['Level'] = aqi.AQILevel
             data['Assess'] = aqi.AQIAssess
             data['Majority'] = aqi.Majority
             data['Date'] = aqi.Date
             citylist[city.Name][1].append(data)
     # logging.info(str(citylist))
     #----generate parameter list----------------------------------------------------------------------
     template_values = {
         'citylist' : citylist,
         }
     path = os.path.join(os.path.dirname(__file__), './/template//citylist.html')
     #----end------------------------------------------------------------------------------------------
     self.response.out.write(template.render(path,template_values))
Code example #15
    def get(self):
        MEMCACHE_KEY = "yw4ct7ntqzh93ioqaxif"
        path_info = self.request.path_info.split("/")
        debug("PATH_INFO = %s" % path_info)
        client = memcache.Client()
        LIMIT = 100

        if len(self.request.get("clear")) != 0:
            client.delete(MEMCACHE_KEY)

        template_values = {}
        template_values["all_raw_data"] = []

        old_key_list = client.get(MEMCACHE_KEY)
        if old_key_list is None:
            old_key_list = []
            query = Query(RawData, keys_only=True)
            query.order("-rawDataId")
            new_key_list = []
            count = 0
            for key_in_query in query:
                count += 1
                if count >= LIMIT: break
                if key_in_query in old_key_list: break
                new_key_list.append(key_in_query)

            all_key_list = new_key_list + old_key_list
            all_key_list = all_key_list[:LIMIT]
            client.set(MEMCACHE_KEY, all_key_list, 15)
        else:
            all_key_list = old_key_list

        for key_in_list in all_key_list:
            raw_data = client.get(str(key_in_list))
            if not isinstance(raw_data, RawData):
                raw_data_list = RawData.get([key_in_list])
                if len(raw_data_list) != 1: continue
                raw_data = raw_data_list[0]
                if not isinstance(raw_data, RawData): continue
                client.set(str(key_in_list), raw_data)
            if not isinstance(raw_data, RawData): continue

            raw_data_dict = {
                "rawDataId": raw_data.rawDataId,
                "path": raw_data.path,
                "parameters": raw_data.parameters,
                "query": raw_data.query,
                "fragment": raw_data.fragment,
                "body": raw_data.body
            }
            #logging.info(raw_data_dict)
            template_values["all_raw_data"].append(raw_data_dict)

        self.writeWithTemplate(template_values, "RawData")
Code example #16
def BatchSitesFromIds(event):
  """Return a batch of Site objects for the given event."""
  logging.debug("BatchSitesFromIds")
  q = Query(model_class = site_db.Site)
  q.filter('event = ', event)
  sites = q.run(limit=1000, offset=4000)

  return sites
Code example #17
    def get(self):
        MEMCACHE_KEY = "yw4ct7ntqzh93ioqaxif"
        path_info = self.request.path_info.split("/")
        debug("PATH_INFO = %s" % path_info)
        client = memcache.Client()
        LIMIT = 100

        if len(self.request.get("clear")) != 0:
            client.delete(MEMCACHE_KEY)
            
        template_values = {}
        template_values["all_raw_data"] = []

        
        old_key_list = client.get(MEMCACHE_KEY)
        if old_key_list is None:
            old_key_list = []
            query = Query(RawData, keys_only=True)
            query.order("-rawDataId")
            new_key_list = []
            count = 0
            for key_in_query in query:
                count += 1
                if count >= LIMIT: break
                if key_in_query in old_key_list: break
                new_key_list.append(key_in_query)
            
            all_key_list = new_key_list + old_key_list
            all_key_list = all_key_list[:LIMIT]
            client.set(MEMCACHE_KEY, all_key_list, 15)
        else:
            all_key_list = old_key_list

        for key_in_list in all_key_list:
            raw_data = client.get(str(key_in_list))
            if not isinstance(raw_data, RawData):
                raw_data_list = RawData.get([key_in_list])
                if len(raw_data_list) != 1: continue
                raw_data = raw_data_list[0]
                if not isinstance(raw_data, RawData): continue
                client.set(str(key_in_list), raw_data)
            if not isinstance(raw_data, RawData): continue

            raw_data_dict = {"rawDataId": raw_data.rawDataId,
                            "path":raw_data.path,
                            "parameters": raw_data.parameters,
                            "query": raw_data.query,
                            "fragment":raw_data.fragment,
                            "body": raw_data.body }
            #logging.info(raw_data_dict)
            template_values["all_raw_data"].append(raw_data_dict)
        
        self.writeWithTemplate(template_values, "RawData")
Code example #18
File: assign.py Project: mingxiao/ebay-now-trial
def couriersIdEnRoute():
    """
    Returns a list of courierIds of all the couriers who are currently on delivery given a list of
    all orders currently enRoute.
    
    enrouteOrder - a Query() object of all the orders whose state = 'enRoute'
    """
    couriers = []
    q = Query(Order,projection=["courierId"])
    q.filter("state =", "enRoute")
    for order in q:
        couriers.append(order.courierId)
    return couriers
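Since several enroute orders can share a courier, a caller that needs unique ids can deduplicate the result; a one-line hypothetical usage:

unique_courier_ids = set(couriersIdEnRoute())  # collapse repeated courierIds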
Code example #19
 def testQuery(self):
     courier = Courier(courierId=1,lat=2.0,lon=3.0,online=True)
     courier.put()
     courier = Courier(courierId=2,lat=12.0,lon=13.0,online=False)
     courier.put()
     courier = Courier(courierId=14,lat=2.0,lon=3.0,online=True)
     courier.put()
     courier = Courier(courierId=51,lat=2.0,lon=3.0,online=True)
     courier.put()
     q = Query(Courier)
     q.filter("online = ", True)
     for courier in q:
         self.assertNotEqual(2, courier.courierId)
Code example #20
def couriersIdEnRoute():
    """
    Returns a list of courierIds of all the couriers who are currently on delivery given a list of
    all orders currently enRoute.
    
    enrouteOrder - a Query() object of all the orders whose state = 'enRoute'
    """
    couriers = []
    q = Query(Order, projection=["courierId"])
    q.filter("state =", "enRoute")
    for order in q:
        couriers.append(order.courierId)
    return couriers
Code example #21
  def get(self):
    event_shortname = self.request.get("shortname")
    page = self.request.get("page")
    page_int = int(page)

    if not event_shortname:  # request.get returns "" when the param is missing
      event_shortname = "sandy"
    event = None
    events = event_db.GetAllCached()
    for e in events:
      if e.short_name == event_shortname:
        event = e

    ids = []
    where_string = "Open"
    q = None
    if event.short_name != 'moore':
      gql_string = 'SELECT * FROM Site WHERE status >= :1 and event = :2'
      q = db.GqlQuery(gql_string, where_string, event.key())

    else:
      q = Query(model_class = site_db.Site)

      q.filter("event =", event.key())
      # NOTE: is_keys_only() is only a predicate; it does not make this a keys-only query.
      q.filter("status >= ", "Open")
	  
      this_offset = page_int * PAGE_OFFSET
	  
      ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset = this_offset)]
           
    this_offset = page_int * PAGE_OFFSET

    ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset=this_offset)]

    def public_site_filter(site):
        # site as dict
        return {
            'event': site['event'],
            'id': site['id'],
            'case_number': site['case_number'],
            'work_type': site['work_type'],
            'claimed_by': site['claimed_by'],
            'status': site['status'],
            'floors_affected': site.get('floors_affected'),
            'blurred_latitude': site.get('blurred_latitude'),
            'blurred_longitude': site.get('blurred_longitude'),
        }
	
    output = json.dumps(
        [public_site_filter(s[1]) for s in site_db.GetAllCached(event, ids)],
        default=dthandler
    )
    self.response.out.write(output)
Code example #22
File: views.py Project: rosskarchner/eventgrinder
def migrate(request):
    def clone_entity(e, **extra_args):
        """Clones an entity, adding or overriding constructor attributes.

      The cloned entity will have exactly the same property values as the original
      entity, except where overridden. By default it will have no parent entity or
      key name, unless supplied.

      Args:
        e: The entity to clone
        extra_args: Keyword arguments to override from the cloned entity and pass
          to the constructor.
      Returns:
        A cloned, possibly modified, copy of entity e.
      """
        klass = e.__class__
        props = dict((k, v.__get__(e, klass))
                     for k, v in klass.properties().iteritems())
        props.update(extra_args)
        return klass(**props)

    set_namespace('')
    key_name = os.environ.get('HTTP_HOST')
    #site=site=models.Eventsite.all().filter('hostnames = ',key_name).get()
    site = Query(models.Eventsite,
                 namespace='').filter('hostnames = ', key_name).get()
    if not site: return HttpResponse("Couldn't find a site to migrate")
    new_namespace = request.environ.get('HTTP_HOST').split(':')[0]
    old_chimp = site.chimp
    set_namespace(new_namespace)
    namespace_registry.register(new_namespace)
    new_site = clone_entity(site, key_name=new_namespace)
    new_site.put()
    namespace_registry.register(new_namespace)
    if old_chimp:
        new_chimp = clone_entity(old_chimp, parent=new_site)
        new_chimp.put()
    taskqueue.add(
        url='/admin/migrate-profiles/',
        params={
            'new_namespace': new_namespace,
            'old_site': site.key(),
            'new_site': new_site.key(),
        },
    )

    set_namespace('')
    return HttpResponse('Migrated Site')
Code example #23
File: tasks.py Project: rosskarchner/eventgrinder
def migrate_events(request):
    try:
        if request.method == 'POST':
            set_namespace('')
            logging.warning("namespace: %s" % get_namespace())
            cursor = request.POST.get('cursor')
            old_site = db.get(db.Key(request.POST.get('old_site')))
            logging.warning("old site: %s" % old_site)

            #q=Event.all().filter('site =', old_site)
            q = Query(Event, namespace='').filter('site = ', old_site)
            if cursor:
                q = q.with_cursor(cursor)
            events = q.fetch(1)
            logging.warning(events)
            set_namespace(request.POST.get('new_namespace'))
            new_site = db.get(db.Key(request.POST.get('new_site')))

            if events:
                taskqueue.add(
                    url='/admin/migrate-events/',
                    params={
                        'new_namespace': request.POST.get('new_namespace'),
                        'old_site': old_site.key(),
                        'new_site': new_site.key(),
                        'cursor': q.cursor()
                    },
                )
            for event in events:
                event.site = new_site

                #new_event.site=new_site
                if event.source:
                    event.source = ICalendarSource.all().filter(
                        'slug =', event.source.slug).get()
                if event.submitted_by:
                    event.submitted_by = Profile.all().filter(
                        'slug =', event.submitted_by.slug).get()
                if event.approved_by:
                    event.approved_by = Profile.all().filter(
                        'slug =', event.approved_by.slug).get()
                new_event = clone_event(event, key_name=event.key().name())
                #event.delete()
                new_event.put()

    except Exception, e:
        logging.error("%s in \n%s" %
                      (traceback.format_exc(), str(request.POST)))
Code example #24
def available2():
    """
    Returns all couriers who are currently available.
    
    If GQL had a 'NOT IN' function, we would use the following:
    q = Query(Courier)
    c = couriersEnRoute()
    q.filter("courierId not in", c)
    return q
    
    However, it currently does not, so we have to work around it.
    """
    availId = availableCourierId()
    q = Query(Courier)
    q.filter("courierId in ", availId)
    return q
Code example #25
File: assign.py Project: mingxiao/ebay-now-trial
def available2():
    """
    Returns all couriers who are currently available.
    
    If GQL had a 'NOT IN' function, we would use the following:
    q = Query(Courier)
    c = couriersEnRoute()
    q.filter("courierId not in", c)
    return q
    
    However, it currently does not, so we have to work around it.
    """
    availId = availableCourierId()
    q = Query(Courier)
    q.filter("courierId in ", availId)
    return q
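The workaround above depends on an availableCourierId() helper that builds the complement by hand. A minimal sketch of what it could look like, assuming the allCourierIds() and couriersIdEnRoute() helpers shown elsewhere on this page (the exact implementation is a guess, not taken from the project):

def availableCourierId():
    # Every courier id, minus the ids currently out on a delivery.
    enroute = set(couriersIdEnRoute())
    return [cid for cid in allCourierIds() if cid not in enroute]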
Code example #26
File: views.py Project: datacommunitydc/eventgrinder
def migrate(request):
    
    def clone_entity(e, **extra_args):
      """Clones an entity, adding or overriding constructor attributes.

      The cloned entity will have exactly the same property values as the original
      entity, except where overridden. By default it will have no parent entity or
      key name, unless supplied.

      Args:
        e: The entity to clone
        extra_args: Keyword arguments to override from the cloned entity and pass
          to the constructor.
      Returns:
        A cloned, possibly modified, copy of entity e.
      """
      klass = e.__class__
      props = dict((k, v.__get__(e, klass)) for k, v in klass.properties().iteritems())
      props.update(extra_args)
      return klass(**props)

    set_namespace('')
    key_name = os.environ.get('HTTP_HOST')
    #site=site=models.Eventsite.all().filter('hostnames = ',key_name).get()
    site=Query(models.Eventsite, namespace='').filter('hostnames = ', key_name).get()
    if not site: return HttpResponse("Couldn't find a site to migrate")
    new_namespace=request.environ.get('HTTP_HOST').split(':')[0]
    old_chimp=site.chimp
    set_namespace(new_namespace)
    namespace_registry.register(new_namespace)
    new_site=clone_entity(site, key_name=new_namespace)
    new_site.put()
    namespace_registry.register(new_namespace)
    if old_chimp:
        new_chimp=clone_entity(old_chimp, parent=new_site)
        new_chimp.put()
    taskqueue.add(url='/admin/migrate-profiles/', params={'new_namespace':new_namespace,
                                        'old_site':site.key(),
                                        'new_site':new_site.key(),
                                        },)

    set_namespace('')
    return HttpResponse('Migrated Site')
Code example #27
File: RandomRecipeHandler.py Project: ben/magrecipes
    def get(self):
        q = Query(Recipe, keys_only=True)
        i = random.randint(0, q.count() - 1)
        recipe = Recipe.get(q[i])
        if recipe is None:
            # TODO: error page?
            return

        recipe_dict = recipe.to_dict()

        templatevalues = RequestContext(self.request, {
            'recipe' : recipe,
            'json' : simplejson.dumps(recipe_dict),
            })

        path = os.path.join(os.path.dirname(__file__), 'randomrecipe.html')
        self.response.out.write(template.render(path, templatevalues))
Code example #28
File: adapted_models.py Project: Samnsparky/Sirpple
    def get_children(self, child_class, **kwargs):
        """ 
        Get all of the children of this model

        @param child_class: The child class to look for children in
        @type child_class: Any model instance
        @return: Iterator over this model's children
        @rtype: Iterator
        """

        query = Query(child_class)
        query.ancestor(self)

        for name, value in kwargs.items():
            query.filter(name + " =", value)

        # TODO: Ensure only immediate children

        return query
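A hypothetical call, assuming a Project entity with Milestone child entities that carry a status property (both model names are invented for illustration):

# Milestone children of `project` whose status is 'open':
open_milestones = project.get_children(Milestone, status='open')
for milestone in open_milestones:
    print milestone.key().name()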
Code example #29
 def is_empty(self,items):
     """
     Returns true if items is empty. Items could be a list or a cursor to a query result
     """
     if isinstance(items, list):
         return len(items) == 0
     elif isinstance(items, Query):
         return items.count() == 0
     else:
         assert False
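A hypothetical call from within the same class, showing that plain lists and unfetched queries are both accepted:

if self.is_empty(Query(Courier).filter("online =", True)):
    logging.info("no couriers are online")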
Code example #30
File: tasks.py Project: Hubble1/eventgrinder
def migrate_events(request):
    try:
        if request.method == 'POST':
            set_namespace('')
            logging.warning("namespace: %s" % get_namespace())
            cursor=request.POST.get('cursor')
            old_site=db.get(db.Key(request.POST.get('old_site')))
            logging.warning("old site: %s" % old_site)
            
            #q=Event.all().filter('site =', old_site)
            q=Query(Event, namespace='').filter('site = ',old_site)
            if cursor:
                q=q.with_cursor(cursor)
            events= q.fetch(1)                                               
            logging.warning(events)
            set_namespace(request.POST.get('new_namespace'))
            new_site=db.get(db.Key(request.POST.get('new_site')))
            
            if events:
                taskqueue.add(url='/admin/migrate-events/', params={'new_namespace':request.POST.get('new_namespace'),
                                                                    'old_site':old_site.key(),
                                                                    'new_site':new_site.key(),
                                                                    'cursor':q.cursor()
                                                                    },)
            for event in events:
                event.site=new_site

                #new_event.site=new_site
                if event.source:
                    event.source=ICalendarSource.all().filter('slug =', event.source.slug).get()
                if event.submitted_by:
                    event.submitted_by=Profile.all().filter('slug =', event.submitted_by.slug).get()
                if event.approved_by:
                    event.approved_by=Profile.all().filter('slug =', event.approved_by.slug).get()
                new_event=clone_event(event, key_name=event.key().name())
                #event.delete()
                new_event.put()

    except Exception,e:
        logging.error("%s in \n%s"% (traceback.format_exc(),str(request.POST)))
Code example #31
File: site_db.py Project: DuaneNClark/crisiscleanup
def GetAllCached(event, ids = None):
  if ids is None:
    q = Query(model_class = Site, keys_only = True)
    q.filter("event =", event)
    ids = [key.id() for key in q.run(batch_size = 2000)]
  lookup_ids = [str(id) for id in ids]
  cache_results = memcache.get_multi(lookup_ids, key_prefix = cache_prefix)
  not_found = [id for id in ids if not str(id) in cache_results.keys()]
  data_store_results = []
  orgs = dict([(o.key(), o) for o in organization.GetAllCached()])
  events = dict([(e.key(), e) for e in event_db.GetAllCached()])
  if len(not_found):
    data_store_results = [(site, SiteToDict(site)) for site in
                          GetSitesAndSetReferences(not_found, events, orgs)]
    memcache.set_multi(dict([(str(site[0].key().id()), site)
                             for site in data_store_results]),
                       key_prefix = cache_prefix,
                       time = cache_time)

  sites = cache_results.values() + data_store_results
  return sites
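A hypothetical call: with ids omitted, every Site id for the event is resolved via a keys-only scan and then served from memcache where possible (assuming SiteToDict exposes 'id' and 'status', as the other examples on this page suggest):

for site, site_dict in GetAllCached(event):
    logging.debug("site %s has status %s", site_dict['id'], site_dict['status'])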
Code example #32
File: tasks.py Project: rosskarchner/eventgrinder
def mail_queue_expander(request):
    BATCH_SIZE = 5
    edition = db.get(request.form['edition'])
    if not edition: return  # nothing to do if the edition no longer exists
    page = int(request.form.get('page', 0))
    subscriber_q = Query(subscriptions.models.Subscription,
                         keys_only=True).filter('site =', edition.site).filter(
                             'active =', True)
    if 'cursor' in request.form:
        subscriber_q = subscriber_q.with_cursor(request.form['cursor'])
    subscribers = subscriber_q.fetch(BATCH_SIZE)
    if not subscribers:
        edition.status = 'complete'
        edition.put()
        return
    task = Task(params={
        'edition': edition.key(),
        'cursor': subscriber_q.cursor(),
        'page': page + 1
    },
                name="%s-%s-%s-%s" %
                (edition.site.slug, edition.issue_num,
                 edition.publish_after.strftime("%Y%j%H%M-%S"), page + 1))
    try:
        MailQueueExpander.add(task)
    except (TaskAlreadyExistsError, TombstonedTaskError):
        pass  # the named task was already enqueued; safe to continue
    for sub in subscribers:

        def _tx():

            pending_email = PendingEmail(subscription=sub, edition=edition)
            db.put(pending_email)
            SendNewsletter.add(
                Task(params={'pending_email': pending_email.key()}),
                transactional=True)

        db.run_in_transaction_custom_retries(10, _tx)
Code example #33
def GetAllCached(event, ids=None):
    if ids is None:
        q = Query(model_class=Site, keys_only=True)
        q.filter("event =", event)
        ids = [key.id() for key in q.run(batch_size=2000)]
    lookup_ids = [str(id) for id in ids]
    cache_results = memcache.get_multi(lookup_ids, key_prefix=cache_prefix)
    not_found = [id for id in ids if not str(id) in cache_results.keys()]
    data_store_results = []
    orgs = dict([(o.key(), o) for o in organization.GetAllCached()])
    events = dict([(e.key(), e) for e in event_db.GetAllCached()])
    if len(not_found):
        data_store_results = [
            (site, SiteToDict(site))
            for site in GetSitesAndSetReferences(not_found, events, orgs)
        ]
        memcache.set_multi(dict([(str(site[0].key().id()), site)
                                 for site in data_store_results]),
                           key_prefix=cache_prefix,
                           time=cache_time)

    sites = cache_results.values() + data_store_results
    return sites
Code example #34
File: tasks.py Project: rosskarchner/eventgrinder
def migrate_sources(request):
    try:
        set_namespace('')
        old_site = db.get(db.Key(request.POST.get('old_site')))
        q = Query(ICalendarSource, namespace='').filter('site = ', old_site)
        old_sources = q.fetch(1000)

        set_namespace(request.POST.get('new_namespace'))
        new_site = db.get(db.Key(request.POST.get('new_site')))

        for old_source in old_sources:

            if old_source.submitted_by:
                old_source.submitted_by = Profile.all().filter(
                    'slug =', old_source.submitted_by.slug).get()
            if old_source.approved_by:
                old_source.approved_by = Profile.all().filter(
                    'slug =', old_source.approved_by.slug).get()

            new_source = clone_source(old_source, key_name=old_source.slug)
            new_source.site = new_site
            new_source.put()
            #old_source.delete()

        taskqueue.add(
            url='/admin/migrate-events/',
            params={
                'new_namespace': request.POST.get('new_namespace'),
                'old_site': old_site.key(),
                'new_site': new_site.key(),
            },
        )

    except Exception, e:
        logging.error("%s in \n%s" %
                      (traceback.format_exc(), str(request.POST)))
Code example #35
def check_and_write_row(row_d):
    """
    Check and save @row_d, return True if ok and False if failed.
    """
    row_acceptable = bool(row_d['Date Password Provided'])
    if row_acceptable:
        # get org
        query = Query(model_class=Organization)
        query.filter('name = ', row_d['ORGANIZATION'])
        org = query.get()
        if org:
            try:
                # write new contact
                new_contact = Contact(
                    first_name=row_d['First Name'],
                    last_name=row_d['Last Name'],
                    email=row_d['E-MAIL'],
                    phone=row_d['PHONE #'],
                    organization=org,
                )
                new_contact.save()
                return True
            except BadValueError:
                pass
    return False
Code example #36
File: gae_controllers.py Project: Samnsparky/Sirpple
    def __do_get(self):
        arguments = self.arguments()

        # TODO: Limit query by __limit__ argument

        # Try to determine the project_id
        project = self.__get_project()

        # See if we can short-cut by looking up using an ID
        if GAEController.INSTANCE_ID_PARAM in arguments:
            instance = self.__get_instance_by_id()
            if instance is None:
                self.error(BaseHandler.METHOD_NOT_ALLOWED)
                return
            instances = [instance]

        # If not, build a query from the given parameters
        else:
            fields = self.__target_class_defn.get_fields()

            # NOTE: Query class handles sql injection
            query = Query(self.__target_class)

            # Build query with filters
            for field_name in filter(lambda x: x.is_exposed(), fields.keys()):

                if field_name in arguments:
                    query.filter(field_name + " =", self.get(field_name))
            
            instances = list(query)

        # Write out the serialized response, limited to authorized instances
        check_security = lambda x: self.__is_authorized(x)
        self.__write_seralized_response(filter(check_security, instances))

        self.set_status(BaseHandler.OK)
Code example #37
File: site_db.py Project: svn2github/cctest1-mirror
def GetAllCached(event, ids = None):
  if ids is None:
    # `cache_ids` and `county` are assumed to be module-level settings in this project.
    if cache_ids:
      cache_key_for_ids = "SiteDictIds:" + str(event.key().id()) + ":" + county
      ids = memcache.get(cache_key_for_ids)
      if not ids:
        # Retrieve all matching keys. As a keys_only scan,
        # This should be more efficient than a full data scan.
        q = Query(model_class = Site, keys_only = True)
        q.filter("event =", event)
        ids = [key.id() for key in q]
        # Cache these for up to six minutes.
        # TODO(Jeremy): This may do more harm than
        # good, depending on how often
        # people reload the map.
        memcache.set(cache_key_for_ids, ids,
                     time = 360)
    else:
      q = Query(model_class = Site, keys_only = True)
      q.filter("event =", event)
    
      ids = [key.id() for key in q.run(batch_size = 2000)]
  lookup_ids = [str(id) for id in ids]
  cache_results = memcache.get_multi(lookup_ids, key_prefix = cache_prefix)
  not_found = [id for id in ids if not str(id) in cache_results.keys()]
  data_store_results = []
  orgs = dict([(o.key(), o) for o in organization.GetAllCached()])
  events = dict([(e.key(), e) for e in event_db.GetAllCached()])
  if len(not_found):
    data_store_results = [(site, SiteToDict(site)) for site in
                          GetSitesAndSetReferences(not_found, events, orgs)]
    memcache.set_multi(dict([(str(site[0].key().id()), site)
                             for site in data_store_results]),
                       key_prefix = cache_prefix,
                       time = cache_time)

  sites = cache_results.values() + data_store_results
  return sites
Code example #38
def create_work_order_search_form(events, work_types, limiting_event=None):
    events_by_recency = sorted(events, key=lambda event: event.key().id(), reverse=True)

    # determine orgs and work types to include
    if limiting_event:
        if limiting_event.key() not in [e.key() for e in events]:
            raise Exception("Event %s unavailable" % limiting_event)
        orgs = Organization.all().filter('incidents', limiting_event.key())
        work_types = [
            site.work_type for site
            in Query(Site, projection=['work_type'], distinct=True) \
                .filter('event', limiting_event.key())
            if site.work_type in work_types
        ]
    else:
        orgs = Organization.all().filter('incidents in', list(events))

    class WorkOrderSearchForm(Form):

        def __init__(self, *args, **kwargs):
            super(WorkOrderSearchForm, self).__init__(*args, **kwargs)
            self.offset.data = 0  # offset set by the form should always be 0

        offset = HiddenField(default="0")
        order = HiddenField()
        event = SelectField(
            choices=[
                (e.key(), e.name) for e in events_by_recency
            ],
            default=events_by_recency[0].key()
        )
        query = TextField("Search")
        reporting_org = SelectField(
            choices=[('', '')] + [
                (org.key(), org.name) for org in orgs
            ],
            default=''
        )
        claiming_org = SelectField(
            choices=[('', '')] + [
                (org.key(), org.name) for org in orgs
            ],
            default=''
        )
        work_type = SelectField(
            choices=[('', '')] + [
                (work_type, work_type) for work_type in work_types
            ],
            default=''
        )
        status = SelectField(
            choices=[('', '')] + [
                (s, s) for s in Site.status.choices
            ],
            default=''
        )
        per_page = SelectField(
            choices=[
                (n, n) for n in [10, 50, 100, 250]
            ],
            coerce=int,
            default=10
        )

    return WorkOrderSearchForm
Code example #39
    def _write_csv(cls, params, filename):
        " Note: run deferred only. "
        org = pickle.loads(params['org_pickle'])
        event = pickle.loads(params['event_pickle'])
        post_data = pickle.loads(params['post_pickle'])

        _, query = form_and_query_from_params(org, event, None, post_data)

        # get unique zip codes without using distinct projections (for simpler indexes)
        zip_codes = set(site.zip_code.strip() for site in query if site.zip_code)
        zip_data = {zip_code: {} for zip_code in zip_codes}

        # gather statistics on site statuses
        for zip_code in zip_codes:
            status_counts = {}
            site_statuses = Query(Site, projection=('status',)) \
                .filter('zip_code', zip_code)
            for site in site_statuses:
                status_counts[site.status] = status_counts.get(site.status, 0) + 1
            zip_data[zip_code]['stats'] = status_counts

        # lookup primary city from zip code
        for zip_code in zip_codes:
            zip_code_obj = ZipCode.get_by_key_name(zip_code)
            zip_data[zip_code]['primary_city'] = \
                zip_code_obj.primary_city if zip_code_obj else u"Unknown"

        # call votesmart for data on officials
        candidate_ids = set()
        for zip_code in zip_codes:
            officials = votesmart.officials_by_zip(zip_code)
            zip_data[zip_code][u'officials'] = officials
            candidate_ids.update(official[u'candidateId'] for official in officials)

        # lookup addresses of officials
        official_addresses = {
            candidate_id: votesmart.candidate_addresses(candidate_id)
            for candidate_id in candidate_ids
        }

        # create CSV sio of officials by zip code
        candidate_field_names = officials[0].keys()
        official_field_names = (
            [u'zip_code', u'primary_city'] + 
            STATUSES_UNICODE + 
            [u'candidateId'] + candidate_field_names
        )
        officials_csv_sio = StringIO()
        csv_writer = UnicodeDictWriter(officials_csv_sio, official_field_names)
        csv_writer.writeheader()
        for zip_code in zip_data:
            for official in zip_data[zip_code][u'officials']:
                row_d = {
                    u'zip_code': zip_code,
                    u'primary_city': zip_data[zip_code][u'primary_city']
                }
                row_d.update(zip_data[zip_code][u'stats'])
                row_d.update(official)
                csv_writer.writerow(row_d)

        # create CSV sio of addresses by candidate
        def flatten_office_dict(d):
            return dict([
                (u'address.' + k, v) for (k,v) in d.get(u'address', {}).items()
            ] + [
                (u'phone.' + k, v) for (k,v) in d.get(u'phone', {}).items()
            ])

        addresses_field_names = (
            [u'candidateId'] + 
            sorted(
                flatten_office_dict(
                    next(official_addresses.itervalues())[u'offices'][0]
                ).keys()
            )
        )

        addresses_csv_sio = StringIO()
        csv_writer = UnicodeDictWriter(addresses_csv_sio, addresses_field_names)
        csv_writer.writeheader()
        for candidate_id, addresses_sub_dict in official_addresses.items():
            for office in addresses_sub_dict[u'offices']:
                row_d = flatten_office_dict(office)
                row_d[u'candidateId'] = candidate_id
                csv_writer.writerow(row_d)

        # create XML sio of addresses
        rewritten_addresses_for_xml = {
            u'root': {
                u'addresses': [
                    dict(
                        [(u'@candidateID', candidate_id)] +
                        addresses_sub_dict.items()
                    ) for candidate_id, addresses_sub_dict in official_addresses.items()
                ]
            }
        }
        xml = xmltodict.unparse(
            rewritten_addresses_for_xml,
            pretty=True
        )
        xml_sio = StringIO()
        xml_sio.write(xml.encode('utf-8'))

        # create zip archive of both
        zip_sio = StringIO()
        zf = zipfile.ZipFile(zip_sio, 'w')
        zf.writestr('zips.csv', officials_csv_sio.getvalue())
        zf.writestr('addresses.xml', xml_sio.getvalue())
        zf.writestr('addresses.csv', addresses_csv_sio.getvalue())
        zf.close()

        # create CSV file from data
        bucket_path = BUCKET_NAME + '/' + filename
        zip_gcs_fd = cloudstorage.open(
            bucket_path,
            'w',
            content_type='application/zip'
        )
        zip_gcs_fd.write(zip_sio.getvalue())
        zip_gcs_fd.close()
Code example #40
 def get_resume_state(self, key_value):
     my_query = Query()
     my_query.filter('__key__ =', key_value)
     return my_query.fetch(100)
Code example #41
def query_from_form(org, event, form, projection=None, distinct=None):
    # start query based on admin type
    if org.is_global_admin:
        query = Query(Site, projection=projection, distinct=distinct)
    elif org.is_local_admin:
        if projection is not None or distinct is not None:
            raise Exception("Not currently supported for local admin")
        query = Query(Site).filter('event in', [
            incident.key() for incident in org.incidents
        ])
    else:
        raise Exception("Not an admin")

    # if a local admin, filter to logged in event
    if org.is_local_admin:
        query.filter('event', event.key())

    # apply filters if set 
    if form.event.data:
        query.filter('event', Key(form.event.data))
    if form.reporting_org.data:
        query.filter('reported_by', Key(form.reporting_org.data))
    if form.claiming_org.data:
        query.filter('claimed_by', Key(form.claiming_org.data))
    if form.work_type.data:
        query.filter('work_type', form.work_type.data)
    if form.status.data:
        query.filter('status', form.status.data)

    # apply order
    if form.order.data:
        query.order(form.order.data)

    return query
Code example #42
File: utility.py Project: Hubble1/eventgrinder
def profile_for_user(user, keys_only=False):
    q = Query(Profile, keys_only=keys_only).filter('user =', user)
    return q.fetch(1)[0]
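Note that fetch(1)[0] raises IndexError when no profile exists for the user; a slightly defensive variant (an illustration, not the project's code):

def profile_for_user_or_none(user, keys_only=False):
    # Returns None instead of raising when the user has no Profile.
    results = Query(Profile, keys_only=keys_only).filter('user =', user).fetch(1)
    return results[0] if results else None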
Code example #43
File: Query.py Project: kemege/pythonToolkit
    def get(self):
        # get city id
        city_id = self.request.get('city','')
        try:
            city_id = int(city_id)
        except:
            city_id = 0

        if city_id == 0:
            # no city? --> show city list
            citylist = {}
            cities = City.all()
            for city in cities:
                citylist[city.Name] = [city.Code,abs(hash(city.Name))]
            #----generate parameter list----------------------------------------------------------------------
            template_values = {
                'citylist' : citylist,
                'stationlist' : None
                }
            path = os.path.join(os.path.dirname(__file__), './/template//citylist_all.html')
            #----end------------------------------------------------------------------------------------------
            self.response.out.write(template.render(path,template_values))
        else:
            # show station plots
            stationlist = {}
            query = Query(City)
            query.filter('Code =', city_id)
            city = query.get()  # the City matching this code
            for station in city.station_set:
                data = []
                query = Query(AQIData)
                query.filter('Station =', station.Code)
                query.order('-Date')
                aqi = query.fetch(None)  # all AQIData rows for this station, newest first
                for entry in aqi:
                    data.append("['%s',%d]" % (str(entry.Date),entry.AQI))
                stationlist[station.Name] = ','.join(data)
            #----generate parameter list----------------------------------------------------------------------
            template_values = {
                'citylist' : None,
                'stationlist' : stationlist
                }
            path = os.path.join(os.path.dirname(__file__), './/template//citylist_all.html')
            #----end------------------------------------------------------------------------------------------
            self.response.out.write(template.render(path,template_values))
Code example #44
File: utility.py Project: rosskarchner/eventgrinder
def profile_for_user(user, keys_only=False):
    q = Query(Profile, keys_only=keys_only).filter('user =', user)
    return q.fetch(1)[0]
Code example #45
File: resume.py Project: subtle42/jcd3
 def get_resume_state(self, key_value):
     my_query = Query()
     my_query.filter('__key__ =', key_value)
     return my_query.fetch(100)
Code example #46
  def AuthenticatedGet(self, org, event):
    id_param = self.request.get('id')
    latitude_param = self.request.get("latitude")
    longitude_param = self.request.get("longitude")
    
    if latitude_param and longitude_param:
      try:
        latitude = float(latitude_param)
        longitude = float(longitude_param)
      except:
        self.response.set_status(404)
        return
      json_array = []
      for site in site_db.Site.gql(
           'Where latitude = :1 and longitude = :2 and event = :3', latitude, longitude, event.key()):
        json_string = json.dumps({
            "id": site.key().id(),
            "address": site.address,
        })
        json_array.append(json_string)
      self.response.out.write(
            json.dumps(json_array, default = dthandler))      
      return
      
    if id_param == "all":
        status = self.request.get("status", default_value = "")
        page = self.request.get("page", default_value = "0")
        page_int = int(page)
        logging.debug("page = " + page)
        
        #query_string = "SELECT * FROM Site WHERE event = :event_key LIMIT %s OFFSET %s" % (PAGE_OFFSET, page_int * PAGE_OFFSET)   
        ##logging.debug("OFFSET = " + PAGE_OFFSET)
        ##logging.debug("page * OFFSET = " + page_int * PAGE_OFFSET)
        
        #query = db.GqlQuery(query_string, event_key = event.key())
        q = Query(model_class = site_db.Site)
       
        ids = []
      #filter by event
        q.filter("event =", event.key())
        # NOTE: is_keys_only() is only a predicate; it does not make this a keys-only query.
        if status == "open":
            logging.debug("status == open")
            q.filter("status >= ", "Open")
        elif status == "closed":
            q.filter("status < ", "Open")
            logging.debug("status == closed")
        logging.debug("status = " + status)
            
        #query = q.fetch(PAGE_OFFSET, offset = page_int * PAGE_OFFSET)
        #for q in query:
            #ids.append(q.key().id())
            
        this_offset = page_int * PAGE_OFFSET
        logging.debug("this_offset = " + str(this_offset))
            
        ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset = this_offset)]
        logging.debug("ids len = " + str(len(ids)))
           
        output = json.dumps(
            [s[1] for s in site_db.GetAllCached(event, ids)],
            default=dthandler)
        logging.info("after output")
        self.response.out.write(output)
        return
        
        
    #if id_param == "all":
      #county = self.request.get("county", default_value = "all")
      #status = self.request.get("status", default_value = "")
      #q = Query(model_class = site_db.Site, keys_only = True)
      
      ##filter by event
      #q.filter("event =", event.key())
      #if status == "open":
        #q.filter("status >= ", "Open")
      #elif status == "closed":
        #q.filter("status < ", "Open")
      #if county != "all":
        #q.filter("county =", county)

      #ids = [key.id() for key in q.run(batch_size = 2000)]
      #output = json.dumps(
        #[s[1] for s in site_db.GetAllCached(event, ids)],
        #default=dthandler)
      #self.response.out.write(output)
      #return
    try:
      id = int(id_param)
    except:
      self.response.set_status(404)
      return
    site = site_db.GetAndCache(id)
    if not site:
      self.response.set_status(404)
      return
    # TODO(jeremy): Add the various fixes for Flash
    # and other vulnerabilities caused by having user-generated
    # content in JSON strings, by setting this as an attachment
    # and prepending the proper garbage strings.
    # Javascript security is really a pain.
    self.response.out.write(
        json.dumps(site_db.SiteToDict(site), default = dthandler))
Code example #47
    def AuthenticatedGet(self, org, event):
        id_param = self.request.get("id")
        latitude_param = self.request.get("latitude")
        longitude_param = self.request.get("longitude")

        if latitude_param and longitude_param:
            try:
                latitude = float(latitude_param)
                longitude = float(longitude_param)
            except:
                self.response.set_status(404)
                return
            json_array = []
            for site in site_db.Site.gql(
                "Where latitude = :1 and longitude = :2 and event = :3", latitude, longitude, event.key()
            ):
                json_string = json.dumps({"id": site.key().id(), "address": site.address})
                json_array.append(json_string)
            self.response.out.write(json.dumps(json_array, default=dthandler))
            return

        if id_param == "all":
            status = self.request.get("status", default_value="")
            page = self.request.get("page", default_value="0")
            page_int = int(page)
            logging.debug("page = " + page)

            # query_string = "SELECT * FROM Site WHERE event = :event_key LIMIT %s OFFSET %s" % (PAGE_OFFSET, page_int * PAGE_OFFSET)
            ##logging.debug("OFFSET = " + PAGE_OFFSET)
            ##logging.debug("page * OFFSET = " + page_int * PAGE_OFFSET)

            # query = db.GqlQuery(query_string, event_key = event.key())
            q = Query(model_class=site_db.Site)

            ids = []
            # filter by event
            q.filter("event =", event.key())
            # NOTE: is_keys_only() is only a predicate; it does not make this a keys-only query.
            if status == "open":
                logging.debug("status == open")
                q.filter("status >= ", "Open")
            elif status == "closed":
                q.filter("status < ", "Open")
                logging.debug("status == closed")
            logging.debug("status = " + status)

            # query = q.fetch(PAGE_OFFSET, offset = page_int * PAGE_OFFSET)
            # for q in query:
            # ids.append(q.key().id())

            this_offset = page_int * PAGE_OFFSET
            logging.debug("this_offset = " + str(this_offset))

            ids = [key.key().id() for key in q.fetch(PAGE_OFFSET, offset=this_offset)]
            logging.debug("ids len = " + str(len(ids)))

            output = json.dumps([s[1] for s in site_db.GetAllCached(event, ids)], default=dthandler)
            self.response.out.write(output)
            return

        # if id_param == "all":
        # county = self.request.get("county", default_value = "all")
        # status = self.request.get("status", default_value = "")
        # q = Query(model_class = site_db.Site, keys_only = True)

        ##filter by event
        # q.filter("event =", event.key())
        # if status == "open":
        # q.filter("status >= ", "Open")
        # elif status == "closed":
        # q.filter("status < ", "Open")
        # if county != "all":
        # q.filter("county =", county)

        # ids = [key.id() for key in q.run(batch_size = 2000)]
        # output = json.dumps(
        # [s[1] for s in site_db.GetAllCached(event, ids)],
        # default=dthandler)
        # self.response.out.write(output)
        # return
        try:
            id = int(id_param)
        except:
            self.response.set_status(404)
            return
        site = site_db.GetAndCache(id)
        if not site:
            self.response.set_status(404)
            return
        # TODO(jeremy): Add the various fixes for Flash
        # and other vulnerabilities caused by having user-generated
        # content in JSON strings, by setting this as an attachment
        # and prepending the proper garbage strings.
        # Javascript security is really a pain.
        self.response.out.write(json.dumps(site_db.SiteToDict(site), default=dthandler))
Code example #48
def allCourierIds():
    q = Query(Courier, projection=["courierId"])
    ids = []
    for c in q:
        ids.append(c.courierId)
    return ids
Code example #49
def unavailableCouriers():
    unavailId = couriersIdEnRoute()
    q = Query(Courier)
    q.filter("courierId in ", unavailId)
    return q
Code example #50
File: assign.py Project: mingxiao/ebay-now-trial
def unavailableCouriers():
    unavailId = couriersIdEnRoute()
    q = Query(Courier)
    q.filter("courierId in ", unavailId)
    return q