Exemple #1
0
def get_rpc():
    """Create an RPC handle for reads that tolerate slightly stale data.

    The RPC uses eventual consistency, so query results may lag the
    latest writes by a couple of seconds, and it times out after 5s.
    """
    rpc_options = dict(deadline=5, read_policy=db.EVENTUAL_CONSISTENCY)
    return db.create_rpc(**rpc_options)
Exemple #2
0
def _fetch_latest_from_datastore(app_version):
  """Get the latest configuration data for this app-version from the datastore.

  Args:
    app_version: the major version you want configuration data for.

  Side Effects:
    We populate memcache with whatever we find in the datastore.

  Returns:
    A config class instance for most recently set options or None if the
    query could not complete due to a datastore exception.
  """
  # Eventual consistency is acceptable here: slightly stale config beats a
  # failed request, and the deadline bounds how long we can block.
  rpc = db.create_rpc(deadline=DATASTORE_DEADLINE,
                      read_policy=db.EVENTUAL_CONSISTENCY)
  key = _get_active_config_key(app_version)
  config = None
  try:
    config = Config.get(key, rpc=rpc)
    logging.debug('Loaded most recent conf data from datastore.')
  except Exception:
    # Catch Exception rather than a bare ``except:`` so SystemExit and
    # KeyboardInterrupt still propagate; the fetch stays best-effort.
    logging.warning('Tried but failed to fetch latest conf data from the '
                    'datastore.')

  if config:
    # Cache the protobuf-encoded entity so later requests can skip the
    # datastore round trip entirely.
    memcache.set(app_version, db.model_to_protobuf(config).Encode(),
                 namespace=NAMESPACE)
    logging.debug('Wrote most recent conf data into memcache.')

  return config
Exemple #3
0
def _fetch_latest_from_datastore(app_version):
    """Get the latest configuration data for this app-version from the datastore.

    Args:
        app_version: the major version you want configuration data for.

    Side Effects:
        We populate memcache with whatever we find in the datastore.

    Returns:
        A config class instance for most recently set options or None if the
        query could not complete due to a datastore exception.
    """
    # Eventual consistency is acceptable here: slightly stale config beats a
    # failed request, and the deadline bounds how long we can block.
    rpc = db.create_rpc(deadline=DATASTORE_DEADLINE,
                        read_policy=db.EVENTUAL_CONSISTENCY)
    key = _get_active_config_key(app_version)
    config = None
    try:
        config = Config.get(key, rpc=rpc)
        logging.debug('Loaded most recent conf data from datastore.')
    except Exception:
        # Catch Exception rather than a bare ``except:`` so SystemExit and
        # KeyboardInterrupt still propagate; the fetch stays best-effort.
        logging.warning('Tried but failed to fetch latest conf data from the '
                        'datastore.')

    if config:
        # Cache the protobuf-encoded entity so later requests can skip the
        # datastore round trip entirely.
        memcache.set(app_version,
                     db.model_to_protobuf(config).Encode(),
                     namespace=NAMESPACE)
        logging.debug('Wrote most recent conf data into memcache.')

    return config
Exemple #4
0
def get_rpc():
    """Build an RPC for queries that tolerate slightly stale results.

    Reads use eventual consistency with a 5-second deadline, so the data
    may be up to a couple of seconds out of date.
    """
    return db.create_rpc(
        deadline=5,
        read_policy=db.EVENTUAL_CONSISTENCY,
    )
Exemple #5
0
 def post(self):
   """Handle the phone-validation demo form.

   Two modes, selected by the ``step`` request parameter:
     * with ``step``: compare the user-entered secret against the stored
       ValidationRequest and render a right/wrong page.
     * without ``step``: rate-limit by client IP (max 5 demo calls), then
       start an outbound call that reads a generated secret, store the
       request, and redirect to step 2.
   """
   step = self.request.get('step',"")
   phone = self.request.get('phone',"")
   self_recorded = self.request.get('self_recorded',"")
   # Reject only when BOTH are missing; a step-2 post needs no phone.
   if (not phone) and (not step):
     return self.response.out.write('no phone given')

   if (step):
     # Step 2: verify the secret the user typed in.
     access_key = self.request.get('access_key')
     secret = self.request.get('secret')
     # NOTE(review): get_by_key_name may return None for an unknown
     # access_key, which would make the attribute access below raise —
     # confirm callers always supply a valid key.
     ds_entry = ValidationRequest.get_by_key_name(access_key)
     if (ds_entry.secret == int(secret)):
       return self.render_response('validation-demo-right.html')
     else:
       return self.render_response('validation-demo-wrong.html')
   else:
     # Step 1: throttle demo usage per client IP.
     ip = self.request.remote_addr
     ds_existing = DemoClient.get_by_key_name(ip)
     if not ds_existing:
       DemoClient(key_name=ip, times=1).put(rpc=db.create_rpc())
     else:
       if (ds_existing.times > 4):
         return self.redirect('/validator/demo?step=fail')
       ds_existing.times = ds_existing.times + 1
       ds_existing.put(rpc=db.create_rpc())
     target = 'tel:'+phone
     secret = tropo.generate_secret()
     access_key = tropo.generate_key()
     call_context = {
       'to':target,
       'intro':'This is our service demo! Your secret code is :',
       'secret':secret,
       'access_key':access_key,
       'self_recorded':self_recorded,
       'callback_host':CALLBACK_HOST
     }
     # Kick off the outbound call and the datastore write concurrently,
     # then wait on both RPCs before redirecting.
     fetch_rpc = tropo.tropo_run_script(call_context, async=True)
     rpc = db.create_rpc()
     validation_entry = ValidationRequest(
           key_name = access_key,
           target = target,
           api_key = tropo.DEMO_API_KEY,
           secret = int(secret)).put(rpc=rpc)
     rpc.wait()
     fetch_rpc.wait()

   return self.redirect('/validator/demo?step=2&access_key='+access_key)
Exemple #6
0
 def delete_all(cls):
     """Delete every ApartmentCounts entity, in key-only batches of 40.

     Performs at most 25 fetch/delete rounds and stops early once a
     fetch comes back short of a full batch.
     """
     batch_size = 40
     rpc = db.create_rpc(deadline=30, read_policy=db.EVENTUAL_CONSISTENCY)
     key_query = db.GqlQuery("SELECT __key__ FROM ApartmentCounts", rpc=rpc)
     for _ in range(25):
         keys = key_query.fetch(batch_size)
         db.delete(keys)
         if len(keys) < batch_size:
             break
Exemple #7
0
 def delete_all(cls):
     """Remove all ApartmentCounts entities via key-only batch deletes.

     Caps the work at 25 rounds of 40 keys each and exits early when a
     round returns fewer keys than a full batch.
     """
     batch = 40
     rpc = db.create_rpc(deadline=30, read_policy=db.EVENTUAL_CONSISTENCY)
     key_query = db.GqlQuery("SELECT __key__ FROM ApartmentCounts",
                             rpc=rpc)
     rounds = 0
     while rounds < 25:
         keys = key_query.fetch(batch)
         db.delete(keys)
         if len(keys) < batch:
             break
         rounds += 1
Exemple #8
0
    def data_put(models):
        """Persist *models* through ``fn`` using a tuned datastore RPC.

        A single string argument is normalized to a one-element list.

        NOTE(review): ``puts_list`` is built from the generated natural
        kinds but never used — ``fn`` is called with the original
        ``models``.  It looks like ``fn(puts_list, rpc=_rpc)`` may have
        been intended; confirm against ``fn``'s contract before changing.
        """

        _rpc = db.create_rpc(deadline=5, read_policy=db.EVENTUAL_CONSISTENCY)

        if isinstance(models, str):
            models = [models]

        puts_list = []
        for model in models:
            entity, natural = DataController.generateNaturalKind(model)
            puts_list.append(entity)
            if natural is not None: puts_list.append(natural)

        return fn(models, rpc=_rpc)
Exemple #9
0
 def flush(list_of_pending_albums):
     """Index every pending album and persist the index, retrying on
     network errors until the save succeeds.

     A falsy album list is a no-op.
     """
     if not list_of_pending_albums:
         return
     indexer = search.Indexer()
     for album in list_of_pending_albums:
         process_one_album(indexer, album)
     # This runs as a batch job, so set a very long deadline.
     while True:
         try:
             indexer.save(rpc=db.create_rpc(deadline=120))
             return
         except urllib2.URLError:
             # Transient network failure: retry the indexer flush.
             continue
Exemple #10
0
def count_apts(request):
    """Count Apartment entities by scanning key-only batches of 1000.

    Uses an eventually-consistent RPC with an 18-second deadline.
    Datastore errors abort the scan; the message is kept in ``errors``.
    """
    rpc = db.create_rpc(deadline=18, read_policy=db.EVENTUAL_CONSISTENCY)
    q = db.GqlQuery("SELECT __key__ FROM Apartment", rpc=rpc)
    N = 1000
    results = q.fetch(N)
    count = 0
    errors = ''
    try:
        while True:
            count += len(results)
            # Bug fix: count the current batch BEFORE deciding to stop, so
            # the final short batch is included (the old loop dropped it).
            if len(results) < N:
                break
            results = q.fetch(N)
    except Exception as e:  # ``as`` form works on py2.6+ and py3
        errors = str(e)
Exemple #11
0
 def flush(list_of_pending_albums):
     """Process each pending album into the indexer, then save the index.

     Does nothing for an empty list.  The save is retried indefinitely
     on URLError since this runs as a batch job.
     """
     if not list_of_pending_albums:
         return
     indexer = search.Indexer()
     for album in list_of_pending_albums:
         process_one_album(indexer, album)
     # This runs as a batch job, so set a very long deadline.
     saved = False
     while not saved:
         try:
             rpc = db.create_rpc(deadline=120)
             indexer.save(rpc=rpc)
             saved = True
         except urllib2.URLError:
             # Transient network failure: try the flush again.
             pass
Exemple #12
0
def apartment_is_found(cgid):
    """Return True if apartment *cgid* exists with usable location data.

    Results are memoized in memcache for five hours.

    Args:
      cgid: external apartment id used in the GQL lookup.

    Returns:
      bool: whether a matching apartment with location info was found.
    """
    rpc = db.create_rpc(deadline=18, read_policy=db.EVENTUAL_CONSISTENCY)
    ckey = "apt_f_%s" % cgid
    TIME = 3600 * 5
    r = memcache.get(ckey)
    if r is not None:
        # Bug fix: return the cached boolean itself.  The old code
        # returned True unconditionally, so a cached False (apartment not
        # found) was misreported as found.
        return r
    apt = db.GqlQuery("SELECT * FROM Apartment "
                      "WHERE id = :1 LIMIT 1", cgid,
                      rpc=rpc).get()
    result = False
    if apt:
        if apt.location or apt.location_accuracy != "TRY_AGAIN":
            result = True

    memcache.set(ckey, result, TIME)
    return result
Exemple #13
0
def apartment_is_found(cgid):
    """Return True if apartment *cgid* exists with usable location data.

    Results are memoized in memcache for five hours.

    Args:
      cgid: external apartment id used in the GQL lookup.

    Returns:
      bool: whether a matching apartment with location info was found.
    """
    rpc = db.create_rpc(deadline=18, read_policy=db.EVENTUAL_CONSISTENCY)
    ckey = "apt_f_%s" % cgid
    TIME = 3600 * 5
    r = memcache.get(ckey)
    if r is not None:
        # Bug fix: return the cached boolean itself.  The old code
        # returned True unconditionally, so a cached False (apartment not
        # found) was misreported as found.
        return r
    apt = db.GqlQuery("SELECT * FROM Apartment "
                      "WHERE id = :1 LIMIT 1",
                      cgid,
                      rpc=rpc).get()
    result = False
    if apt:
        if apt.location or apt.location_accuracy != "TRY_AGAIN":
            result = True

    memcache.set(ckey, result, TIME)
    return result
Exemple #14
0
    def get(self):
      target = self.request.get('target')
      api_key = self.request.get('api_key')
      secret = self.request.get('secret')
      intro = self.request.get('intro')
      if (not target) or (not api_key):
        return self.response.out.write('target and service key is required params.')

      method = re.match('(\\w+):', target)
      if (method is None):
        return self.response.out.write('target '+target+' not valid')
      
      ds_user_entry = ServiceUser.get_by_key_name(api_key)
      if (not ds_user_entry):
        return self.response.out.write('api key ' + api_key + ' does not exist')
      
      
      method = method.group(1)
      access_key = tropo.generate_key()
      if (method == 'tel' or 'sip'):
        if not secret:
          secret = tropo.generate_secret() 
        if not intro:
          intro = "Hello! Your secret code is :"
        context = {
          'to':target,
          'secret':secret,
          'intro':intro,
          'access_key':access_key
        }
        http_rpc = tropo.tropo_run_script(context, async=True)
        ds_rpc = db.create_rpc()
        ValidationRequest(
          target = target,
          api_key = api_key,
          key_name = access_key,
          secret = int(secret)
        ).put(rpc=ds_rpc)
        resp = json.dumps({'access_key':access_key,'secret':secret})
        http_rpc.wait()
        ds_rpc.wait()
        return self.response.out.write(resp)
      return self.response.out.write('method not supported.')
Exemple #15
0
 def delete_some(cls, num=10, N=30):
     """Delete up to *num* Apartment keys last updated over 3 days ago.

     Fetches key-only batches of *N* and deletes them until roughly
     *num* entities are removed or the query runs dry.

     Args:
       num: maximum number of entities to delete.
       N: batch size for each key-only fetch.

     Returns:
       The number of keys actually deleted.
     """
     read_back = 3
     rpc = db.create_rpc(deadline=30, read_policy=db.EVENTUAL_CONSISTENCY)
     q = db.GqlQuery("SELECT __key__ FROM Apartment WHERE updated < :1",
                     datetime.datetime.now() - datetime.timedelta(days=read_back),
                     rpc=rpc)
     # (Fix: dropped the old pre-loop q.fetch(N) whose result was
     # immediately discarded — a wasted datastore round trip.)
     # One extra round so the final, possibly partial, batch is handled.
     quotient, mod = divmod(num, N)
     quotient += 1
     deleted = 0
     for i in range(quotient):
         results = q.fetch(N)
         c = len(results)
         if i == (quotient - 1):
             # Trim the last round so no more than ``num`` keys go.
             results = results[:mod]
         deleted += len(results)
         db.delete(results)
         if c < N:
             break
     return deleted
Exemple #16
0
 def delete_some(cls, num=10, N=30):
     """Delete up to *num* Apartment keys last updated over 3 days ago.

     Fetches key-only batches of *N* and deletes them until roughly
     *num* entities are removed or the query runs dry.

     Args:
       num: maximum number of entities to delete.
       N: batch size for each key-only fetch.

     Returns:
       The number of keys actually deleted.
     """
     read_back = 3
     rpc = db.create_rpc(deadline=30, read_policy=db.EVENTUAL_CONSISTENCY)
     q = db.GqlQuery("SELECT __key__ FROM Apartment WHERE updated < :1",
                     datetime.datetime.now() -
                     datetime.timedelta(days=read_back),
                     rpc=rpc)
     # (Fix: dropped the old pre-loop q.fetch(N) whose result was
     # immediately discarded — a wasted datastore round trip.)
     # One extra round so the final, possibly partial, batch is handled.
     quotient, mod = divmod(num, N)
     quotient += 1
     deleted = 0
     for i in range(quotient):
         results = q.fetch(N)
         c = len(results)
         if i == (quotient - 1):
             # Trim the last round so no more than ``num`` keys go.
             results = results[:mod]
         deleted += len(results)
         db.delete(results)
         if c < N:
             break
     return deleted
Exemple #17
0
 def save(self, callback):
     """Asynchronously put this entity with a 5-second deadline.

     Args:
       callback: zero-argument callable invoked when the RPC completes.
     """
     rpc = db.create_rpc(deadline=5, callback=callback)
     # Fix: pass the rpc by keyword — db.Model.put() accepts it only as
     # the ``rpc`` keyword argument (matches the other put(rpc=...) call
     # sites in this codebase).
     self.put(rpc=rpc)
Exemple #18
0
 def save(self, callback):
     """Asynchronously put this entity with a 5-second deadline.

     Args:
       callback: zero-argument callable invoked when the RPC completes.
     """
     rpc = db.create_rpc(deadline=5, callback=callback)
     # Fix: pass the rpc by keyword — db.Model.put() accepts it only as
     # the ``rpc`` keyword argument (matches the other put(rpc=...) call
     # sites in this codebase).
     self.put(rpc=rpc)
 def get_current_pictures(self):
     """Return an iterator over all Picture entities.

     The number of results is unbounded, so run the query with an
     explicit RPC (10s deadline, eventual consistency) rather than a
     fixed-size fetch().
     See: https://developers.google.com/appengine/docs/python/datastore/queryclass?hl=ja#Query_fetch
     """
     query_rpc = db.create_rpc(deadline=10,
                               read_policy=db.EVENTUAL_CONSISTENCY)
     return Picture.all().run(rpc=query_rpc)
Exemple #20
0
def get_matched_apartments(aptf, updated_since=YESTERDAY):
    """Score apartments against filter *aptf*, updated since *updated_since*.

    Builds a datastore query narrowed by region, recent update days, and
    up to three of the most selective analyzer filters, then weights
    every fetched apartment with all analyzers.

    Returns:
      (results, total_scanned, debug_info): ``results`` is a list of
      (total_score, per-analyzer score dict, apartment) tuples for
      apartments scoring above 0.1.
    """
    rpc = db.create_rpc(deadline=18, read_policy=db.EVENTUAL_CONSISTENCY)
    q = Apartment.all()

    filter_strings = []

    # Default region when the filter does not specify one.
    if not aptf.region:
        region = 'BOSTON'
    else:
        region = aptf.region

    q.filter("region =", region)
    filter_strings.append(("region =", region))

    # Collect the updated_day values (YYYYMMDD ints) covering the window.
    d = datetime.datetime.now()
    dates = []
    while d > updated_since:
        dates.append(int(d.strftime("%Y%m%d")))
        d -= datetime.timedelta(days=1)


    # Only apply the day filter for short windows (IN filters fan out
    # into one subquery per value).
    if len(dates) <= 5:
        q.filter("updated_day IN", dates)
        filter_strings.append(("updated_day IN", dates))


    breakdown_data = ApartmentCounts.get_data_dict()

    # Estimate each analyzer's result-set size; most selective first.
    filter_estimates = []
    for analyzer in analyzers:
        val = analyzer.filter_size_estimate(breakdown_data, aptf)
        if val != NoFiltering:
            filter_estimates.append((val, analyzer))

    filter_estimates.sort()

    i = 0

    applied = []

    # Apply at most three analyzer filters to the query.
    for score, analyzer in filter_estimates:
        analyzer.update_apt_query(aptf, q, filter_strings)
        applied.append(analyzer)
        i += 1
        if i > 2:
            break

    results = []
    total_scanned = 0

    logging.info(debug_print_info(filter_strings))

    while True:
        curresults = q.fetch(750, rpc=rpc)
        for apartment in curresults:
            # The day-granular filter can overshoot; enforce the exact cutoff.
            if apartment.updated < updated_since:
                continue
            scores = {}
            total = 1
            # Overall score is the product of every analyzer's weight.
            for analyzer in analyzers:
                score = analyzer.create_weight(aptf, apartment)
                scores[analyzer.__class__.__name__] = score
                total *= score
            if total > 0.1:
                results.append((total, scores, apartment))
        total_scanned += len(curresults)
        #if len(curresults) < 1000 or total_scanned > 3000:
        # Deliberate single pass: pagination is disabled (see the
        # commented-out continuation condition directly above).
        break
        #rpc = db.create_rpc(deadline=18, read_policy=db.EVENTUAL_CONSISTENCY)
    return (results, total_scanned, debug_print_info(filter_strings))
Exemple #21
0
def get_matched_apartments(aptf, updated_since=YESTERDAY):
    """Score apartments against filter *aptf*, updated since *updated_since*.

    Builds a datastore query narrowed by region, recent update days, and
    up to three of the most selective analyzer filters, then weights
    every fetched apartment with all analyzers.

    Returns:
      (results, total_scanned, debug_info): ``results`` is a list of
      (total_score, per-analyzer score dict, apartment) tuples for
      apartments scoring above 0.1.
    """
    rpc = db.create_rpc(deadline=18, read_policy=db.EVENTUAL_CONSISTENCY)
    q = Apartment.all()

    filter_strings = []

    # Default region when the filter does not specify one.
    if not aptf.region:
        region = 'BOSTON'
    else:
        region = aptf.region

    q.filter("region =", region)
    filter_strings.append(("region =", region))

    # Collect the updated_day values (YYYYMMDD ints) covering the window.
    d = datetime.datetime.now()
    dates = []
    while d > updated_since:
        dates.append(int(d.strftime("%Y%m%d")))
        d -= datetime.timedelta(days=1)

    # Only apply the day filter for short windows (IN filters fan out
    # into one subquery per value).
    if len(dates) <= 5:
        q.filter("updated_day IN", dates)
        filter_strings.append(("updated_day IN", dates))

    breakdown_data = ApartmentCounts.get_data_dict()

    # Estimate each analyzer's result-set size; most selective first.
    filter_estimates = []
    for analyzer in analyzers:
        val = analyzer.filter_size_estimate(breakdown_data, aptf)
        if val != NoFiltering:
            filter_estimates.append((val, analyzer))

    filter_estimates.sort()

    i = 0

    applied = []

    # Apply at most three analyzer filters to the query.
    for score, analyzer in filter_estimates:
        analyzer.update_apt_query(aptf, q, filter_strings)
        applied.append(analyzer)
        i += 1
        if i > 2:
            break

    results = []
    total_scanned = 0

    logging.info(debug_print_info(filter_strings))

    while True:
        curresults = q.fetch(750, rpc=rpc)
        for apartment in curresults:
            # The day-granular filter can overshoot; enforce the exact cutoff.
            if apartment.updated < updated_since:
                continue
            scores = {}
            total = 1
            # Overall score is the product of every analyzer's weight.
            for analyzer in analyzers:
                score = analyzer.create_weight(aptf, apartment)
                scores[analyzer.__class__.__name__] = score
                total *= score
            if total > 0.1:
                results.append((total, scores, apartment))
        total_scanned += len(curresults)
        #if len(curresults) < 1000 or total_scanned > 3000:
        # Deliberate single pass: pagination is disabled (see the
        # commented-out continuation condition directly above).
        break
        #rpc = db.create_rpc(deadline=18, read_policy=db.EVENTUAL_CONSISTENCY)
    return (results, total_scanned, debug_print_info(filter_strings))