Example #1
def GetImageRecords(photo_ids):
  """Queries the ImageRecord db, w/ memcaching. Returns photo_id -> rec dict"""
  # check if we've got the whole thing in memcache
  multi_key = VERSION + 'MIR' + ','.join(photo_ids)
  recs = memcache.get(multi_key)
  if recs: return recs

  keys_no_prefix = photo_ids[:]
  key_prefix = VERSION + 'IR'

  record_map = memcache.get_multi(keys_no_prefix, key_prefix=key_prefix)
  missing_ids = list(set(keys_no_prefix) - set(record_map.keys()))
  if not missing_ids: return record_map

  config = db.create_config(read_policy=db.EVENTUAL_CONSISTENCY)
  db_recs = ImageRecord.get_by_key_name(missing_ids, config=config)

  memcache_map = {}
  for id, r in zip(missing_ids, db_recs):
    record_map[id] = r
    memcache_map[id] = r

  if memcache_map:
    memcache.add_multi(memcache_map, key_prefix=key_prefix)
    memcache.add(multi_key, record_map)
  return record_map
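
A note on the pattern above: add_multi, unlike set_multi, only writes keys that are absent, and it returns the list of keys it skipped. A minimal standalone sketch of that contract (keys and values here are illustrative):

import logging
from google.appengine.api import memcache

# add_multi returns the keys it did NOT store (already present or failed);
# an empty list means every entry was written.
not_stored = memcache.add_multi({'k1': 'a', 'k2': 'b'}, key_prefix='demo:')
if not_stored:
    logging.warning('already cached or failed: %s', not_stored)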
Example #2
def get_data_from_cache(entity, key_prefix):
    tk = []
    for k in entity:
        tk.append(str(k.urlsafe()))

    logging.info(tk)

    # get memcache: results
    cache_results = memcache.get_multi(
        keys=tk, key_prefix=key_prefix)

    result = []
    memcache_to_add = {}
    for wanted in tk:
        if wanted not in cache_results:
            # not in cache
            logging.info('not found')
            memcache_to_add[wanted] = ndb.Key(urlsafe=wanted).get()
            result.append(memcache_to_add[wanted])
        else:
            # found in cache
            logging.info('found')
            result.append(cache_results[wanted])

    logging.info('memcache(s) to add: ' + str(len(memcache_to_add)))

    if len(memcache_to_add) > 0:
        memcache.add_multi(
            memcache_to_add, key_prefix=key_prefix, time=3600)

    return result
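
One possible refinement, sketched under the same key scheme: the miss branch above issues one datastore get per key, whereas ndb.get_multi would fetch all misses in a single batch.

# Sketch: batch the cache misses instead of calling .get() once per key.
missing = [k for k in tk if k not in cache_results]
entities = ndb.get_multi([ndb.Key(urlsafe=k) for k in missing])
# entities is ordered like missing; entries are None for nonexistent keys.
memcache_to_add = dict(zip(missing, entities))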
Example #3
 def parse_links(self):
     if not self.parse_done:
         url = "http://lynxjuan.com/"+str(self.year)+"/"+str(self.month)+"/"+str(self.day)
         website = urlfetch.fetch(url)
         page = str(website.content)
         tree = html.fromstring(page)
         links = tree.findall(".//a[@rel='bookmark']")
         link_dict = {}
         for link in links:
             draw_url = link.get('href')
             if '55' in draw_url:
                 link_dict['55'] = draw_url
             elif '49' in draw_url:
                 link_dict['49'] = draw_url
             elif '45' in draw_url:
                 link_dict['45'] = draw_url
             elif '42' in draw_url:
                 link_dict['42'] = draw_url
             elif ('6d' in draw_url) or ('six' in draw_url):
                 link_dict['6d'] = draw_url
             elif '4d' in draw_url:
                 link_dict['4d'] = draw_url
             elif ('swertres' in draw_url) or ('3d' in draw_url) or ('suertres' in draw_url):
                 link_dict['3d'] = draw_url
             elif ('ez2' in draw_url) or ('2d' in draw_url):
                 link_dict['2d'] = draw_url
         self.parse_done = True
         memcache.add(key='keys', value=link_dict.keys(), time=3600)
         memcache.add_multi(link_dict, key_prefix='link_dict', time=3600)
         return link_dict
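
The matching reader is not shown; presumably it fetches the stored key list first and then issues a get_multi under the same prefix. A hypothetical sketch:

def get_links():
    # Hypothetical reader for the cache written by parse_links above.
    keys = memcache.get('keys')
    if keys is None:
        return None  # cache cold: parse_links needs to run again
    return memcache.get_multi(keys, key_prefix='link_dict')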
Example #4
    def get(self):
        user = users.get_current_user()
        context = {
            'login_url': users.create_login_url('/'),
            'logout_url': users.create_logout_url('/'),
            'is_logged_in': user is not None,
            'is_admin': users.is_current_user_admin(),
        }

        result = memcache.get_multi(['proxies', 'moderated_proxies'])

        if 'proxies' in result:
            logging.info('proxies cache hit')
            context['proxies'] = result['proxies']
        else:
            logging.info('proxies cache miss')
            context['proxies'] = Proxy.all().filter('approved =', True).order('name')

        if 'moderated_proxies' in result:
            logging.info('moderated proxies cache hit')
            context['moderated_proxies'] = result['moderated_proxies']
        else:
            logging.info('moderated proxies cache miss')
            context['moderated_proxies'] = Proxy.all().filter('approved =', False).order('name')

        memcache.add_multi({
            'proxies': context['proxies'],
            'moderated_proxies': context['moderated_proxies'],
        })

        self.render_response('index.html', context)
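
A caveat worth noting: context['proxies'] above is a db Query object, and add_multi is called without a time argument. The more common pattern is to cache materialized results with an expiry; a sketch under that assumption (the fetch limit of 1000 is illustrative):

# Sketch: cache fetched results rather than the query object, with an
# expiry so moderation changes eventually become visible.
proxies = Proxy.all().filter('approved =', True).order('name').fetch(1000)
memcache.add_multi({'proxies': proxies}, time=3600)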
Example #5
    def _Regenerate(self):
        """Regenerates the rule context."""
        context = self._BuildContext()

        if self.online and not IsPredictionMode():
            # We do not want to count ourself twice.
            # We know that the UserRegistration entry that triggered the call is
            # already stored in datastore when we reach this code - except if we
            # are predicting outcome.
            for key, value in context.iteritems():
                context[key] = value - 1
                assert value > 0

        # The following code adds the key/values only if not present and then
        # sets an __init__ flag which contains an enumeration of the keys.
        # Since the memcache.add() calls are not transactional, it is possible
        # for the __init__ key to be set while some of the values associated
        # with the keys are missing (for example, if the cache gets flushed
        # between the add_multi and add calls). This is a very remote/rare
        # case though, and the situation still needs to be addressed in the
        # _Incr / _Decr methods anyway, since keys can get evicted from the
        # cache at any time.

        # We add the values if and only if they are not present.
        memcache.add_multi(context, namespace=self.namespace)
        # The __init__ contains a list of available keys for this context
        memcache.add("__init__", context.keys(), namespace=self.namespace)
Example #6
def GetImageRecords(photo_ids):
    """Queries the ImageRecord db, w/ memcaching. Returns photo_id -> rec dict"""
    # check if we've got the whole thing in memcache
    keys_no_prefix = photo_ids[:]
    if MEMCACHE_ENABLED:
        multi_key = VERSION + 'MIR' + ','.join(photo_ids)
        recs = memcache.get(multi_key)
        if recs: return recs

        key_prefix = VERSION + 'IR'

        record_map = memcache.get_multi(keys_no_prefix, key_prefix=key_prefix)
        missing_ids = list(set(keys_no_prefix) - set(record_map.keys()))
        if not missing_ids: return record_map
    else:
        missing_ids = keys_no_prefix
        record_map = {}

    config = db.create_config(read_policy=db.EVENTUAL_CONSISTENCY)
    db_recs = ImageRecord.get_by_key_name(missing_ids, config=config)

    memcache_map = {}
    for id, r in zip(missing_ids, db_recs):
        record_map[id] = r
        memcache_map[id] = r

    if MEMCACHE_ENABLED and memcache_map:
        memcache.add_multi(memcache_map, key_prefix=key_prefix)
        memcache.add(multi_key, record_map)
    return record_map
Example #7
 def add_default_entities_to_memcache(cls, ids):
   """Add blank entities to memcache so get_by_ids quickly returns them."""
   entities = {}
   for key in ids:
     entities[key] = cls(key_name=cls.DATASTORE_PREFIX + key)
   memcache.add_multi(entities, time=cls.MEMCACHE_TIME,
                      key_prefix=cls.MEMCACHE_PREFIX)
Example #8
def prefetch_posts_list(posts):
    prefetch_refprops(posts, Post.user)
    posts_keys = [str(post.key()) for post in posts]

    # get user, if no user, all already_voted = no
    session = get_current_session()
    if session.has_key('user'):
        user = session['user']
        memcache_voted_keys = [
            "vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys
        ]
        memcache_voted = memcache.get_multi(memcache_voted_keys)
        memcache_to_add = {}
        for post in posts:
            logging.info("Got a post")
            vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" +
                                            str(user.key()))
            if vote_value is not None:
                post.prefetched_already_voted = vote_value == 1
            else:
                vote = Vote.all().filter("user ="******"post =", post).fetch(1)
                memcache_to_add["vp_" + str(post.key()) + "_" +
                                str(user.key())] = len(vote)
                post.prefetched_already_voted = len(vote) == 1
        if memcache_to_add.keys():
            memcache.add_multi(memcache_to_add, 3600)
    else:
        for post in posts:
            post.prefetched_already_voted = False
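
The vp_ entries cached here go stale the moment the user votes; a hypothetical write-side hook that refreshes the flag instead of waiting out the 3600-second expiry:

def on_vote_created(post, user):
    # Hypothetical: overwrite the cached "already voted" flag immediately.
    memcache.set("vp_" + str(post.key()) + "_" + str(user.key()), 1,
                 time=3600)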
Example #9
def prefetch_posts_list(posts):
  prefetch_refprops(posts, Post.user)
  posts_keys = [str(post.key()) for post in posts]

  # get user, if no user, all already_voted = no
  session = get_current_session()
  if session.has_key('user'): 
    user = session['user']
    memcache_voted_keys = ["vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys]
    memcache_voted = memcache.get_multi(memcache_voted_keys)
    memcache_to_add = {}
    for post in posts:
      logging.info("Got a post")
      vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" +str(user.key()))
      if vote_value is not None:
        post.prefetched_already_voted = vote_value == 1
      else:
        vote = Vote.all().filter("user ="******"post =", post).fetch(1) 
        memcache_to_add["vp_" + str(post.key()) + "_" + str(user.key())] = len(vote)
        post.prefetched_already_voted = len(vote) == 1
    if memcache_to_add.keys():
      memcache.add_multi(memcache_to_add, 3600)
  else:
    for post in posts:
      post.prefetched_already_voted = False
Example #10
    def _Regenerate(self):
        """Regenerates the rule context."""
        context = self._BuildContext()

        if self.online and not IsPredictionMode():
            # We do not want to count ourself twice.
            # We know that the UserRegistration entry that triggered the call is
            # already stored in datastore when we reach this code - except if we
            # are predicting outcome.
            for key, value in context.iteritems():
                context[key] = value - 1
                assert value > 0

        # The following code adds the key/values only if not present and then
        # sets an __init__ flag which contains an enumeration of the keys.
        # Since the memcache.add() calls are not transactional, it is possible
        # for the __init__ key to be set while some of the values associated
        # with the keys are missing (for example, if the cache gets flushed
        # between the add_multi and add calls). This is a very remote/rare
        # case though, and the situation still needs to be addressed in the
        # _Incr / _Decr methods anyway, since keys can get evicted from the
        # cache at any time.

        # We add the values if and only if they are not present.
        memcache.add_multi(context, namespace=self.namespace)
        # The __init__ contains a list of available keys for this context
        memcache.add('__init__', context.keys(), namespace=self.namespace)
Example #11
 def _get_tasklet(self, todo):
   assert todo
   # First check memcache.
   memkeymap = {}
   for fut, key, options in todo:
     if self._use_memcache(key, options):
       memkeymap[key] = key.urlsafe()
   if memkeymap:
     results = memcache.get_multi(memkeymap.values(),
                                  key_prefix=self._memcache_prefix)
     leftover = []
     for fut, key, options in todo:
       mkey = memkeymap.get(key)
       if mkey is not None and mkey in results:
         pb = results[mkey]
         ent = self._conn.adapter.pb_to_entity(pb)
         fut.set_result(ent)
       else:
         leftover.append((fut, key, options))
     todo = leftover
   # Segregate things by ConfigOptions.
   by_options = {}
   for fut, key, options in todo:
     if options in by_options:
       futures, keys = by_options[options]
     else:
       futures, keys = by_options[options] = [], []
     futures.append(fut)
     keys.append(key)
   # Make the RPC calls.
   mappings = {}  # Maps timeout value to {urlsafe_key: pb} mapping.
   for options, (futures, keys) in by_options.iteritems():
     datastore_futures = []
     datastore_keys = []
     for fut, key in zip(futures, keys):
       if self._use_datastore(key, options):
         datastore_keys.append(key)
         datastore_futures.append(fut)
       else:
         fut.set_result(None)
     if datastore_keys:
       entities = yield self._conn.async_get(options, datastore_keys)
       for ent, fut, key in zip(entities, datastore_futures, datastore_keys):
         fut.set_result(ent)
         if ent is not None and self._use_memcache(key, options):
           pb = self._conn.adapter.entity_to_pb(ent)
           timeout = self._get_memcache_timeout(key, options)
           mapping = mappings.get(timeout)
           if mapping is None:
             mapping = mappings[timeout] = {}
           mapping[ent._key.urlsafe()] = pb
   if mappings:
     # If the timeouts are not uniform, make a separate call for each
     # distinct timeout value.
     for timeout, mapping in mappings.iteritems():
       # Use add, not set.  This is a no-op within _LOCK_TIME seconds
       # of the delete done by the most recent write.
       memcache.add_multi(mapping, time=timeout,
                          key_prefix=self._memcache_prefix)
Example #12
 def _get_tasklet(self, todo):
   assert todo
   # First check memcache.
   memkeymap = {}
   for fut, key, options in todo:
     if self._use_memcache(key, options):
       memkeymap[key] = key.urlsafe()
   if memkeymap:
     results = memcache.get_multi(memkeymap.values(),
                                  key_prefix=self._memcache_prefix)
     leftover = []
     for fut, key, options in todo:
       mkey = memkeymap.get(key)
       if mkey is not None and mkey in results:
         pb = results[mkey]
         ent = self._conn.adapter.pb_to_entity(pb)
         fut.set_result(ent)
       else:
         leftover.append((fut, key, options))
     todo = leftover
   # Segregate things by ConfigOptions.
   by_options = {}
   for fut, key, options in todo:
     if options in by_options:
       futures, keys = by_options[options]
     else:
       futures, keys = by_options[options] = [], []
     futures.append(fut)
     keys.append(key)
   # Make the RPC calls.
   mappings = {}  # Maps timeout value to {urlsafe_key: pb} mapping.
   for options, (futures, keys) in by_options.iteritems():
     datastore_futures = []
     datastore_keys = []
     for fut, key in zip(futures, keys):
       if self._use_datastore(key, options):
         datastore_keys.append(key)
         datastore_futures.append(fut)
       else:
         fut.set_result(None)
     if datastore_keys:
       entities = yield self._conn.async_get(options, datastore_keys)
       for ent, fut, key in zip(entities, datastore_futures, datastore_keys):
         fut.set_result(ent)
         if ent is not None and self._use_memcache(key, options):
           pb = self._conn.adapter.entity_to_pb(ent)
           timeout = self._get_memcache_timeout(key, options)
           mapping = mappings.get(timeout)
           if mapping is None:
             mapping = mappings[timeout] = {}
           mapping[ent._key.urlsafe()] = pb
   if mappings:
     # If the timeouts are not uniform, make a separate call for each
     # distinct timeout value.
     for timeout, mapping in mappings.iteritems():
       # Use add, not set.  This is a no-op within _LOCK_TIME seconds
       # of the delete done by the most recent write.
       memcache.add_multi(mapping, time=timeout,
                          key_prefix=self._memcache_prefix)
Example #13
 def add_default_entities_to_memcache(cls, ids):
     """Add blank entities to memcache so get_by_ids quickly returns them."""
     entities = {}
     for key in ids:
         entities[key] = cls(key_name=cls.DATASTORE_PREFIX + key)
     memcache.add_multi(entities,
                        time=cls.MEMCACHE_TIME,
                        key_prefix=cls.MEMCACHE_PREFIX)
Example #14
 def testCheckStart_Rejected(self):
     now = 0.0
     keysets = ratelimiter._CreateApiCacheKeys(self.client_id,
                                               self.client_email, now)
     values = [{key: ratelimiter.DEFAULT_API_QPM + 1
                for key in keyset} for keyset in keysets]
     for value in values:
         memcache.add_multi(value)
     with self.assertRaises(ratelimiter.ApiRateLimitExceeded):
         self.ratelimiter.CheckStart(self.client_id, self.client_email, now)
Example #15
    def testMulti(self):
        """Stores multiple keys' values at once."""

        memcache.set_multi({'map_key_one': 1, 'map_key_two': u'some value'})
        values = memcache.get_multi(['map_key_one', 'map_key_two'])
        assert {'map_key_one': 1, 'map_key_two': u'some value'} == values

        memcache.add_multi(
            {'map_key_one': 'one', 'map_key_two': 2, 'three': u'trois'})
        values = memcache.get_multi(['map_key_two', 'three'])
        assert {'map_key_two': u'some value', 'three': u'trois'} == values
Example #16
 def testCheckStart_fail(self):
     request, _ = testing_helpers.GetRequestObjects(project=self.project)
     request.headers['X-AppEngine-Country'] = 'US'
     request.remote_addr = '192.168.1.0'
     now = 0.0
     cachekeysets, _, _, _ = ratelimiter._CacheKeys(request, now)
     values = [{key: ratelimiter.DEFAULT_LIMIT
                for key in cachekeys} for cachekeys in cachekeysets]
     for value in values:
         memcache.add_multi(value)
     with self.assertRaises(ratelimiter.RateLimitExceeded):
         self.ratelimiter.CheckStart(request, now)
Example #17
    def testCheckStart_expiredEntries(self):
        request, _ = testing_helpers.GetRequestObjects(project=self.project)
        request.headers['X-AppEngine-Country'] = 'US'
        request.remote_addr = '192.168.1.0'
        now = 0.0
        cachekeysets, _, _, _ = ratelimiter._CacheKeys(request, now)
        values = [{key: ratelimiter.DEFAULT_LIMIT
                   for key in cachekeys} for cachekeys in cachekeysets]
        for value in values:
            memcache.add_multi(value)

        now = now + 2 * ratelimiter.EXPIRE_AFTER_SECS
        self.ratelimiter.CheckStart(request, now)
Example #18
 def testCheckStart_Allowed_LowQPMIgnored(self):
     """Client specifies a QPM lower than the default and default is used."""
     now = 0.0
     keysets = ratelimiter._CreateApiCacheKeys(self.client_id,
                                               self.client_email, now)
     qpm_dict = client_config_svc.GetQPMDict()
     qpm_dict[self.client_email] = ratelimiter.DEFAULT_API_QPM - 10
     values = [{key: ratelimiter.DEFAULT_API_QPM
                for key in keyset} for keyset in keysets]
     for value in values:
         memcache.add_multi(value)
     self.ratelimiter.CheckStart(self.client_id, self.client_email, now)
     del qpm_dict[self.client_email]
Example #19
    def testMulti(self):
        """Stores multiple keys' values at once."""

        memcache.set_multi({'map_key_one': 1, 'map_key_two': u'some value'})
        values = memcache.get_multi(['map_key_one', 'map_key_two'])
        assert {'map_key_one': 1, 'map_key_two': u'some value'} == values

        memcache.add_multi({
            'map_key_one': 'one',
            'map_key_two': 2,
            'three': u'trois'
        })
        values = memcache.get_multi(['map_key_two', 'three'])
        assert {'map_key_two': u'some value', 'three': u'trois'} == values
Example #20
 def testCheckStart_Allowed_HigherQPMSpecified(self):
     """Client goes over the default, but has a higher QPM set."""
     now = 0.0
     keysets = ratelimiter._CreateApiCacheKeys(self.client_id,
                                               self.client_email, now)
     qpm_dict = client_config_svc.GetQPMDict()
     qpm_dict[self.client_email] = ratelimiter.DEFAULT_API_QPM + 10
     # The client used 1 request more than the default limit in each of the
     # 5 minutes of our 5-minute sample window, putting it 5 over in total.
     values = [{key: ratelimiter.DEFAULT_API_QPM + 1
                for key in keyset} for keyset in keysets]
     for value in values:
         memcache.add_multi(value)
     self.ratelimiter.CheckStart(self.client_id, self.client_email, now)
     del qpm_dict[self.client_email]
Example #21
def prefetch_comment_list(comments):
    prefetch_refprops(comments, Comment.user, Comment.post)

    # call all the memcache information
    # starting by the already_voted area
    comment_keys = [str(comment.key()) for comment in comments]
    session = get_current_session()
    if session.has_key('user'):
        user = session['user']
        memcache_voted_keys = [
            "cp_" + comment_key + "_" + str(user.key())
            for comment_key in comment_keys
        ]
        memcache_voted = memcache.get_multi(memcache_voted_keys)
        memcache_to_add = {}
        for comment in comments:
            vote_value = memcache_voted.get("cp_" + str(comment.key()) + "_" +
                                            str(user.key()))
            if vote_value is not None:
                comment.prefetched_already_voted = vote_value == 1
            else:
                vote = Vote.all().filter("user ="******"comment =",
                                                      comment).fetch(1)
                memcache_to_add["cp_" + str(comment.key()) + "_" +
                                str(user.key())] = len(vote)
                comment.prefetched_already_voted = len(vote) == 1
        if memcache_to_add.keys():
            memcache.add_multi(memcache_to_add, 3600)
    else:
        for comment in comments:
            comment.prefetched_already_voted = False
    # now the sum_votes
    memcache_sum_votes_keys = [
        "c_" + comment_key for comment_key in comment_keys
    ]
    memcache_sum_votes = memcache.get_multi(memcache_sum_votes_keys)
    memcache_to_add = {}
    for comment in comments:
        sum_votes_value = memcache_sum_votes.get("c_" + str(comment.key()))
        if sum_votes_value is not None:
            comment.prefetched_sum_votes = sum_votes_value
        else:
            sum_votes = Vote.all().filter("comment =", comment).count()
            memcache_to_add["c_" + str(comment.key())] = sum_votes
            comment.prefetched_sum_votes = sum_votes
    if memcache_to_add.keys():
        memcache.add_multi(memcache_to_add, 3600)
Example #22
    def testAddMulti(self):
        """Tests setting multiple keys when policy is ADD."""
        mapping1 = {
            self.key1: self.value1,
            self.key2: self.value2,
        }
        self.assertEqual([], memcache.add_multi(mapping1))

        mapping2 = {
            self.key2: self.value3,
            self.key3: self.value3,
        }

        self.assertEqual([self.key2], memcache.add_multi(mapping2))
        self.assertEqual(self.value1, memcache.get(self.key1))
        self.assertEqual(self.value2, memcache.get(self.key2))
        self.assertEqual(self.value3, memcache.get(self.key3))
Example #23
    def set(cls, key, data, ttl=0, maxsize=None):
        """ Public method that sets data into memcache and blobstore
            param @key is String
            param @data is String
            param @ttl is Integer (seconds)
            param @maxsize is Integer
        """ 

        if data is None:
            return

        cls.remove(key)

        data = str(data)

        if ttl and str(ttl).isdigit():
            ttl = int(ttl)
        else:
            ttl = 0

        if maxsize is not None:
            cls.__memcache_block = maxsize

        ba = array("B", data)
        ln = len(ba)

        if ln > cls.__memcache_block:
            blocks = ln / cls.__memcache_block
            res = ln % cls.__memcache_block
            bls = {}
            cont = 0
            # Slice on exact block boundaries (the upper bound of a Python
            # slice is exclusive), so no byte is dropped between chunks.
            while cont < blocks:
                bls[key + "_" + str(cont)] = (ba[cont * cls.__memcache_block:(cont + 1) * cls.__memcache_block]).tostring()
                cls.__saveBlob(key + "_" + str(cont), bls[key + "_" + str(cont)], ttl * 1)
                cont = cont + 1

            if res > 0:
                bls[key + "_" + str(cont)] = (ba[cont * cls.__memcache_block:cont * cls.__memcache_block + res]).tostring()
                cls.__saveBlob(key + "_" + str(cont), bls[key + "_" + str(cont)], ttl * 1)

            memcache.delete_multi(bls.keys())
            memcache.add_multi(bls, time=ttl)
        else:
            memcache.add(key, data, time=ttl)
            cls.__saveBlob(key, data, ttl*1)
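
The read side of this chunking scheme is not part of the excerpt; a sketch of a compatible reader, assuming the caller knows (or stores) the number of blocks:

    @classmethod
    def get_chunked(cls, key, blocks):
        # Hypothetical reader for the key_0..key_N scheme used by set().
        chunk_keys = [key + "_" + str(i) for i in range(blocks)]
        found = memcache.get_multi(chunk_keys)
        if len(found) != len(chunk_keys):
            return None  # any evicted chunk makes the whole value a miss
        return "".join(found[k] for k in chunk_keys)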
Example #24
 def lookup_many_cached(cls, ids):
     keys = []
     for args in ids:
         keys.append(cls.key_name(*args))
     found = memcache.get_multi(keys)
     missing = []
     for key in keys:
         if key not in found:
             missing.append(key)
     logging.info("%d objects not cached - fetching" % len(missing))
     fetched = cls.get_by_key_name(missing)
     added = {}
     for key, instance in zip(missing, fetched):
         found[key] = instance
         added[key] = instance

     if added:
         memcache.add_multi(added)
     return found
Example #25
File: data.py  Project: tstone/Kolormodo
 def get_colorschemes(self, count, offset, sort, force_load=False):
     # Get the default front page query from cache (if present)
     if sort == '-published' and offset == 0 and count == 10 and not force_load:
         scheme_key = 'schemes_-published_0_10'
         total_key = 'total_-published_0_10'
         # Hit Memcache
         mc = memcache.get_multi([scheme_key, total_key])
         if scheme_key in mc and total_key in mc:
             schemes = mc[scheme_key]
             pagination = self.paginate(mc[total_key], 10, offset)
         else:
             # Cache for 6 hrs (sorry, not enough users)
             schemes, pagination = self.get_colorschemes(count, offset, sort, force_load=True)
             memcache.add_multi({scheme_key: schemes, total_key: pagination['total']}, time=21600)
     else:
         schemes = ColorScheme.all().order(sort).fetch(count, offset)
         total = ColorScheme.all().order(sort).count(200)
         pagination = self.paginate(total, count, offset)
     return (schemes, pagination)
Example #26
 def reset_public_web_cmd_cache(web_cmds):
     web_cmd_objs = {}
     cmds = []
     for web_cmd in web_cmds:
         web_cmd_objs[web_cmd.cmd] = web_cmd
         cmds.append(web_cmd.cmd)
     Caching().flush_public_web_cmd_cache(cmds)
     memcache_add_web_cmd_objs_result = memcache.add_multi(web_cmd_objs, 0, WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_PUBLIC_CMD)
     memcache_add_web_cmds_result = memcache.add(WEB_CMDS_MEMCACHE_KEY + IS_PUBLIC_CMD, cmds, 0)
     if memcache_add_web_cmds_result and len(memcache_add_web_cmd_objs_result) == 0:
         web_cmd_objs_memcached = memcache.get_multi(cmds, WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_PUBLIC_CMD)
         return web_cmd_objs_memcached
Example #27
 def reset_user_web_cmd_cache(web_cmds):
     user = users.get_current_user()
     web_cmd_objs = {}
     cmds = []
     for web_cmd in web_cmds:
         web_cmd_objs[web_cmd.cmd] = web_cmd
         cmds.append(web_cmd.cmd)
     Caching().flush_user_web_cmd_cache(cmds)
     memcache_add_web_cmd_objs_result = memcache.add_multi(web_cmd_objs, 0, WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_USER_CMD + str(user) + '_')
     memcache_add_web_cmds_result = memcache.add(WEB_CMDS_MEMCACHE_KEY + IS_USER_CMD + str(user) + '_', cmds, 0)
     if memcache_add_web_cmds_result and len(memcache_add_web_cmd_objs_result) == 0:
         web_cmd_objs_memcached = memcache.get_multi(cmds, WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_USER_CMD + str(user) + '_')
         return web_cmd_objs_memcached
Example #28
  def _WriteToMemcache(self, retrieved_dict):
    """Write entries for each key-value pair to memcache.  Encode PBs."""
    strs_to_cache = {
        self._KeyToStr(key): self._ValueToStr(value)
        for key, value in retrieved_dict.iteritems()}

    try:
      memcache.add_multi(
          strs_to_cache, key_prefix=self.memcache_prefix,
          time=framework_constants.MEMCACHE_EXPIRATION)
    except ValueError as e:
      # If memcache does not accept the values, ensure that no stale
      # values are left, then bail out.
      logging.error('Got memcache error: %r', e)
      memcache.delete_multi(
          strs_to_cache.keys(), seconds=5,
          key_prefix=self.memcache_prefix)
      return

    logging.info('cached batch of %d values in memcache %s',
                 len(retrieved_dict), self.memcache_prefix)
    logging.info('_WriteToMemcache wrote %r', retrieved_dict)
Example #29
def get_events(query_filter=None):
    logging.info('[SERVICE]: Getting events')
    events_ids = memcache.get('events_ids')
    if events_ids is not None:
        events = memcache.get_multi(events_ids)
        # get_multi returns only the hits, so a short dict means stale ids.
        if len(events) == len(events_ids):
            return events.values()
        else:
            memcache.delete('events_ids')
            return get_events(query_filter)
    else:
        if not query_filter:
            events = Event.query()
        else:
            events = Event.query().filter(query_filter)
        events_ids = events.map(lambda event: str(event.id))
        memcache.add('events_ids', events_ids, 3600)
        event_mapping = {}
        for event in events:
            event_mapping[str(event.id)] = event
        memcache.add_multi(event_mapping, 3600)
        return events
Example #30
def get_news(query_filter=None):
    logging.info('[SERVICE]: Getting news')
    news_ids = memcache.get('news_ids')
    if news_ids is not None:
        news = memcache.get_multi(news_ids)
        # get_multi returns only the hits, so a short dict means stale ids.
        if len(news) == len(news_ids):
            return news.values()
        else:
            memcache.delete('news_ids')
            return get_news(query_filter)
    else:
        if not query_filter:
            news = News.query()
        else:
            news = News.query().filter(query_filter)
        news_ids = news.map(lambda n: str(n.id))
        memcache.add('news_ids', news_ids, 3600)
        news_mapping = {}
        for news_el in news:
            news_mapping[str(news_el.id)] = news_el
        memcache.add_multi(news_mapping, 3600)
        return news
Example #31
def prefetch_comment_list(comments):
  prefetch_refprops(comments, Comment.user, Comment.post)

  # call all the memcache information
  # starting by the already_voted area
  comment_keys = [str(comment.key()) for comment in comments]
  session = get_current_session()
  if session.has_key('user'):
    user = session['user']
    memcache_voted_keys = ["cp_" + comment_key + "_" + str(user.key()) for comment_key in comment_keys]
    memcache_voted = memcache.get_multi(memcache_voted_keys)
    memcache_to_add = {}
    for comment in comments:
      vote_value = memcache_voted.get("cp_" + str(comment.key()) + "_" +str(user.key()))
      if vote_value is not None:
        comment.prefetched_already_voted = vote_value == 1
      else:
        vote = Vote.all().filter("user ="******"comment =", comment).fetch(1)
        memcache_to_add["cp_" + str(comment.key()) + "_" + str(user.key())] = len(vote)
        comment.prefetched_already_voted = len(vote) == 1
    if memcache_to_add.keys():
      memcache.add_multi(memcache_to_add, 3600)
  else:
    for comment in comments:
      comment.prefetched_already_voted = False
  # now the sum_votes
  memcache_sum_votes_keys = ["c_" + comment_key for comment_key in comment_keys]
  memcache_sum_votes = memcache.get_multi(memcache_sum_votes_keys)
  memcache_to_add = {}
  for comment in comments:
    sum_votes_value = memcache_sum_votes.get("c_" + str(comment.key()))
    if sum_votes_value is not None:
      comment.prefetched_sum_votes = sum_votes_value
    else:
      sum_votes = Vote.all().filter("comment =", comment).count()
      memcache_to_add["c_" + str(comment.key())] = sum_votes
      comment.prefetched_sum_votes = sum_votes
  if memcache_to_add.keys():
    memcache.add_multi(memcache_to_add, 3600)
Example #32
def prefetch_posts_list(posts):
    prefetch_refprops(posts, Post.user)
    posts_keys = [str(post.key()) for post in posts]

    # get user, if no user, all already_voted = no
    session = get_current_session()
    if session.has_key('user'):
        user = session['user']
        memcache_voted_keys = [
            "vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys
        ]
        memcache_voted = memcache.get_multi(memcache_voted_keys)
        memcache_to_add = {}
        for post in posts:
            vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" +
                                            str(user.key()))
            if vote_value is not None:
                post.prefetched_already_voted = vote_value == 1
            else:
                vote = Vote.all().filter("user ="******"post =", post).fetch(1)
                memcache_to_add["vp_" + str(post.key()) + "_" +
                                str(user.key())] = len(vote)
                post.prefetched_already_voted = len(vote) == 1
        if memcache_to_add.keys():
            memcache.add_multi(memcache_to_add, 3600)
    else:
        for post in posts:
            post.prefetched_already_voted = False
    # now the sum_votes
    memcache_sum_votes_keys = ["p_" + post_key for post_key in posts_keys]
    memcache_sum_votes = memcache.get_multi(memcache_sum_votes_keys)
    memcache_to_add = {}
    for post in posts:
        sum_votes_value = memcache_sum_votes.get("p_" + str(post.key()))
        if sum_votes_value is not None:
            post.prefetched_sum_votes = sum_votes_value
        else:
            sum_votes = Vote.all().filter("post =", post).count()
            memcache_to_add["p_" + str(post.key())] = sum_votes
            post.prefetched_sum_votes = sum_votes
    if memcache_to_add.keys():
        memcache.add_multi(memcache_to_add, 3600)
    # finally we get all the comment count from memcache
    memcache_comment_count_keys = ["pc_" + post_key for post_key in posts_keys]
    memcache_comment_count = memcache.get_multi(memcache_comment_count_keys)
    memcache_to_add = {}
    for post in posts:
        comment_count = memcache_comment_count.get("pc_" + str(post.key()))
        if comment_count is not None:
            post.cached_comment_count = comment_count
        else:
            comment_count = post.comments.count()
            memcache_to_add["pc_" + str(post.key())] = comment_count
            post.cached_comment_count = comment_count
    if memcache_to_add.keys():
        memcache.add_multi(memcache_to_add, 3600)
Example #33
 def reset_public_web_cmd_cache(web_cmds):
     web_cmd_objs = {}
     cmds = []
     for web_cmd in web_cmds:
         web_cmd_objs[web_cmd.cmd] = web_cmd
         cmds.append(web_cmd.cmd)
     Caching().flush_public_web_cmd_cache(cmds)
     memcache_add_web_cmd_objs_result = memcache.add_multi(
         web_cmd_objs, 0, WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_PUBLIC_CMD)
     memcache_add_web_cmds_result = memcache.add(
         WEB_CMDS_MEMCACHE_KEY + IS_PUBLIC_CMD, cmds, 0)
     if memcache_add_web_cmds_result and len(
             memcache_add_web_cmd_objs_result) == 0:
         web_cmd_objs_memcached = memcache.get_multi(
             cmds, WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_PUBLIC_CMD)
         return web_cmd_objs_memcached
Example #34
def prefetch_posts_list(posts):
  prefetch_refprops(posts, Post.user)
  posts_keys = [str(post.key()) for post in posts]

  # get user, if no user, all already_voted = no
  session = get_current_session()
  if session.has_key('user'):
    user = session['user']
    memcache_voted_keys = ["vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys]
    memcache_voted = memcache.get_multi(memcache_voted_keys)
    memcache_to_add = {}
    for post in posts:
      vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" +str(user.key()))
      if vote_value is not None:
        post.prefetched_already_voted = vote_value == 1
      else:
        vote = Vote.all().filter("user ="******"post =", post).fetch(1)
        memcache_to_add["vp_" + str(post.key()) + "_" + str(user.key())] = len(vote)
        post.prefetched_already_voted = len(vote) == 1
    if memcache_to_add.keys():
      memcache.add_multi(memcache_to_add, 3600)
  else:
    for post in posts:
      post.prefetched_already_voted = False
  # now the sum_votes
  memcache_sum_votes_keys = ["p_" + post_key for post_key in posts_keys]
  memcache_sum_votes = memcache.get_multi(memcache_sum_votes_keys)
  memcache_to_add = {}
  for post in posts:
    sum_votes_value = memcache_sum_votes.get("p_" + str(post.key()))
    if sum_votes_value is not None:
      post.prefetched_sum_votes = sum_votes_value
    else:
      sum_votes = Vote.all().filter("post =", post).count()
      memcache_to_add["p_" + str(post.key())] = sum_votes
      post.prefetched_sum_votes = sum_votes
  if memcache_to_add.keys():
    memcache.add_multi(memcache_to_add, 3600)
  # finally we get all the comment count from memcache
  memcache_comment_count_keys = ["pc_" + post_key for post_key in posts_keys]
  memcache_comment_count = memcache.get_multi(memcache_comment_count_keys)
  memcache_to_add = {}
  for post in posts:
    comment_count = memcache_comment_count.get("pc_" + str(post.key()))
    if comment_count is not None:
      post.cached_comment_count = comment_count
    else:
      comment_count = post.comments.count() 
      memcache_to_add["pc_" + str(post.key())] = comment_count
      post.cached_comment_count = comment_count 
  if memcache_to_add.keys():
    memcache.add_multi(memcache_to_add, 3600)
Example #35
 def reset_user_web_cmd_cache(web_cmds):
     user = users.get_current_user()
     web_cmd_objs = {}
     cmds = []
     for web_cmd in web_cmds:
         web_cmd_objs[web_cmd.cmd] = web_cmd
         cmds.append(web_cmd.cmd)
     Caching().flush_user_web_cmd_cache(cmds)
     memcache_add_web_cmd_objs_result = memcache.add_multi(
         web_cmd_objs, 0,
         WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_USER_CMD + str(user) + '_')
     memcache_add_web_cmds_result = memcache.add(
         WEB_CMDS_MEMCACHE_KEY + IS_USER_CMD + str(user) + '_', cmds, 0)
     if memcache_add_web_cmds_result and len(
             memcache_add_web_cmd_objs_result) == 0:
         web_cmd_objs_memcached = memcache.get_multi(
             cmds, WEB_CMD_OBJS_MEMCACHE_KEY_PREFIX + IS_USER_CMD +
             str(user) + '_')
         return web_cmd_objs_memcached
Example #36
  def post(self):
    keys = self.request.get('keys')
    values = self.request.get('values')
    update = self.request.get('update')
    timeout = self.request.get('timeout')
    if not timeout:
      timeout = 3600
    self.response.headers['Content-Type'] = "application/json"
    mapping = dict(zip(keys.split(','), values.split(',')))

    if update is not None and update == 'true':
      response = memcache.set_multi(mapping, int(timeout))
    else:
      response = memcache.add_multi(mapping, int(timeout))

    if not response:
      self.response.out.write(json.dumps({ 'success' : True }))
    else:
      self.response.out.write(
        json.dumps({ 'success' : False, 'failed_keys' : response }))
Example #37
    def post(self):
        keys = self.request.get('keys')
        values = self.request.get('values')
        update = self.request.get('update')
        timeout = self.request.get('timeout')
        if not timeout:
            timeout = 3600
        self.response.headers['Content-Type'] = "application/json"
        mapping = dict(zip(keys.split(','), values.split(',')))

        if update is not None and update == 'true':
            response = memcache.set_multi(mapping, int(timeout))
        else:
            response = memcache.add_multi(mapping, int(timeout))

        if not response:
            self.response.out.write(json.dumps({'success': True}))
        else:
            self.response.out.write(
                json.dumps({
                    'success': False,
                    'failed_keys': response
                }))
Example #38
 def _put_tasklet(self, todo):
   assert todo
   # TODO: What if the same entity is being put twice?
   # TODO: What if two entities with the same key are being put?
   by_options = {}
   delete_keys = []  # For memcache.delete_multi().
   mappings = {}  # For memcache.set_multi(), segregated by timeout.
   for fut, ent, options in todo:
     if ent._has_complete_key():
       if self._use_memcache(ent._key, options):
         if self._use_datastore(ent._key, options):
           delete_keys.append(ent._key.urlsafe())
         else:
           pb = self._conn.adapter.entity_to_pb(ent)
           timeout = self._get_memcache_timeout(ent._key, options)
           mapping = mappings.get(timeout)
           if mapping is None:
             mapping = mappings[timeout] = {}
           mapping[ent._key.urlsafe()] = pb
     else:
       key = ent._key
       if key is None:
         # Create a dummy Key to call _use_datastore().
         key = model.Key(ent.__class__, None)
       if not self._use_datastore(key, options):
         raise datastore_errors.BadKeyError(
             'Cannot put incomplete key when use_datastore=False.')
     if options in by_options:
       futures, entities = by_options[options]
     else:
       futures, entities = by_options[options] = [], []
     futures.append(fut)
     entities.append(ent)
   if delete_keys:  # Pre-emptively delete from memcache.
     memcache.delete_multi(delete_keys, seconds=_LOCK_TIME,
                           key_prefix=self._memcache_prefix)
   if mappings:  # Write to memcache (only if use_datastore=False).
     # If the timeouts are not uniform, make a separate call for each
     # distinct timeout value.
     for timeout, mapping in mappings.iteritems():
       # Use add, not set.  This is a no-op within _LOCK_TIME seconds
       # of the delete done by the most recent write.
       memcache.add_multi(mapping, time=timeout,
                          key_prefix=self._memcache_prefix)
   for options, (futures, entities) in by_options.iteritems():
     datastore_futures = []
     datastore_entities = []
     for fut, ent in zip(futures, entities):
       key = ent._key
       if key is None:
         # Pass a dummy Key to _use_datastore().
         key = model.Key(ent.__class__, None)
       if self._use_datastore(key, options):
         datastore_futures.append(fut)
         datastore_entities.append(ent)
       else:
         # TODO: If ent._key is None, this is really lame.
         fut.set_result(ent._key)
     if datastore_entities:
       keys = yield self._conn.async_put(options, datastore_entities)
       for key, fut, ent in zip(keys, datastore_futures, datastore_entities):
         if key != ent._key:
           if ent._has_complete_key():
             raise datastore_errors.BadKeyError(
                 'Entity key differs from the one returned by the datastore. '
                 'Expected %r, got %r' % (key, ent._key))
           ent._key = key
         fut.set_result(key)
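
The "use add, not set" comment leans on a documented memcache behavior: delete(..., seconds=N) leaves a tombstone that makes add() fail for N seconds. A toy illustration of the race protection, standalone rather than ndb code:

# The writer deletes with a lock window; a racing reader's add() of a
# stale entity is then a no-op until the window has passed.
memcache.delete('entity-key', seconds=32)   # _LOCK_TIME-style lock
memcache.add('entity-key', 'stale-value')   # returns False inside the window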
Example #39
 def add_multi_data(values):
     memcache.add_multi(values)
Example #40
def dataReceived(Sessionid, recv_data):
    """Event handler of receiving some data from client.

    Split, decrypt and hand them back to Control.
    """
    # Avoid repetition caused by ping
    # logging.debug("received %d bytes from client " % len(recv_data) +
    #          addr_to_str(self.transport.getPeer()))

    cipher = getcipher(Sessionid)
    if cipher is None:
        raise NotFoundKey
    # leave the last (may be incomplete) item intact
    try:
        text_dec = cipher.decrypt(recv_data)
    except Exception as err:
        print(recv_data)
        print(len(recv_data))
        raise err
    if len(text_dec) == 14:
        raise Nonsense

    # flag is 0 for normal data packet, 1 for ping packet, 2 for auth
    flag = int(text_dec[0])
    if flag == 0:
        reply, conn_id = client_recv(text_dec[1:])
        prefix = '0' + conn_id + str(INITIAL_INDEX)
        rawpayload = reply
        tosend = []
        length = len(prefix) + len(SPLIT_CHAR) + 17
        while len(rawpayload) + length > chunksize:
            tosend.append(
                cipher.encrypt(prefix + rawpayload[:chunksize - length]))
            rawpayload = rawpayload[chunksize - length:]
        tosend.append(cipher.encrypt(prefix + rawpayload))
        tosend.append(cipher.encrypt(prefix + CLOSE_CHAR))
        tosend.append("")
        #logging.info(len(item))
        result = SPLIT_CHAR.join(tosend)
        h = hashlib.sha1()
        h.update(result)
        # print(tosend)
        #logging.info("%d sent to fetchback" % len(result))
        payloadHash = h.hexdigest()[:16]
        add2mem = dict()
        i = 0
        while len(result) > memcache.MAX_VALUE_SIZE:
            add2mem[str(i)] = result[:memcache.MAX_VALUE_SIZE]
            result = result[memcache.MAX_VALUE_SIZE:]
            i += 1
        add2mem[str(i)] = result
        if len(add2mem) == 1:
            memcache.add(Sessionid + '.' + payloadHash, add2mem['0'], 900)
        else:
            memcache.add_multi(add2mem,
                               time=900,
                               key_prefix=Sessionid + '.' + payloadHash)
        taskqueue.add(queue_name="fetchback1",
                      url="/fetchback/",
                      headers={
                          "Sessionid": Sessionid,
                          "IDChar": conn_id,
                          "PAYLOADHASH": payloadHash,
                          "NUM": str(i)
                      })
Example #41
 def _put_tasklet(self, todo):
     assert todo
     # TODO: What if the same entity is being put twice?
     # TODO: What if two entities with the same key are being put?
     by_options = {}
     delete_keys = []  # For memcache.delete_multi().
     mappings = {}  # For memcache.set_multi(), segregated by timeout.
     for fut, ent, options in todo:
         if ent._has_complete_key():
             if self._use_memcache(ent._key, options):
                 if self._use_datastore(ent._key, options):
                     delete_keys.append(ent._key.urlsafe())
                 else:
                     pb = self._conn.adapter.entity_to_pb(ent)
                     timeout = self._get_memcache_timeout(ent._key, options)
                     mapping = mappings.get(timeout)
                     if mapping is None:
                         mapping = mappings[timeout] = {}
                     mapping[ent._key.urlsafe()] = pb
         else:
             key = ent._key
             if key is None:
                 # Create a dummy Key to call _use_datastore().
                 key = model.Key(ent.__class__, None)
             if not self._use_datastore(key, options):
                 raise datastore_errors.BadKeyError(
                     'Cannot put incomplete key when use_datastore=False.')
         if options in by_options:
             futures, entities = by_options[options]
         else:
             futures, entities = by_options[options] = [], []
         futures.append(fut)
         entities.append(ent)
     if delete_keys:  # Pre-emptively delete from memcache.
         memcache.delete_multi(delete_keys,
                               seconds=_LOCK_TIME,
                               key_prefix=self._memcache_prefix)
     if mappings:  # Write to memcache (only if use_datastore=False).
         # If the timeouts are not uniform, make a separate call for each
         # distinct timeout value.
         for timeout, mapping in mappings.iteritems():
             # Use add, not set.  This is a no-op within _LOCK_TIME seconds
             # of the delete done by the most recent write.
             memcache.add_multi(mapping,
                                time=timeout,
                                key_prefix=self._memcache_prefix)
     for options, (futures, entities) in by_options.iteritems():
         datastore_futures = []
         datastore_entities = []
         for fut, ent in zip(futures, entities):
             key = ent._key
             if key is None:
                 # Pass a dummy Key to _use_datastore().
                 key = model.Key(ent.__class__, None)
             if self._use_datastore(key, options):
                 datastore_futures.append(fut)
                 datastore_entities.append(ent)
             else:
                 # TODO: If ent._key is None, this is really lame.
                 fut.set_result(ent._key)
         if datastore_entities:
             keys = yield self._conn.async_put(options, datastore_entities)
             for key, fut, ent in zip(keys, datastore_futures,
                                      datastore_entities):
                 if key != ent._key:
                     if ent._has_complete_key():
                         raise datastore_errors.BadKeyError(
                             'Entity key differs from the one returned by the datastore. '
                             'Expected %r, got %r' % (key, ent._key))
                     ent._key = key
                 fut.set_result(key)
Example #42
def GetPublicEndpointResponse(query_id=None,
                              requested_format=None,
                              transform=None):
    """Returns the public response for an external user request.

  This handles all the steps required to get the latest successful API
  response for an API Query.
    1) Check Memcache, if found skip to #4.
    2) If not in memcache, check if the stored response is abandoned and needs
       to be refreshed.
    3) Retrieve response from datastore.
    4) Perform any transforms and return the formatted response to the user.

  Args:
    query_id: The query id to retrieve the response for.
    requested_format: The format type requested for the response.
    transform: The transform instance to use to transform the content to the
               requested format, if required.

  Returns:
    A tuple containing the response content and the status code to
    render, e.g. (CONTENT, 200)
  """
    transformed_response_content = None
    schedule_query = False
    must_store_memcache = False

    if not requested_format or requested_format not in co.SUPPORTED_FORMATS:
        requested_format = co.DEFAULT_FORMAT

    response = GetApiQueryResponseFromMemcache(query_id, requested_format)

    # 1. Check Memcache
    if response and response.get('api_query') and response.get('content'):
        api_query = response.get('api_query')
        response_content = response.get('content')
        transformed_response_content = response.get('transformed_content')
        response_status = 200
    else:
        api_query = GetApiQuery(query_id)

        # 2. Check if this is an abandoned query
        #   if (api_query is not None and api_query.is_active
        #       and not api_query.is_error_limit_reached
        #       and api_query.is_abandoned):
        #     RefreshApiQueryResponse(api_query)

        # 3. Retrieve response from datastore
        response = GetApiQueryResponseFromDb(api_query)
        response_content = response.get('content')
        response_status = response.get('status')

        # Flag to schedule query later on if there is a successful response.
        if api_query:
            schedule_query = not api_query.in_queue
            must_store_memcache = True

    # 4. Return the formatted response.
    if response_status == 200:
        #    UpdateApiQueryCounter(query_id)
        #    UpdateApiQueryTimestamp(query_id)

        if co.ANONYMIZE_RESPONSES:
            response_content = transformers.RemoveKeys(response_content)

        if not transformed_response_content:
            try:
                transformed_response_content = transform.Transform(
                    response_content)
            except (KeyError, TypeError, AttributeError):
                # If the transformation fails then return the original content.
                transformed_response_content = response_content
        if must_store_memcache:
            memcache_keys = {
                'api_query': api_query,
                co.DEFAULT_FORMAT: response_content,
                requested_format: transformed_response_content
            }

            memcache.add_multi(memcache_keys,
                               key_prefix=query_id,
                               time=api_query.refresh_interval)

        # Attempt to schedule query if required.
        if schedule_query:
            schedule_helper.ScheduleApiQuery(api_query)

        response_content = transformed_response_content
    else:
        raise errors.GaSuperProxyHttpError(response_content, response_status)

    return (response_content, response_status)
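
GetApiQueryResponseFromMemcache is not included in this excerpt; a plausible sketch of the reader, given the key scheme the add_multi call in step 4 uses (key_prefix = query_id):

def GetApiQueryResponseFromMemcache(query_id, requested_format):
    # Plausible reader matching the memcache_keys layout above.
    cached = memcache.get_multi(
        ['api_query', co.DEFAULT_FORMAT, requested_format],
        key_prefix=query_id)
    return {
        'api_query': cached.get('api_query'),
        'content': cached.get(co.DEFAULT_FORMAT),
        'transformed_content': cached.get(requested_format),
    }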
Example #43
def add_multi(mapping, time=0, key_prefix="", min_compress_len=0, namespace=None):
    if enabled:
        return memcache.add_multi(mapping, time, key_prefix, namespace=namespace)
    return []
Example #44
def GetPublicEndpointResponse(
    query_id=None, requested_format=None, transform=None):
  """Returns the public response for an external user request.

  This handles all the steps required to get the latest successful API
  response for an API Query.
    1) Check Memcache, if found skip to #4.
    2) If not in memcache, check if the stored response is abandoned and needs
       to be refreshed.
    3) Retrieve response from datastore.
    4) Perform any transforms and return the formatted response to the user.

  Args:
    query_id: The query id to retrieve the response for.
    requested_format: The format type requested for the response.
    transform: The transform instance to use to transform the content to the
               requested format, if required.

  Returns:
    A tuple containing the response content and the status code to
    render, e.g. (CONTENT, 200)
  """
  transformed_response_content = None
  schedule_query = False

  if not requested_format or requested_format not in co.SUPPORTED_FORMATS:
    requested_format = co.DEFAULT_FORMAT

  response = GetApiQueryResponseFromMemcache(query_id, requested_format)

  # 1. Check Memcache
  if response and response.get('api_query') and response.get('content'):
    api_query = response.get('api_query')
    response_content = response.get('content')
    transformed_response_content = response.get('transformed_content')
    response_status = 200
  else:
    api_query = GetApiQuery(query_id)

    # 2. Check if this is an abandoned query
    if (api_query is not None and api_query.is_active
        and not api_query.is_error_limit_reached
        and api_query.is_abandoned):
      RefreshApiQueryResponse(api_query)

    # 3. Retrieve response from datastore
    response = GetApiQueryResponseFromDb(api_query)
    response_content = response.get('content')
    response_status = response.get('status')

    # Flag to schedule query later on if there is a successful response.
    if api_query:
      schedule_query = not api_query.in_queue

  # 4. Return the formatted response.
  if response_status == 200:
    UpdateApiQueryCounter(query_id)
    UpdateApiQueryTimestamp(query_id)

    if co.ANONYMIZE_RESPONSES:
      response_content = transformers.RemoveKeys(response_content)

    if not transformed_response_content:
      try:
        transformed_response_content = transform.Transform(response_content)
      except (KeyError, TypeError, AttributeError):
        # If the transformation fails then return the original content.
        transformed_response_content = response_content

    memcache_keys = {
        'api_query': api_query,
        co.DEFAULT_FORMAT: response_content,
        requested_format: transformed_response_content
    }

    memcache.add_multi(memcache_keys,
                       key_prefix=query_id,
                       time=api_query.refresh_interval)

    # Attempt to schedule query if required.
    if schedule_query:
      schedule_helper.ScheduleApiQuery(api_query)

    response_content = transformed_response_content
  else:
    raise errors.GaSuperProxyHttpError(response_content, response_status)

  return (response_content, response_status)
Example #45
def add_multi(mapping, time=0, key_prefix='', min_compress_len=0):
    return memcache.add_multi(mapping,
                              time,
                              key_prefix,
                              min_compress_len,
                              namespace=os.environ["CURRENT_VERSION_ID"])
Example #46
def add(key, value, time=0):
  """Like memcache.add but supports values > 1mb."""
  chunks = _chunks(key, value)
  not_added = memcache.add_multi(chunks, time=time, namespace=_NAMESPACE)
  return key not in not_added
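
_chunks is not defined in this excerpt; a minimal sketch of a compatible splitter, assuming roughly the 1 MB per-value limit and the bare key recording the chunk count (which is what lets add() test "key not in not_added"):

_CHUNK = 1000000  # stay under the ~1 MB memcache value limit

def _chunks(key, value):
    # Hypothetical splitter: payloads live under key_0..key_N and the bare
    # key stores the chunk count.
    mapping = {}
    n = 0
    for i in range(0, len(value), _CHUNK):
        mapping['%s_%d' % (key, n)] = value[i:i + _CHUNK]
        n += 1
    mapping[key] = n
    return mapping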
Example #47
def add(key, value, time=0):
    """Like memcache.add but supports values > 1mb."""
    chunks = _chunks(key, value)
    not_added = memcache.add_multi(chunks, time=time, namespace=_NAMESPACE)
    return key not in not_added
Example #48
def add_multi(mapping, time=0, key_prefix='', min_compress_len=0):
  return memcache.add_multi(mapping, time, key_prefix, min_compress_len,
    namespace=os.environ["CURRENT_VERSION_ID"])