Example #1
0
    def testGetStats(self):
        """Tests get_stats.

        Performs a fixed sequence of cache hits and misses, advances the
        fake clock, and then verifies the aggregate counters reported by
        memcache.get_stats().
        """
        # Seed two entries; the byte/item stats below derive from these.
        self.assertTrue(memcache.set(self.key1, self.value1))
        self.assertTrue(memcache.set(self.key2, self.value2))

        # 2 single-key hits plus 2 hits via get_multi -> STAT_HITS == 4.
        self.assertEqual(self.value1, memcache.get(self.key1))
        self.assertEqual(self.value2, memcache.get(self.key2))
        self.assertLen(memcache.get_multi([self.key1, self.key2]), 2)

        # 3 single-key misses plus 2 misses via get_multi -> STAT_MISSES == 5.
        self.assertEqual(None, memcache.get(self.key3))
        self.assertEqual(None, memcache.get('unknown'))
        self.assertEqual(None, memcache.get('another not known'))
        self.assertEmpty(memcache.get_multi(['what', 'who']))

        # Advance the fake clock so the oldest item has a nonzero age.
        self._StepClock(7)

        result = memcache.get_stats()

        expected = {
            memcache.STAT_HITS:
            4,
            memcache.STAT_MISSES:
            5,
            # Each stored entry was read twice (once directly, once via
            # get_multi). NOTE(review): key2's bytes are counted via
            # key2_hash -- presumably the stored form of key2; confirm
            # against this fixture's setUp.
            memcache.STAT_BYTE_HITS:
            (2 * (len(self.key1) + len(self.value1) + len(self.key2_hash) +
                  len(self.value2))),
            memcache.STAT_ITEMS:
            2,
            memcache.STAT_BYTES: (len(self.key1) + len(self.value1) +
                                  len(self.key2_hash) + len(self.value2)),
            # Matches the 7-second clock step above.
            memcache.STAT_OLDEST_ITEM_AGES:
            7,
        }

        self.assertEqual(expected, result)
Example #2
0
def get_multi_test():

    """Time a single memcache.get_multi over the benchmark key set.

    Seeds DATA, measures one get_multi round trip, cleans up, and returns
    the elapsed wall-clock seconds.
    """
    prefix = 'memcache_get_multi_test'
    memcache.set_multi(DATA, 30, key_prefix=prefix)
    start = time()
    memcache.get_multi(MULTI_KEYS, key_prefix=prefix)
    elapsed = time() - start
    memcache.delete_multi(MULTI_KEYS, key_prefix=prefix)
    return elapsed
Example #3
0
    def get(self):
        """Exercise the memcache smoke test twice, touch the multi-key
        APIs, then write a plain greeting response."""
        for _ in range(2):
            self.test_memcache()

        batch = {'key': 'value', 'other': 'value'}
        memcache.set_multi(batch)
        memcache.get_multi(['key', 'other', 'thing'])

        self.response.out.write('Hello world')
Example #4
0
def prefetch_posts_list(posts):
    """Prefetch vote, score, and comment-count info for a list of posts.

    Annotates each post in-place with prefetched_already_voted,
    prefetched_sum_votes and cached_comment_count, reading memcache first
    and falling back to the datastore (backfilling the cache on misses).
    """
    prefetch_refprops(posts, Post.user)
    posts_keys = [str(post.key()) for post in posts]

    # get user, if no user, all already_voted = no
    session = get_current_session()
    if session.has_key('user'):
        user = session['user']
        memcache_voted_keys = [
            "vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys
        ]
        memcache_voted = memcache.get_multi(memcache_voted_keys)
        memcache_to_add = {}
        for post in posts:
            vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" +
                                            str(user.key()))
            if vote_value is not None:
                post.prefetched_already_voted = vote_value == 1
            else:
                # Fix: restore the corrupted query -- filter on both the
                # user and the post to find this user's vote, if any.
                vote = Vote.all().filter("user =", user).filter(
                    "post =", post).fetch(1)
                memcache_to_add["vp_" + str(post.key()) + "_" +
                                str(user.key())] = len(vote)
                post.prefetched_already_voted = len(vote) == 1
        if memcache_to_add:
            memcache.add_multi(memcache_to_add, 3600)
    else:
        for post in posts:
            post.prefetched_already_voted = False
    # now the sum_votes
    memcache_sum_votes_keys = ["p_" + post_key for post_key in posts_keys]
    memcache_sum_votes = memcache.get_multi(memcache_sum_votes_keys)
    memcache_to_add = {}
    for post in posts:
        sum_votes_value = memcache_sum_votes.get("p_" + str(post.key()))
        if sum_votes_value is not None:
            post.prefetched_sum_votes = sum_votes_value
        else:
            sum_votes = Vote.all().filter("post =", post).count()
            memcache_to_add["p_" + str(post.key())] = sum_votes
            post.prefetched_sum_votes = sum_votes
    if memcache_to_add:
        memcache.add_multi(memcache_to_add, 3600)
    # finally we get all the comment count from memcache
    memcache_comment_count_keys = ["pc_" + post_key for post_key in posts_keys]
    memcache_comment_count = memcache.get_multi(memcache_comment_count_keys)
    memcache_to_add = {}
    for post in posts:
        comment_count = memcache_comment_count.get("pc_" + str(post.key()))
        if comment_count is not None:
            post.cached_comment_count = comment_count
        else:
            comment_count = post.comments.count()
            memcache_to_add["pc_" + str(post.key())] = comment_count
            post.cached_comment_count = comment_count
    if memcache_to_add:
        memcache.add_multi(memcache_to_add, 3600)
 def testCacheGetOneItemInMemcache(self):
   """CacheGet returns a ranker built from a string found in memcache."""
   ranker_class = self.mox.CreateMock(result_ranker.CountRanker)
   self.mox.StubOutWithMock(memcache, 'get_multi')
   # Memcache hit: 'k1' resolves to the serialized ranker 's1' ...
   memcache.get_multi(['k1'], **self.memcache_params).AndReturn({'k1': 's1'})
   # ... which CacheGet deserializes through the ranker class.
   ranker_class.FromString('k1', 's1').AndReturn('r1')
   self.mox.ReplayAll()
   rankers = self.cls.CacheGet(['k1'], {'k1': ranker_class})
   self.mox.VerifyAll()
   self.assertEqual({'k1': 'r1'}, rankers)
 def testCacheGetOneItemNotFound(self):
     """CacheGet returns {} when neither memcache nor datastore has the key."""
     ranker_class = self.mox.CreateMock(result_ranker.CountRanker)
     self.mox.StubOutWithMock(memcache, 'get_multi')
     # Memcache miss ...
     memcache.get_multi(['k1'], **self.memcache_params).AndReturn({})
     # ... and the datastore fallback also finds nothing.
     ranker_class.get_by_key_name(['k1']).AndReturn([None])
     self.mox.ReplayAll()
     rankers = self.cls.CacheGet(['k1'], {'k1': ranker_class})
     self.mox.VerifyAll()
     self.assertEqual({}, rankers)
 def testCacheGetOneItemNotFound(self):
   """CacheGet returns {} when neither memcache nor datastore has the key."""
   ranker_class = self.mox.CreateMock(result_ranker.CountRanker)
   self.mox.StubOutWithMock(memcache, 'get_multi')
   # Memcache miss ...
   memcache.get_multi(['k1'], **self.memcache_params).AndReturn({})
   # ... and the datastore fallback also finds nothing.
   ranker_class.get_by_key_name(['k1']).AndReturn([None])
   self.mox.ReplayAll()
   rankers = self.cls.CacheGet(['k1'], {'k1': ranker_class})
   self.mox.VerifyAll()
   self.assertEqual({}, rankers)
 def testCacheGetOneItemInMemcache(self):
     """CacheGet returns a ranker built from a string found in memcache."""
     ranker_class = self.mox.CreateMock(result_ranker.CountRanker)
     self.mox.StubOutWithMock(memcache, 'get_multi')
     # Memcache hit: 'k1' resolves to the serialized ranker 's1' ...
     memcache.get_multi(['k1'],
                        **self.memcache_params).AndReturn({'k1': 's1'})
     # ... which CacheGet deserializes through the ranker class.
     ranker_class.FromString('k1', 's1').AndReturn('r1')
     self.mox.ReplayAll()
     rankers = self.cls.CacheGet(['k1'], {'k1': ranker_class})
     self.mox.VerifyAll()
     self.assertEqual({'k1': 'r1'}, rankers)
Example #9
0
    def testMulti(self):
        """Stores multiple keys' values at once."""

        # set_multi writes both entries in a single call.
        memcache.set_multi({'map_key_one': 1, 'map_key_two': u'some value'})
        fetched = memcache.get_multi(['map_key_one', 'map_key_two'])
        assert fetched == {'map_key_one': 1, 'map_key_two': u'some value'}

        # add_multi only inserts absent keys: existing entries keep their
        # values and only 'three' is newly stored.
        memcache.add_multi({
            'map_key_one': 'one',
            'map_key_two': 2,
            'three': u'trois',
        })
        fetched = memcache.get_multi(['map_key_two', 'three'])
        assert fetched == {'map_key_two': u'some value', 'three': u'trois'}
Example #10
0
def prefetch_posts_list(posts):
  """Prefetch vote, score, and comment-count info for a list of posts.

  Annotates each post in-place with prefetched_already_voted,
  prefetched_sum_votes and cached_comment_count, reading memcache first and
  falling back to the datastore (backfilling the cache on misses).
  """
  prefetch_refprops(posts, Post.user)
  posts_keys = [str(post.key()) for post in posts]

  # get user, if no user, all already_voted = no
  session = get_current_session()
  if session.has_key('user'):
    user = session['user']
    memcache_voted_keys = ["vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys]
    memcache_voted = memcache.get_multi(memcache_voted_keys)
    memcache_to_add = {}
    for post in posts:
      vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" + str(user.key()))
      if vote_value is not None:
        post.prefetched_already_voted = vote_value == 1
      else:
        # Fix: restore the corrupted query -- filter on both the user and
        # the post to find this user's vote, if any.
        vote = Vote.all().filter("user =", user).filter("post =", post).fetch(1)
        memcache_to_add["vp_" + str(post.key()) + "_" + str(user.key())] = len(vote)
        post.prefetched_already_voted = len(vote) == 1
    if memcache_to_add:
      memcache.add_multi(memcache_to_add, 3600)
  else:
    for post in posts:
      post.prefetched_already_voted = False
  # now the sum_votes
  memcache_sum_votes_keys = ["p_" + post_key for post_key in posts_keys]
  memcache_sum_votes = memcache.get_multi(memcache_sum_votes_keys)
  memcache_to_add = {}
  for post in posts:
    sum_votes_value = memcache_sum_votes.get("p_" + str(post.key()))
    if sum_votes_value is not None:
      post.prefetched_sum_votes = sum_votes_value
    else:
      sum_votes = Vote.all().filter("post =", post).count()
      memcache_to_add["p_" + str(post.key())] = sum_votes
      post.prefetched_sum_votes = sum_votes
  if memcache_to_add:
    memcache.add_multi(memcache_to_add, 3600)
  # finally we get all the comment count from memcache
  memcache_comment_count_keys = ["pc_" + post_key for post_key in posts_keys]
  memcache_comment_count = memcache.get_multi(memcache_comment_count_keys)
  memcache_to_add = {}
  for post in posts:
    comment_count = memcache_comment_count.get("pc_" + str(post.key()))
    if comment_count is not None:
      post.cached_comment_count = comment_count
    else:
      comment_count = post.comments.count()
      memcache_to_add["pc_" + str(post.key())] = comment_count
      post.cached_comment_count = comment_count
  if memcache_to_add:
    memcache.add_multi(memcache_to_add, 3600)
Example #11
0
    def testMulti(self):
        """Stores multiple keys' values at once."""

        # set_multi writes both entries in a single call.
        memcache.set_multi({'map_key_one': 1, 'map_key_two': u'some value'})
        values = memcache.get_multi(['map_key_one', 'map_key_two'])
        assert {'map_key_one': 1, 'map_key_two': u'some value'} == values

        # add_multi only inserts keys that are absent, so the existing
        # entries keep their original values and only 'three' is added.
        memcache.add_multi({
            'map_key_one': 'one',
            'map_key_two': 2,
            'three': u'trois'
        })
        values = memcache.get_multi(['map_key_two', 'three'])
        assert {'map_key_two': u'some value', 'three': u'trois'} == values
Example #12
0
def prefetch_comment_list(comments):
    """Prefetch vote info for a list of comments.

    Annotates each comment in-place with prefetched_already_voted and
    prefetched_sum_votes, reading memcache first and falling back to the
    datastore (backfilling the cache on misses).
    """
    prefetch_refprops(comments, Comment.user, Comment.post)

    # call all the memcache information
    # starting by the already_voted area
    comment_keys = [str(comment.key()) for comment in comments]
    session = get_current_session()
    if session.has_key('user'):
        user = session['user']
        memcache_voted_keys = [
            "cp_" + comment_key + "_" + str(user.key())
            for comment_key in comment_keys
        ]
        memcache_voted = memcache.get_multi(memcache_voted_keys)
        memcache_to_add = {}
        for comment in comments:
            vote_value = memcache_voted.get("cp_" + str(comment.key()) + "_" +
                                            str(user.key()))
            if vote_value is not None:
                comment.prefetched_already_voted = vote_value == 1
            else:
                # Fix: restore the corrupted query -- filter on both the
                # user and the comment to find this user's vote, if any.
                vote = Vote.all().filter("user =", user).filter(
                    "comment =", comment).fetch(1)
                memcache_to_add["cp_" + str(comment.key()) + "_" +
                                str(user.key())] = len(vote)
                comment.prefetched_already_voted = len(vote) == 1
        if memcache_to_add:
            memcache.add_multi(memcache_to_add, 3600)
    else:
        for comment in comments:
            comment.prefetched_already_voted = False
    # now the sum_votes
    memcache_sum_votes_keys = [
        "c_" + comment_key for comment_key in comment_keys
    ]
    memcache_sum_votes = memcache.get_multi(memcache_sum_votes_keys)
    memcache_to_add = {}
    for comment in comments:
        sum_votes_value = memcache_sum_votes.get("c_" + str(comment.key()))
        if sum_votes_value is not None:
            comment.prefetched_sum_votes = sum_votes_value
        else:
            sum_votes = Vote.all().filter("comment =", comment).count()
            memcache_to_add["c_" + str(comment.key())] = sum_votes
            comment.prefetched_sum_votes = sum_votes
    if memcache_to_add:
        memcache.add_multi(memcache_to_add, 3600)
Example #13
0
 def get_status_dict(self):
   """Return this game's deserialized status objects, keyed by status id.

   Reads serialized statuses from memcache first; any that are missing are
   loaded from the datastore and written back to memcache.
   """
   serialized_status = memcache.get_multi(self.status_ids,
                                          key_prefix='status')
   # Copy of status_ids that we prune as cache hits come in.
   missing_status_ids = list(self.status_ids)
   game_status = {}
   for status_id, status in serialized_status.iteritems():
     game_status[status_id] = deserialize(status)
     missing_status_ids.remove(status_id)

   # Load the missing statuses from the datastore and backfill memcache.
   if missing_status_ids:
     missing_status = Status.get(missing_status_ids)
     serialized_status = {}
     for status in missing_status:
       # Fix: key by this status's own id. The original reused the stale
       # `status_id` left over from the loop above, so every datastore
       # result overwrote the same game_status entry.
       game_status[status.id] = deserialize(status)
       serialized_status[status.id] = serialize(status)

     memcache.set_multi(serialized_status, key_prefix='status')

   # NOTE(review): the original comment claimed game_status "works like a
   # list of strings" here; it is a dict of deserialized statuses.
   self.actualise_status(game_status)

   return game_status
def batch_put(mbc_name, bucket_key, list_keys, decrementing=False):
    """Rebuild datastore entities from protobufs cached under list_keys.

    Part of a memcache-bucketed write-behind scheme: entity protobufs are
    staged in memcache and periodically flushed. On a deserialization
    failure the bucket count is shrunk and the old bucket's keys are
    re-queued via the deferred task queue.

    NOTE(review): the visible body builds entities_to_put but never writes
    it -- the original presumably continues with a datastore put; confirm
    against the full source.
    """
    from apps.user.models import *

    logging.info("Batch putting %s to memcache: %s" % (mbc_name, list_keys))
    mbc = MemcacheBucketConfig.get_or_create(mbc_name)
    entities_to_put = []
    had_error = False
    # Fetch every staged protobuf in one round trip.
    object_dict = memcache.get_multi(list_keys)
    for key in list_keys:
        data = object_dict.get(key)
        try:
            entity = db.model_from_protobuf(entity_pb.EntityProto(data))
            if entity:
                entities_to_put.append(entity)
        except AssertionError, e:
            # Corrupt/missing protobuf: shrink the bucket ring once per run
            # (guarded by had_error) and re-defer the displaced bucket.
            old_key = mbc.get_bucket(mbc.count)
            if bucket_key != old_key and not decrementing and not had_error:
                old_count = mbc.count
                mbc.decrement_count()
                logging.warn(
                    'encounted error, going to decrement buckets from %s to %s' 
                    % (old_count, mbc.count), exc_info=True)

                last_keys = memcache.get(old_key) or []
                memcache.set(old_key, [], time=MEMCACHE_TIMEOUT)
                deferred.defer(batch_put, mbc_name, old_key, last_keys, 
                        decrementing=True, _queue='slow-deferred')
                had_error = True
        except Exception, e:
            logging.error('error getting object: %s' % e, exc_info=True)
Example #15
0
def Get(key):
  """Get a memcache entry, or None.

  Values too large for one memcache entry are stored sharded: the entry at
  MEMCACHE_PREFIX + key holds a shard map with the shard count, and each
  content shard lives under '<prefixed key><i>'.
  """
  key = MEMCACHE_PREFIX + key
  shard_map = memcache.get(key)
  if not shard_map:
    # The shard_map was evicted or never set.
    return

  # If zero shards, the content was small enough and stored in the shard_map.
  num_shards = shard_map['num_shards']
  if num_shards == 0:
    return pickle.loads(shard_map['content'])

  keys = ['%s%d' % (key, i) for i in range(num_shards)]
  shards = memcache.get_multi(keys)
  if len(shards) != num_shards:
    # One or more content shards were evicted, delete map and content shards.
    memcache.delete_multi([key] + keys)
    return

  # All shards present, stitch contents back together and unpickle.
  shards = tuple([shards[key] for key in keys])
  value = '%s' * shard_map['num_shards']
  value = pickle.loads(value % shards)
  return value
Example #16
0
 def testUpdateStatsBasic(self):
   """UpdateStats returns the merged stats and writes them to memcache."""
   category = 'Aardvark'
   updated_summary_stats = self.cls.UpdateStats(category, TEST_STATS[category])
   # The same per-browser rows must be visible in the class's namespace.
   memcache_summary_stats = memcache.get_multi(
       ['Firefox 3', 'Firefox 3.5'], namespace=self.cls.MEMCACHE_NAMESPACE)
   expected_summary_stats = {
       'Firefox 3': {
           'results': {
               'Aardvark': {
                   'score': 2,
                   'display': '93/100',
                   'total_runs': 8,
                   },
               },
           },
       'Firefox 3.5': {
           'results': {
               'Aardvark': {
                   'score': 5,
                   'display': '83/100',
                   'total_runs': 5,
                   },
               },
           },
       }
   # Return value and cached value must agree.
   self.assertEqual(expected_summary_stats, updated_summary_stats)
   self.assertEqual(expected_summary_stats, memcache_summary_stats)
Example #17
0
    def filter(self, urls):
        """Checks whether each URL may proceed based on its domain's score.

        Args:
          urls: Iterable of URLs; one result is returned per input URL, in
            the same order they were passed in.

        Returns:
          List of (allowed, failure_percentage) tuples where allowed is
          True if the URL passed the filter and failure_percentage is the
          fraction of failures (0..1) seen in the current scoring period.
        """
        domains = [get_url_domain(u) for u in urls]
        counter_keys = ['success:' + d for d in domains]
        counter_keys.extend('failure:' + d for d in domains)
        counters = memcache.get_multi(counter_keys, key_prefix=self.prefix)

        results = []
        for domain in domains:
            successes = counters.get('success:' + domain, 0)
            failures = counters.get('failure:' + domain, 0)
            total = successes + failures

            if total > 0:
                failure_percentage = (1.0 * failures) / total
            else:
                failure_percentage = 0

            allowed = bool(DISABLE_FOR_TESTING
                           or total < self.min_requests
                           or failure_percentage < self.max_failure_percentage)
            results.append((allowed, failure_percentage))

        return results
Example #18
0
def retrieve(key):
    """Reassemble and unpickle a value stored across up to 32 chunk keys.

    Returns None when no chunk is present.
    """
    chunk_keys = ['%s.%s' % (key, i) for i in xrange(32)]
    result = memcache.get_multi(chunk_keys)
    if not result:
        return None
    # Fix: join chunks in numeric key order. The original sorted the string
    # keys, which orders 'key.10' before 'key.2' and scrambles the pickle
    # stream; it also carried a redundant `key in result` check.
    serialized_data = ''.join(
        result[k] for k in chunk_keys if result.get(k) is not None)
    return pickle.loads(serialized_data)
Example #19
0
    def _render(self, *args, **kw):
        """Render the 2018 avatars page, caching the avatar list in 10
        memcache shards (one value can exceed the 1MB memcache limit)."""
        avatars = []
        shards = memcache.get_multi(
            ['2018avatars_{}'.format(i) for i in xrange(10)])
        if len(shards) == 10:  # If missing a shard, must refetch all
            # Shard suffixes are single digits, so lexicographic order of
            # the keys matches numeric order here.
            for _, shard in sorted(shards.items(), key=lambda kv: kv[0]):
                avatars += shard

        if not avatars:
            avatars_future = Media.query(
                Media.media_type_enum == MediaType.AVATAR).fetch_async()
            # Sort by the numeric part of the reference id -- assumes ids
            # look like '<3-char prefix><number>'; TODO confirm.
            avatars = sorted(avatars_future.get_result(),
                             key=lambda a: int(a.references[0].id()[3:]))

            shards = {}
            # Python 2 integer division; the last shard may come up short.
            size = len(avatars) / 10 + 1
            for i in xrange(10):
                start = i * size
                end = start + size
                shards['2018avatars_{}'.format(i)] = avatars[start:end]
            # Cache for 24 hours.
            memcache.set_multi(shards, 60 * 60 * 24)

        self.template_values.update({
            'avatars': avatars,
        })
        return jinja2_engine.render('avatars2018.html', self.template_values)
    def get(self):
        """Serve cached metrics, rebuilding the cache on a miss.

        FeatureObserver data can exceed memcache's 1MB value limit, so it
        is stored as multiple chunks that are fetched and reassembled here;
        other models use a single memcache entry.
        """
        # Memcache doesn't support saving values > 1MB. Break up features into chunks
        # and save those to memcache.
        if self.MODEL_CLASS == models.FeatureObserver:
            keys = self.PROPERTY_CLASS.get_property_chunk_memcache_keys(
                self.PROPERTY_CLASS, self.MEMCACHE_KEY)
            properties = memcache.get_multi(keys)

            # Rebuild when any expected chunk is missing. Fix: the original
            # compared len(properties.keys()) != len(properties), which is
            # always false; the intended check is against the requested keys.
            if len(properties) != len(keys) or not properties:
                properties = self.__query_metrics_for_properties()

                # Memcache doesn't support saving values > 1MB. Break up list into chunks.
                chunk_keys = self.PROPERTY_CLASS.set_property_chunk_memcache_keys(
                    self.MEMCACHE_KEY, properties)
                memcache.set_multi(chunk_keys, time=CACHE_AGE)
            else:
                # All chunks present: stitch them back together in key order.
                temp_list = []
                for key in sorted(properties.keys()):
                    temp_list.extend(properties[key])
                properties = temp_list
        else:
            properties = memcache.get(self.MEMCACHE_KEY)
            if properties is None:
                properties = self.__query_metrics_for_properties()
                memcache.set(self.MEMCACHE_KEY, properties, time=CACHE_AGE)

        properties = self._clean_data(properties)
        # Metrics json shouldn't be cached by intermediary caches because users
        # see different data when logged in. Set Cache-Control: private.
        super(FeatureHandler, self).get(properties, public=False)
Example #21
0
    def new_getTopNews(page=1,category=None,cacheread=True,cachewrite=True):
        """Return top-news articles for a page/category, with caching.

        The article list itself is cached for a day under a per-page key;
        approval and comment counters are additionally refreshed from their
        own memcache entries when those exceed the stored values.
        """
        articles=None
        key='topnews_'+str(page)+"_"+str(category)

        if cacheread:
            articles=memcache.get(key)

        updatecache=False
        if not articles:
            articles=ArticleLib.getTopNews(page, category)
            updatecache=True

        if articles:
            # Batch-fetch the live approval/comment counters for all
            # articles in one memcache round trip.
            keys=[]
            #TODO: get the correct article.numapprovals and article.numcomments
            for article in articles:
                keys.append("n_approvals_"+str(article.key()))
                keys.append("ncomments_"+str(article.key()))
            records=memcache.get_multi(keys)
#            for key,value in records.items():
#                logging.info(key+"="+str(value))
            # Only override when the cached counter is strictly larger than
            # what the (possibly stale) article entity carries.
            for article in articles:
                if records.has_key("n_approvals_"+str(article.key())) and records["n_approvals_"+str(article.key())]:
                    if records["n_approvals_"+str(article.key())] > article.numapprovals:
                        #logging.info("overriding numapprovals to "+str(records["n_approvals_"+str(article.key())]))
                        article.numapprovals=records["n_approvals_"+str(article.key())]
                if records.has_key("ncomments_"+str(article.key())) and records["ncomments_"+str(article.key())]:
                    if records["ncomments_"+str(article.key())] > article.numcomments:
                        article.numcomments=records["ncomments_"+str(article.key())]
        if updatecache:
            #logging.info("memcache set articles") 
            memcache.set(key,articles,86400)

        return articles
Example #22
0
    def get_counts_from_memcache(self):
        """Fetch every per-offset count entry for this poll in one call.

        Builds one key per second of elapsed poll time (capped at the poll
        duration) and returns memcache.get_multi's dict for those keys.
        """
        max_time = min(self.poll.duration, self.calc_offset())
        cache_keys = ["counts,%s,%d" % (self.name, offset)
                      for offset in range(max_time + 1)]
        return memcache.get_multi(cache_keys)
Example #23
0
    def get(self, cluster_id=None):
        """Report which appids of a cluster are available vs. over quota."""
        cluster_attrs = fetch_config[cluster_id]

        # Download the cluster's config and extract its appid list.
        result = urlfetch.fetch(cluster_attrs['url'])
        # appids = result.content.split('|')
        appids = lib.getAppidFromINI(result.content)

        # Each cached appid maps to True (available) or False (over quota);
        # appids absent from memcache land in neither bucket.
        appid_dict = memcache.get_multi(appids)

        response_dict = {
            "available": [],
            "over_quota": [],
        }

        for appid, val in appid_dict.iteritems():
            if val is True:
                response_dict['available'].append(appid)
            elif val is False:
                response_dict['over_quota'].append(appid)

        # Status message (Chinese): remaining daily traffic in GB.
        response_dict['status_msg'] = "今日还剩 %dGB/%dGB 流量" % (len(response_dict['available']), len(appids))

        response_json = json.dumps(response_dict, ensure_ascii=False)

        self.response.write(response_json)
Example #24
0
def prefetch_posts_list(posts):
  """Prefetch each post's already-voted flag for the current session user.

  Annotates every post in-place with prefetched_already_voted, reading
  memcache first and falling back to a datastore query (backfilling the
  cache on misses). Without a logged-in user, all flags are False.
  """
  prefetch_refprops(posts, Post.user)
  posts_keys = [str(post.key()) for post in posts]

  # get user, if no user, all already_voted = no
  session = get_current_session()
  if session.has_key('user'):
    user = session['user']
    memcache_voted_keys = ["vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys]
    memcache_voted = memcache.get_multi(memcache_voted_keys)
    memcache_to_add = {}
    for post in posts:
      logging.info("Got a post")
      vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" + str(user.key()))
      if vote_value is not None:
        post.prefetched_already_voted = vote_value == 1
      else:
        # Fix: restore the corrupted query -- filter on both the user and
        # the post to find this user's vote, if any.
        vote = Vote.all().filter("user =", user).filter("post =", post).fetch(1)
        memcache_to_add["vp_" + str(post.key()) + "_" + str(user.key())] = len(vote)
        post.prefetched_already_voted = len(vote) == 1
    if memcache_to_add:
      memcache.add_multi(memcache_to_add, 3600)
  else:
    for post in posts:
      post.prefetched_already_voted = False
Example #25
0
def task(stories):
  """Check a batch of story ids and post the ones scoring at least 100."""
  def check_story(rpc):
    # RPC callback: parse the story JSON and decide whether to post it.
    try:
      result = rpc.get_result()
      story = json.loads(result.content)
      if story and story.get('score') >= 100:
        StoryPost.add(story)
      elif story:
        logging.info('STOP: {id} has low score ({score})'.format(**story))
      else:
        logging.info('STOP: story was probably deleted/flagged')
    except urlfetch.DownloadError as ex:
      logging.exception(ex)
    except ValueError as ex:
      # The response body was not valid JSON.
      logging.info(result.content)
      logging.exception(ex)


  # stringify ids for use in memcache and convert to set for later
  ids = set(str(story_id) for story_id in stories)
  logging.info('checking stories: {}'.format(ids))
  # get stories that we already posted to reduce the number of requests
  cached_stories = set(memcache.get_multi(ids).keys())
  logging.info('cached stories: {}'.format(cached_stories))
  # remove stories we know about from stories that we need to check
  stories_to_check = ids.difference(cached_stories)
  rpcs = map(lambda id: item_async(id, check_story), stories_to_check)
  for rpc in rpcs:
    rpc.wait()
Example #26
0
def load_summary_protos(java_application=False):
  """Load all valid summary records from memcache.

  Args:
    java_application: Boolean. If true, this function is being invoked
      by the download_appstats tool on a java application.

  Returns:
    A list of StatsProto instances, in reverse chronological order
    (i.e. most recent first).

  NOTE: This is limited to returning at most config.KEY_MODULUS records,
  since there are only that many distinct keys.  See also make_key().
  """
  tmpl = config.KEY_PREFIX + config.KEY_TEMPLATE + config.PART_SUFFIX
  if java_application:

    tmpl = '"' + tmpl + '"'
  keys = [tmpl % i
          for i in
          range(0, config.KEY_DISTANCE * config.KEY_MODULUS,
                config.KEY_DISTANCE)]
  results = memcache.get_multi(keys, namespace=config.KEY_NAMESPACE)
  records = []
  for rec in results.itervalues():
    try:
      pb = StatsProto(rec)
    except Exception, err:
      logging.warn('Bad record: %s', err)
    else:
      records.append(pb)
Example #27
0
 def stop(self):
   """Deactivate this game and detach every player's status from it."""
   self.active = False
   game_slugs = get_game_slugs()

   # Remove this game from the global list of active game slugs.
   if self.slug in game_slugs:
     game_slugs.remove(self.slug)
     memcache.set('game_slugs', game_slugs)


   serialized_status = memcache.get_multi(self.status_ids,
                                          key_prefix='status')

   # Copy of status_ids that we prune as cache hits come in.
   missing_status_ids = list(self.status_ids)
   for status_id, status in serialized_status.iteritems():
     deserialized_status = deserialize(status)
     deserialized_status.player.leave_game(self)
     missing_status_ids.remove(status_id)

   # Statuses not found in memcache are loaded from the datastore instead.
   if missing_status_ids:
     missing_status = Status.get(missing_status_ids)
     for status in missing_status:
       status.player.leave_game(self)

   memcache.delete('game'+self.slug)
   self.status_ids = []
   self.put()
Example #28
0
def Get(key):
    """Get a memcache entry, or None.

    Large values are stored sharded: the entry at MEMCACHE_PREFIX + key
    holds a shard map with the shard count; each content shard lives under
    '<prefixed key><i>'. Returns None if the map or any shard is missing.
    """
    full_key = MEMCACHE_PREFIX + key
    shard_map = memcache.get(full_key)
    if not shard_map:
        # Map entry evicted or never written.
        return

    # Zero shards means the content fit inside the map entry itself.
    num_shards = shard_map['num_shards']
    if num_shards == 0:
        return pickle.loads(shard_map['content'])

    shard_keys = ['%s%d' % (full_key, i) for i in range(num_shards)]
    fetched = memcache.get_multi(shard_keys)
    if len(fetched) != num_shards:
        # At least one shard was evicted; drop the whole group.
        memcache.delete_multi([full_key] + shard_keys)
        return

    # Stitch the shards back together in order and unpickle.
    pieces = tuple(fetched[k] for k in shard_keys)
    template = '%s' * num_shards
    return pickle.loads(template % pieces)
def get(key):
  """Reassemble and unpickle a value stored across up to 32 chunk keys.

  Returns None when the chunks are absent or do not unpickle.
  """
  chunk_keys = ['%s.%s' % (key, i) for i in xrange(32)]
  result = memcache.get_multi(chunk_keys)
  # Fix: join chunks in their numeric key order. The original joined
  # result.values(), whose ordering is arbitrary, which corrupts the
  # pickled stream whenever more than one chunk exists.
  serialized = ''.join([result[k] for k in chunk_keys if result.get(k) is not None])
  try:
    return pickle.loads(serialized)
  except Exception:  # Narrowed from bare except; TODO: use pickle-specific types.
    return None
Example #30
0
    def UpdateStats(cls, category, stats):
        """Update the summary stats in memory and the datastore.

        This will only update part of a summary score row.

        Args:
            category: a category string like 'network'
            stats: a dict of browser stats (see CategoryStatsManager.GetStats)
        Returns:
            The summary stats that have been updated by the given stats.
            (Used by GetStats.)
        """
        # Browser names only; 'total_runs' is a sibling aggregate key.
        browsers = [b for b in stats.keys() if b != 'total_runs']
        update_summary_stats = memcache.get_multi(
                browsers, namespace=cls.MEMCACHE_NAMESPACE)
        for browser in browsers:
            # Start an empty row for browsers not yet cached.
            ua_summary_stats = update_summary_stats.setdefault(browser, {
                    'results': {}})
            ua_summary_stats['results'][category] = {
                    'score': stats[browser]['summary_score'],
                    'display': stats[browser]['summary_display'],
                    'total_runs': stats[browser]['total_runs'],
                    }
            # acid3 displays its raw score value instead of the summary.
            if category == 'acid3':
                ua_summary_stats['results']['acid3']['display'] = (
                        stats[browser]['results']['score']['display'])
        memcache.set_multi(update_summary_stats, namespace=cls.MEMCACHE_NAMESPACE)
        return update_summary_stats
Example #31
0
  def _Dynamic_Get(self, request, response):
    """Intercepts get requests and returns them from cache if available."""
    # Transactional gets must see datastore truth; bypass the cache.
    if request.has_transaction():
      self.CallWrappedStub('Get', request, response)
      return

    new_request = datastore_pb.GetRequest()
    new_response = datastore_pb.GetResponse()
    encoded_keys = [k.Encode() for k in request.key_list()]
    cached = memcache.get_multi(encoded_keys)

    # Ask the wrapped datastore stub only for the keys memcache lacked.
    for key, encoded_key in itertools.izip(request.key_list(), encoded_keys):
      if encoded_key not in cached:
        new_request.add_key().CopyFrom(key)

    if new_request.key_size() > 0:
      logging.info("SHIM: keys are not in memcache: %s" % request.key_list())
      self.CallWrappedStub('Get', new_request, new_response)

    # Merge cached and freshly fetched entities back in request order;
    # new_response's entities line up with the keys added to new_request.
    entity_iter = iter(new_response.entity_list())
    to_put = dict()
    for encoded_key in encoded_keys:
      entity = cached.get(encoded_key, None)
      if entity:
        response.add_entity().mutable_entity().CopyFrom(entity)
      else:
        entity = entity_iter.next()
        # Only cache fully populated entities (misses stay uncached).
        if entity.entity().IsInitialized():
          # self.entity_cache[encoded_key] = entity.entity()
          to_put[encoded_key] = entity.entity()
        response.add_entity().CopyFrom(entity)
    if to_put:
      memcache.set_multi(to_put)
Example #32
0
 def get(self):
     """Demonstrate batch memcache set/get and echo the fetched values."""
     # [START batch]
     values = {'comment': 'I did not ... ', 'comment_by': 'Bill Holiday'}
     ok = memcache.set_multi(values)
     if not ok:
         logging.error('Unable to set Memcache values')
     fetched = memcache.get_multi(('comment', 'comment_by'))
     self.response.write(fetched)
Example #33
0
def cache_retrieve(key):
    """Reassemble and unpickle a value stored across up to 32 chunk keys.

    Returns None when no chunk data is present.
    """
    chunk_keys = ['%s.%s' % (key, i) for i in xrange(32)]
    result = memcache.get_multi(chunk_keys)
    # Fix: join chunks in numeric key order. The original sorted the string
    # keys, which orders 'key.10' before 'key.2' and scrambles the pickle
    # stream once there are ten or more chunks.
    serialized = ''.join(
        [result[k] for k in chunk_keys if result.get(k) is not None])
    if serialized == '':
        return None
    return pickle.loads(serialized)
Example #34
0
def get(setting):
    """Look up a setting, trying progressively slower backends.

    Order: request-local cache, memcache, datastore, TWEAKABLE_DEFAULTS,
    then an uppercase module-level constant. Raises KeyError when the
    setting exists nowhere.
    """
    # 1. Request-local cache.
    req = webapp2.get_request()
    if req:
        local_cache = req.registry.setdefault('thonkify_setting_cache', {})
        if setting in local_cache:
            # A hit here could only have been populated from this cache, so
            # refreshing the other layers would just generate memcache traffic.
            return local_cache[setting]

    # 2. Memcache.
    cached = memcache.get_multi([setting], key_prefix=SETTING_MC_PREFIX)
    if setting in cached:
        # Skip re-writing memcache; that is where this value just came from.
        return _update_caches(setting, cached[setting], skip_memcache=True)

    # 3. Datastore.
    stored = GlobalSetting.get(setting)
    if stored is not None:
        return _update_caches(setting, stored)

    # 4. Built-in defaults.
    if setting in TWEAKABLE_DEFAULTS:
        default = TWEAKABLE_DEFAULTS[setting]
        if isinstance(default, tuple):
            default = default[0]
        return _update_caches(setting, default)

    # 5. Module-level constant with an uppercase name.
    if setting[0] in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' and setting in globals():
        return _update_caches(setting, globals()[setting])

    # 6. Unknown everywhere: give up.
    raise KeyError('Setting "%s" does not exist' % setting)
Example #35
0
    def get(self):
        """Render the index page, caching both proxy lists in memcache."""
        user = users.get_current_user()
        context = {
            'login_url': users.create_login_url('/'),
            'logout_url': users.create_logout_url('/'),
            'is_logged_in': user is not None,
            'is_admin': users.is_current_user_admin(),
        }

        cached = memcache.get_multi(['proxies', 'moderated_proxies'])

        if 'proxies' in cached:
            logging.info('proxies cache hit')
        else:
            logging.info('proxies cache miss')
            cached['proxies'] = Proxy.all().filter('approved =', True).order('name')
        context['proxies'] = cached['proxies']

        if 'moderated_proxies' in cached:
            logging.info('moderated proxies cache hit')
        else:
            logging.info('moderated proxies cache miss')
            cached['moderated_proxies'] = Proxy.all().filter('approved =', False).order('name')
        context['moderated_proxies'] = cached['moderated_proxies']

        # add_multi only writes keys that are absent, so values that came
        # from the cache are not needlessly rewritten.
        memcache.add_multi({
            'proxies': context['proxies'],
            'moderated_proxies': context['moderated_proxies'],
        })

        self.render_response('index.html', context)
Example #36
0
    def _AuxCheckStart(self, keysets, limit, ratelimiting_enabled,
                       exception_obj):
        """Check and bump rate-limit counters for each keyset.

        Args:
          keysets: iterable of non-empty lists of memcache counter keys; the
            counter values in each list are summed and compared to limit.
          limit: maximum allowed sum of counter values per keyset.
          ratelimiting_enabled: if True, over-limit requests actually raise;
            otherwise the event is only logged and counted.
          exception_obj: exception instance raised when blocking a request
            (or when memcache fails and self.fail_open is False).
        """
        for keys in keysets:
            count = 0
            try:
                counters = memcache.get_multi(keys)
                count = sum(counters.values())
                self.checks.increment({'type': 'success'})
            except Exception as e:
                logging.error(e)
                # On memcache failure, either fail closed (reject the request)
                # or fail open (treat the count as 0), per configuration.
                if not self.fail_open:
                    self.checks.increment({'type': 'fail_closed'})
                    raise exception_obj
                self.checks.increment({'type': 'fail_open'})

            if count > limit:
                # Since webapp2 won't let us return a 429 error code
                # <http://tools.ietf.org/html/rfc6585#section-4>, we can't
                # monitor rate limit exceeded events with our standard tools.
                # We return a 400 with a custom error message to the client,
                # and this logging is so we can monitor it internally.
                logging.info('%s, %d' % (exception_obj.message, count))

                self.limit_exceeded.increment()

                if ratelimiting_enabled:
                    self.blocked_requests.increment()
                    raise exception_obj

            k = keys[0]
            # Only update the latest *time* bucket for each prefix (reverse chron).
            # add() is a no-op if the key exists; incr() then bumps it.
            memcache.add(k, 0, time=EXPIRE_AFTER_SECS)
            memcache.incr(k, initial_value=0)
Example #37
0
    def _query_work(self, index, cursor):
        """Queries for work in memcache.

        Returns a (entities, next_cursor) tuple for the batch starting at
        the given cursor; non-numeric cursors are delegated to the parent
        (old-style Datastore tasks).
        """
        if not cursor:
            start = 0
        else:
            try:
                start = int(cursor)
            except ValueError:
                # This is an old style task that resides in the Datastore, not
                # memcache. Use the parent implementation instead.
                return super(MemcacheForkJoinQueue,
                             self)._query_work(index, cursor)

        # One memcache key per slot in this batch window.
        keys = [self._create_index_key(index, offset)
                for offset in xrange(start, start + self.batch_size)]
        fetched = memcache.get_multi(keys)

        entities = []
        for key in keys:
            encoded = fetched.get(key)
            if not encoded:
                # Slot missing or evicted; skip it.
                continue
            try:
                entities.append(db.model_from_protobuf(encoded))
            except ProtocolBuffer.ProtocolBufferDecodeError:
                logging.exception(
                    'Could not decode EntityPb at memcache key %r: %r', key,
                    encoded)

        return entities, start + self.batch_size
Example #38
0
def _GetNonviewableIIDs(query_project_ids, logged_in_user_id, needed_shard_ids,
                        rpc_tuples, nonviewable_iids, project_shard_timestamps,
                        invalidation_timestep, use_cached_searches):
    """Build a set of at-risk IIDs, and accumulate RPCs to get uncached ones.

    Cache keys are '<pid>;<user>;<shard>' (or 'all;...' for cross-project
    queries) under the 'nonviewable:' prefix. Results and follow-up RPCs are
    written into the mutable nonviewable_iids / rpc_tuples /
    project_shard_timestamps arguments.
    """
    if query_project_ids:
        keys = []
        for pid in query_project_ids:
            for sid in needed_shard_ids:
                keys.append('%d;%d;%d' % (pid, logged_in_user_id, sid))
    else:
        # NOTE(review): this branch unpacks needed_shard_ids elements as
        # (logged_in_user_id, sid) pairs — which, in Python 2, also rebinds
        # the logged_in_user_id parameter via comprehension leakage — and
        # 'all;%d;%d' % sid only works if sid is itself a 2-tuple. Looks
        # fragile; confirm the element type of needed_shard_ids in this mode.
        keys = [('all;%d;%d' % sid)
                for (logged_in_user_id, sid) in needed_shard_ids]

    if use_cached_searches:
        cached_dict = memcache.get_multi(keys, key_prefix='nonviewable:')
    else:
        # Caching disabled: force every shard to be treated as a miss.
        cached_dict = {}

    for sid in needed_shard_ids:
        if query_project_ids:
            for pid in query_project_ids:
                _AccumulateNonviewableIIDs(pid, logged_in_user_id, sid,
                                           cached_dict, nonviewable_iids,
                                           project_shard_timestamps,
                                           rpc_tuples, invalidation_timestep)
        else:
            # None signals a cross-project ('all') lookup.
            _AccumulateNonviewableIIDs(None, logged_in_user_id, sid,
                                       cached_dict, nonviewable_iids,
                                       project_shard_timestamps, rpc_tuples,
                                       invalidation_timestep)
Example #39
0
def GetImageRecords(photo_ids):
  """Queries the ImageRecord db, w/ memcaching. Returns photo_id -> rec dict"""
  # check if we've got the whole thing in memcache
  # (one combined entry keyed by the joined, ordered id list)
  multi_key = VERSION + 'MIR' + ','.join(photo_ids)
  recs = memcache.get(multi_key)
  if recs: return recs

  keys_no_prefix = photo_ids[:]
  key_prefix = VERSION + 'IR'

  # Per-photo cache entries; anything missing is loaded from the datastore.
  record_map = memcache.get_multi(keys_no_prefix, key_prefix=key_prefix)
  missing_ids = list(set(keys_no_prefix) - set(record_map.keys()))
  if not missing_ids: return record_map

  # Eventual consistency is acceptable for these read-mostly records.
  config = db.create_config(read_policy=db.EVENTUAL_CONSISTENCY)
  db_recs = ImageRecord.get_by_key_name(missing_ids, config=config)

  # get_by_key_name returns results aligned with missing_ids (None for
  # absent rows), so zip pairs each id with its record.
  memcache_map = {}
  for id, r in zip(missing_ids, db_recs):
    record_map[id] = r
    memcache_map[id] = r

  # add/add_multi only write absent keys, so concurrent fills won't clobber.
  if memcache_map:
    memcache.add_multi(memcache_map, key_prefix=key_prefix)
    memcache.add(multi_key, record_map)
  return record_map
Example #40
0
def getThreadPostCounts(forum_threads):
  """Helper for retrieving the post count of a list of threads.

  This is more efficient than calling getCounter() on each one individually:
  all counters are fetched from memcache in a single round trip, and only
  misses fall back to summing the sharded datastore counters."""
  def _counter_name(thread):
    # Internal counter name shared with the sharded-counter model.
    return "thread:%s:%s:posts" % (thread.forum.slug, thread.slug)

  counts = memcache.get_multi(
      ["counter:%s" % _counter_name(t) for t in forum_threads])

  for forum_thread in forum_threads:
    counter_name = _counter_name(forum_thread)
    keyname = "counter:%s" % (counter_name)
    if keyname not in counts:
      # Cache miss: sum the sharded counters from the datastore and
      # repopulate memcache for next time.
      count = 0
      for shard in model.forum.ForumShardedCounter.all().filter(
          "name", counter_name):
        count += shard.count
      counts[keyname] = count
      memcache.set(keyname, count)

  # Re-key from internal memcache names to "<forum-slug>:<thread-slug>".
  post_counts = {}
  for forum_thread in forum_threads:
    slug_pair = "%s:%s" % (forum_thread.forum.slug, forum_thread.slug)
    post_counts[slug_pair] = counts["counter:thread:%s:posts" % slug_pair]
  return post_counts
Example #41
0
def retrieve(key):
    """Reassemble and unpickle a value stored across up to 32 memcache chunks.

    Chunks live under keys '<key>.0' .. '<key>.31'. Returns the unpickled
    object, or None when no chunks are present.
    """
    chunk_keys = ['%s.%s' % (key, i) for i in xrange(32)]
    result = memcache.get_multi(chunk_keys)
    logging.debug(result)
    # Concatenate chunks in numeric order. The original sorted the string
    # keys lexicographically, which orders 'k.10' before 'k.2' and corrupts
    # the pickle stream for values spanning 10+ chunks. Stop at the first
    # missing chunk: bytes after a hole cannot form a valid stream.
    parts = []
    for chunk_key in chunk_keys:
        chunk = result.get(chunk_key)
        if chunk is None:
            break
        parts.append(chunk)
    serialized = ''.join(parts)
    if serialized == '':
        return None
    return pickle.loads(serialized)
Example #42
0
def getTopThreadsPerForum(forums):
  """For each forum, returns the 'top' thread, which we'll display in the forum list page.

  The 'top' thread is the most-recently created thread. When you click through to the forum, the
  top thread will actually be the thread with the most recent reply, so it's slightly different."""
  cache_mapping = memcache.get_multi(
      ["forum:%s:top-thread" % (f.slug) for f in forums])

  # Fill in any cache misses from the datastore (newest thread per forum).
  for forum in forums:
    keyname = "forum:%s:top-thread" % (forum.slug)
    if keyname in cache_mapping:
      continue
    query = model.forum.ForumThread.all().filter("forum", forum).order("-posted").fetch(1)
    for forum_thread in query:
      cache_mapping[keyname] = forum_thread
      break

  # Write everything back so misses are cached for next time.
  memcache.set_multi(cache_mapping)

  # Convert from our (internal) memcache key names to plain forum slugs.
  top_threads = {}
  for forum in forums:
    keyname = "forum:%s:top-thread" % (forum.slug)
    if keyname in cache_mapping:
      top_threads[forum.slug] = cache_mapping[keyname]

  return top_threads
Example #43
0
def get_games():
  """Return the active Game entities, using memcache (keyed by slug).

  Returns None (implicitly) when there are no game slugs at all, matching
  the original contract.
  """
  game_slugs = get_game_slugs()
  if game_slugs:
    cached = memcache.get_multi(game_slugs, key_prefix='game')

    games = []
    missing_slugs = list(game_slugs)
    # Deserialize every cache hit and drop it from the missing list.
    for slug, blob in cached.iteritems():
      games.append(deserialize(blob))
      missing_slugs.remove(slug)

    if missing_slugs:
      # Load the misses from the datastore and cache them for next time.
      query = Game.all()
      query.filter('active =', True)
      query.filter('slug in', missing_slugs)
      to_cache = {}
      for game in query.fetch(limit=None):
        games.append(game)
        to_cache[game.slug] = serialize(game)
      memcache.set_multi(to_cache, key_prefix='game')

    return games
Example #44
0
def get_data_from_cache(entity, key_prefix):
    """Fetch ndb entities by key, backed by memcache.

    Args:
        entity: iterable of ndb.Key objects to resolve.
        key_prefix: memcache key prefix for this entity kind.

    Returns:
        List of entities in the same order as the input keys; cache misses
        are loaded from the datastore and added to memcache for an hour.
    """
    urlsafe_keys = [str(k.urlsafe()) for k in entity]

    logging.info(urlsafe_keys)

    # get memcache: results
    cache_results = memcache.get_multi(
        keys=urlsafe_keys, key_prefix=key_prefix)

    result = []
    memcache_to_add = {}
    for wanted in urlsafe_keys:
        # Direct O(1) membership test. The original scanned all dict keys
        # with any(r == wanted for r in cache_results), which is an O(n)
        # linear search per lookup for the same answer.
        if wanted not in cache_results:
            # Cache miss: load from the datastore and remember it for caching.
            logging.info('not found')
            memcache_to_add[wanted] = ndb.Key(urlsafe=wanted).get()
            result.append(memcache_to_add[wanted])
        else:
            # Cache hit.
            logging.info('found')
            result.append(cache_results[wanted])

    logging.info('memcache(s) to add: ' + str(len(memcache_to_add)))

    # add_multi only writes absent keys, so concurrent fills won't clobber.
    if len(memcache_to_add) > 0:
        memcache.add_multi(
            memcache_to_add, key_prefix=key_prefix, time=3600)

    return result
Example #45
0
def load_summary_protos(java_application=False):
  """Load all valid summary records from memcache.

  Args:
    java_application: Boolean. If true, this function is being invoked
      by the download_appstats tool on a java application.

  Returns:
    A list of StatsProto instances, in reverse chronological order
    (i.e. most recent first).

  NOTE: This is limited to returning at most config.KEY_MODULUS records,
  since there are only that many distinct keys.  See also make_key().
  """
  tmpl = config.KEY_PREFIX + config.KEY_TEMPLATE + config.PART_SUFFIX
  if java_application:
    # Java appstats wraps the memcache key in literal double quotes.
    tmpl = '"' + tmpl + '"'
  # One key per slot: 0, KEY_DISTANCE, 2*KEY_DISTANCE, ... (KEY_MODULUS slots).
  keys = [tmpl % i
          for i in
          range(0, config.KEY_DISTANCE * config.KEY_MODULUS,
                config.KEY_DISTANCE)]
  results = memcache.get_multi(keys, namespace=config.KEY_NAMESPACE)
  records = []
  for rec in results.itervalues():
    try:
      pb = StatsProto(rec)
    except Exception, err:
      # Skip unparseable records rather than failing the whole load.
      logging.warn('Bad record: %s', err)
    else:
      records.append(pb)
  # NOTE(review): this excerpt ends without returning `records`, despite the
  # docstring promising a list — the snippet appears truncated here.
Example #46
0
def prefetch_posts_list(posts):
    """Annotate each post with prefetched_already_voted for the session user.

    Vote status is looked up in memcache first ('vp_<post>_<user>' keys, value
    1 = voted); misses fall back to a Vote query and are cached for an hour.
    Without a logged-in user every post is marked as not voted.
    """
    prefetch_refprops(posts, Post.user)
    posts_keys = [str(post.key()) for post in posts]

    # get user, if no user, all already_voted = no
    session = get_current_session()
    if session.has_key('user'):
        user = session['user']
        memcache_voted_keys = [
            "vp_" + post_key + "_" + str(user.key()) for post_key in posts_keys
        ]
        memcache_voted = memcache.get_multi(memcache_voted_keys)
        memcache_to_add = {}
        for post in posts:
            logging.info("Got a post")
            vote_value = memcache_voted.get("vp_" + str(post.key()) + "_" +
                                            str(user.key()))
            if vote_value is not None:
                post.prefetched_already_voted = vote_value == 1
            else:
                # This line was corrupted in the source by a redaction
                # artifact ('filter("user ="******"post =", post)');
                # reconstructed as the standard two-clause filter on
                # user and post.
                vote = Vote.all().filter("user =", user).filter(
                    "post =", post).fetch(1)
                memcache_to_add["vp_" + str(post.key()) + "_" +
                                str(user.key())] = len(vote)
                post.prefetched_already_voted = len(vote) == 1
        if memcache_to_add.keys():
            memcache.add_multi(memcache_to_add, 3600)
    else:
        for post in posts:
            post.prefetched_already_voted = False
Example #47
0
def load_summary_protos():
    """Load all valid summary records from memcache.

  Returns:
    A list of RequestStatProto instances, in reverse chronological order
    (i.e. most recent first).

  NOTE: This is limited to returning at most config.KEY_MODULUS records,
  since there are only that many distinct keys.  See also make_key().
  """
    tmpl = '%s%s%s' % (recording.config.KEY_PREFIX,
                       recording.config.KEY_TEMPLATE,
                       recording.config.PART_SUFFIX)
    # One key per slot: 0, KEY_DISTANCE, 2*KEY_DISTANCE, ... (KEY_MODULUS slots).
    keys = [
        tmpl % i for i in range(
            0, recording.config.KEY_DISTANCE *
            recording.config.KEY_MODULUS, recording.config.KEY_DISTANCE)
    ]
    results = memcache.get_multi(keys,
                                 namespace=recording.config.KEY_NAMESPACE)
    records = []
    for rec in results.itervalues():
        try:
            pb = protobuf.decode_message(apphosting.RequestStatProto, rec)
        except Exception, err:
            # Skip records that fail to decode instead of aborting the load.
            logging.warn('Bad record: %s', err)
        else:
            records.append(pb)
    # NOTE(review): this excerpt ends without returning `records`, despite the
    # docstring promising a list — the snippet appears truncated here.
  def _query_work(self, index, cursor):
    """Queries for work in memcache.

    Args:
      index: opaque queue-index object passed through to _create_index_key().
      cursor: numeric offset (as int or string) of the next batch, or a
        non-numeric value for old-style Datastore-resident tasks.

    Returns:
      Tuple (result_list, next_cursor) where result_list holds the decoded
      db model entities found in this batch window.
    """
    if cursor:
      try:
        cursor = int(cursor)
      except ValueError:
        # This is an old style task that resides in the Datastore, not
        # memcache. Use the parent implementation instead.
        return super(MemcacheForkJoinQueue, self)._query_work(index, cursor)
    else:
      cursor = 0

    # One memcache key per slot in the current batch window.
    key_list = [self._create_index_key(index, n)
                for n in xrange(cursor, cursor + self.batch_size)]
    results = memcache.get_multi(key_list)

    result_list = []
    for key in key_list:
      proto = results.get(key)
      if not proto:
        # Slot missing or evicted; skip it.
        continue
      try:
        result_list.append(db.model_from_protobuf(proto))
      except ProtocolBuffer.ProtocolBufferDecodeError:
        logging.exception('Could not decode EntityPb at memcache key %r: %r',
                          key, proto)

    return result_list, cursor + self.batch_size
Example #49
0
def task(stories):
    """Fetch the given story ids asynchronously and post high-scoring ones.

    Ids already present in memcache are assumed posted and skipped; the rest
    are fetched via item_async with check_story as the completion callback.
    """
    def check_story(rpc):
        # Completion callback: decide whether the fetched story gets posted.
        try:
            result = rpc.get_result()
            story = json.loads(result.content)
            if story and story.get('score') >= SCORE_THRESHOLD:
                StoryPost.add(story)
            elif story:
                logging.info(
                    'STOP: {id} has low score ({score})'.format(**story))
            else:
                logging.info('STOP: story was probably deleted/flagged')
        except urlfetch.DownloadError as ex:
            logging.exception(ex)
        except ValueError as ex:
            logging.info(result.content)
            logging.exception(ex)

    # Stringify ids for memcache and keep them as a set for set arithmetic.
    story_ids = set(str(sid) for sid in stories)
    logging.info('checking stories: {}'.format(story_ids))
    # Anything already in memcache was posted before; no request needed.
    already_posted = set(memcache.get_multi(story_ids).keys())
    logging.info('cached stories: {}'.format(already_posted))
    pending = story_ids - already_posted
    rpcs = [item_async(sid, check_story) for sid in pending]
    for rpc in rpcs:
        rpc.wait()
Example #50
0
  def _get_tasklet(self, todo):
    """Resolve pending get() futures, checking memcache before the datastore.

    Args:
      todo: non-empty list of (future, key) pairs to resolve; each future is
        completed with the corresponding entity (or None).
    """
    assert todo
    # First check memcache.
    keys = set(key for _, key in todo)
    # Only keys whose policy allows memcaching get a urlsafe memcache key.
    memkeymap = dict((key, key.urlsafe())
                     for key in keys if self.should_memcache(key))
    if memkeymap:
      results = memcache.get_multi(memkeymap.values())
      leftover = []
##      del todo[1:]  # Uncommenting this creates an interesting bug.
      for fut, key in todo:
        mkey = memkeymap[key]
        if mkey in results:
          # Cache hit: decode the protobuf and resolve the future directly.
          pb = results[mkey]
          ent = self._conn.adapter.pb_to_entity(pb)
          fut.set_result(ent)
        else:
          leftover.append((fut, key))
      todo = leftover
    if todo:
      # Cache misses: resolve the remainder with one batched datastore get.
      keys = [key for (_, key) in todo]
      # TODO: What if async_get() created a non-trivial MultiRpc?
      results = yield self._conn.async_get(None, keys)
      # async_get returns results aligned with keys, so zip matches each
      # entity back to its waiting future.
      for ent, (fut, _) in zip(results, todo):
        fut.set_result(ent)
Example #51
0
def generate_feed(uid):
    """Build the aggregated feed JSON for a user's subscriptions.

    Sources whose rendered JSON is memcached (keyed by feed_cache_key()) are
    used directly; the rest are rendered via their json() promises and the
    results are written back to memcache. Returns {"sources": [...]} ordered
    by each source's most recent article date.
    """
    subscriptions = Subscription.query(Subscription.uid == uid).fetch(200)
    subscription_urls = [sub.url for sub in subscriptions if sub.url]
    if len(subscription_urls) > 0:
        sources = Source.query(Source.url.IN(subscription_urls)).order(-Source.most_recent_article_added_date).fetch(len(subscription_urls))

        # Map source id -> cached feed JSON for every cache hit.
        source_jsons = {}
        for source_json in memcache.get_multi([source.feed_cache_key() for source in sources]).itervalues():
            source_jsons[source_json['id']] = source_json

        to_fetch = [source for source in sources if source.key.id() not in source_jsons]
        print 'HITS {0} TO_FETCH {1}'.format(len(source_jsons), len(to_fetch))
        if len(to_fetch):
            # Render the missing sources; each promise resolves to feed JSON.
            source_promises = [src.json(include_articles=True, article_limit=FEED_ARTICLE_LIMIT, return_promise=True) for src in to_fetch]
            for promise in source_promises:
                data = promise()
                source_jsons[data['id']] = data

        # put the cache keys:
        if len(to_fetch):
            memcache.set_multi({source.feed_cache_key(): source_jsons[source.key.id()] for source in to_fetch if (source.key.id() in source_jsons)})

        # Reassemble in the sorted source order, skipping anything that
        # failed to render.
        source_json = [source_jsons[source.key.id()] for source in sources if source.key.id() in source_jsons]
    else:
        source_json = []
    return {
        "sources": source_json
    }
    def _render(self, year):
        """Render the avatars page for a year, caching Media in 10 shards.

        Avatar Media objects are split across 10 memcache shards; if any
        shard is missing, everything is refetched from the datastore and
        all shards are rewritten with a one-day TTL.
        """
        year = int(year)
        avatars = []
        shards = memcache.get_multi(['{}avatars_{}'.format(year, i) for i in xrange(10)])
        if len(shards) == 10:  # If missing a shard, must refetch all
            # Shard keys sort by their 0..9 suffix, so concatenating in
            # sorted-key order restores the original ordering.
            for _, shard in sorted(shards.items(), key=lambda kv: kv[0]):
                avatars += shard

        if not avatars:
            avatars_future = Media.query(Media.media_type_enum == MediaType.AVATAR, Media.year == year).fetch_async()
            # Sort by the numeric team number embedded in the first reference
            # key (assumes ids look like 'frcNNN' — TODO confirm).
            avatars = sorted(avatars_future.get_result(), key=lambda a: int(a.references[0].id()[3:]))

            shards = {}
            size = len(avatars) / 10 + 1  # Python 2 integer division
            for i in xrange(10):
                start = i * size
                end = start + size
                shards['{}avatars_{}'.format(year, i)] = avatars[start:end]
            memcache.set_multi(shards, 60*60*24)  # cache for one day

        self.template_values.update({
            'year': year,
            'avatars': avatars,
        })
        return jinja2_engine.render('avatars.html', self.template_values)
Example #53
0
    def get(self, key):
        """Reassemble a large JSON value stored across memcache chunks.

        The master key holds the total byte length; each chunk lives under
        '<MAGIC_STR>-<key>-<offset>'. Returns the decoded value, or None on
        missing chunks or decode failure.
        """
        logs.log('MemcacheLarge get %s.' % key)
        value_len = memcache.get(key)
        if not value_len:
            # Missing/empty master entry: propagate it unchanged.
            return value_len

        value_len = int(value_len)
        chunk_keys = [
            '%s-%s-%s' % (self.MAGIC_STR, key, offset)
            for offset in xrange(0, value_len, self.CHUNK_LEN)
        ]

        prefix_len = len('%s-%s-' % (self.MAGIC_STR, key))

        def chunk_offset(pair):
            # Extract the numeric byte offset from the full chunk key.
            return int(pair[0][prefix_len:])

        pairs = memcache.get_multi(chunk_keys).items()
        string_value = ''.join(
            chunk for _, chunk in sorted(pairs, key=chunk_offset))

        string_len = len(string_value)
        if string_len != value_len:
            # One or more chunks were evicted; the value is unrecoverable.
            logs.log_error(
                'Unable to retrieve %s. Expected length: %s. actual: %s' %
                (key, value_len, string_len))
            return None

        try:
            return json.loads(string_value)
        except ValueError:
            logs.log_error('Unable to retrieve ' + key)
            return None
Example #54
0
def GetImageRecords(photo_ids):
    """Queries the ImageRecord db, w/ memcaching. Returns photo_id -> rec dict"""
    # check if we've got the whole thing in memcache
    # (one combined entry keyed by the joined, ordered id list)
    keys_no_prefix = photo_ids[:]
    if MEMCACHE_ENABLED:
        multi_key = VERSION + 'MIR' + ','.join(photo_ids)
        recs = memcache.get(multi_key)
        if recs: return recs

        key_prefix = VERSION + 'IR'

        # Per-photo cache entries; anything missing falls through to the db.
        record_map = memcache.get_multi(keys_no_prefix, key_prefix=key_prefix)
        missing_ids = list(set(keys_no_prefix) - set(record_map.keys()))
        if not missing_ids: return record_map
    else:
        # Caching disabled: treat every id as a miss.
        missing_ids = keys_no_prefix
        record_map = {}

    # Eventual consistency is acceptable for these read-mostly records.
    config = db.create_config(read_policy=db.EVENTUAL_CONSISTENCY)
    db_recs = ImageRecord.get_by_key_name(missing_ids, config=config)

    # get_by_key_name returns results aligned with missing_ids (None for
    # absent rows), so zip pairs each id with its record.
    memcache_map = {}
    for id, r in zip(missing_ids, db_recs):
        record_map[id] = r
        memcache_map[id] = r

    # add/add_multi only write absent keys, so concurrent fills won't clobber.
    if MEMCACHE_ENABLED and memcache_map:
        memcache.add_multi(memcache_map, key_prefix=key_prefix)
        memcache.add(multi_key, record_map)
    return record_map