Example #1
def load_web():
    user = users.get_current_user()

    profile_key = get_user_profile_key(user)
    friends_map_key = get_friends_map_key_by_user(user)

    async_get = db.get_async([profile_key, friends_map_key])

    messages_query = get_messages_query(app_user=user, cursor=None, user_only=True)
    messages_iterator = messages_query.run(config=datastore_query.QueryOptions(limit=MESSAGES_BATCH_SIZE))

    service_inbox_query = get_service_inbox_query(user, None)
    service_inbox_iterator = service_inbox_query.run(config=datastore_query.QueryOptions(limit=MESSAGES_BATCH_SIZE))

    profile, friends_map = async_get.get_result()

    def convert_to_transferobject(message):
        member = user if not message.sharedMembers and message.sender != user else None
        message_type_descr = MESSAGE_TYPE_MAPPING[message.TYPE]
        args = [message]
        if message_type_descr.include_member_in_conversion:
            args.append(member)
        if message.isRootMessage:
            return message_type_descr.root_model_to_conversion(*args)
        else:
            return message_type_descr.model_to_conversion(*args)

    messages = list()
    async_child_messages = list()
    for parent_message in messages_iterator:
        messages.append(convert_to_transferobject(parent_message))
        async_child_messages.append(db.get_async(parent_message.childMessages))

    for i in xrange(len(messages)):
        messages[i].messages = [convert_to_transferobject(cm) for cm in async_child_messages[i].get_result()]

    user_status = UserStatusTO()
    user_status.profile = UserProfileTO.fromUserProfile(profile)
    avatar = get_avatar_by_id(profile.avatarId)
    user_status.has_avatar = bool(avatar and avatar.picture)
    user_status.registered_mobile_count = get_user_active_mobiles_count(user)

    message_screen_to = RootMessageListTO()
    message_screen_to.messages = messages
    message_screen_to.cursor = unicode(messages_query.cursor())
    message_screen_to.batch_size = MESSAGES_BATCH_SIZE

    service_inbox_to = MessageListTO()
    service_inbox_to.messages = [convert_to_transferobject(m) for m in service_inbox_iterator]
    service_inbox_to.cursor = unicode(service_inbox_query.cursor())
    service_inbox_to.batch_size = MESSAGES_BATCH_SIZE

    result = WebTO()
    result.friends = [FriendTO.fromDBFriendDetail(f) for f in friends_map.friendDetails] if friends_map else []
    result.user_status = user_status
    result.messages = message_screen_to
    result.service_inbox = service_inbox_to

    return result
Example #2
def _prefetchListFieldsAsync(model, fields, data):
  """Prefetches the specified list fields in data asynchronously.

  NOTE: The key difference here is that we don't distribute the keys! The
  caller is expected to do it.
  """
  for field in fields:
    prop = getattr(model, field, None)

    if not prop:
      logging.error(
          'Model %s does not have attribute %s', model.kind(), field)
      return

    if not isinstance(prop, db.ListProperty):
      logging.error(
          'Property %s of %s is not a ListProperty but a %s',
          field, model.kind(), prop.__class__.__name__)
      return

  keys = []

  for field in fields:
    for i in data:
      keys += getattr(i, field)

  return db.get_async(keys)
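The docstring above notes that the caller is expected to distribute the prefetched entities back over the items itself. A minimal caller sketch (hypothetical, not part of the original source; the helper name and the '_entities' attribute are made up) that resolves the RPC and reattaches the entities in the same field/item order the keys were gathered:

def _distributeListFields(fields, data, rpc):
  """Sketch only: pairs the prefetched entities back up with each item."""
  # db.get_async preserves key order, so results arrive in the same order the
  # keys were collected: outer loop over fields, inner loop over items.
  entities = iter(rpc.get_result())
  for field in fields:
    for item in data:
      count = len(getattr(item, field))
      # store the resolved entities on a parallel, made-up attribute name
      setattr(item, field + '_entities', [next(entities) for _ in xrange(count)])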
Example #3
def analyze_status():
    LOG_LIMIT = 30  # to prevent 'InvalidArgumentError: Too many request ids specified.'
    # Asynchronously fetch troubled services
    services_in_trouble = get_monitored_services_in_trouble_qry().run()

    # Asynchronously fetch rogerthat backend status
    rbe_rpc = db.get_async(RogerthatBackendErrors.get_key())

    # Fetch queue statuses
    default, controller, worker, fast, broadcast = taskqueue.QueueStatistics.fetch(
            ["default", "highload-controller-queue", "highload-worker-queue", 'fast', 'broadcast-queue'])
    rbe = rbe_rpc.get_result()
    logs = logservice.fetch(request_ids=rbe.requestIds[:LOG_LIMIT]) if rbe else None
    total_error_count = len(rbe.requestIds) if rbe else 0
    skipped_error_count = max(0, total_error_count - LOG_LIMIT)
    services = list(services_in_trouble)
    five_min_ago = (now() - 300) * 1000000

    client_errors = ClientError.all().order("-timestamp").fetch(20)
    result = dict(queues=dict(default=default, controller=controller, worker=worker, fast=fast, broadcast=broadcast),
                  rogerthatBackendStatus=logs,
                  errorCount=total_error_count,
                  skippedCount=skipped_error_count,
                  services=services,
                  five_min_ago=five_min_ago,
                  client_errors=client_errors)
    return result
Example #4
 def get(self):
   project_name = self.request.get('project_name')
   module_name = self.request.get('module_name')
   if project_name is not None and len(project_name) > 0:
     if module_name is not None and len(module_name) > 0:
       project_query = db.GqlQuery("SELECT * FROM Project WHERE "
                                   "name = :1", project_name)
       get_future = db.get_async(db.Key.from_path('Module',
         module_name, parent=project_query[0].key()))
       entity = get_future.get_result()
     else:
       get_future = db.get_async(db.Key.from_path('Project',
         project_name, parent=None))
       entity = get_future.get_result()
     self.response.headers['Content-Type'] = "application/json"
     self.response.out.write(json.dumps(entity, default=serialize))
   else:
     raise Exception('Missing parameters')
Example #5
 def GetMulti(self, keys):
   db_futures = dict((k, db.get_async(
       PersistentObjectStoreItem.CreateKey(self._namespace, k)))
       for k in keys)
   def resolve():
     return dict((key, future.get_result().GetValue())
                 for key, future in db_futures.iteritems()
                 if future.get_result() is not None)
   return Future(callback=resolve)
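A hedged usage sketch for the pattern above: the datastore lookups are started eagerly, and the Future is only resolved when the values are actually needed. The store constructor and the Future's Get() accessor are assumptions for illustration, not taken from the original source.

store = PersistentObjectStore('example')  # hypothetical owner of GetMulti
future = store.GetMulti(['alpha', 'beta'])
# ... other work can run while the db RPCs are in flight ...
values = future.Get()  # assumed Future accessor that invokes resolve(); missing keys are omitted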
Example #6
 def get(self):
   project_name = self.request.get('project_name')
   module_name = self.request.get('module_name')
   if project_name is not None and len(project_name) > 0:
     if module_name is not None and len(module_name) > 0:
       project_query = db.GqlQuery("SELECT * FROM Project WHERE "
                                   "name = :1", project_name)
       get_future = db.get_async(db.Key.from_path('Module',
         module_name, parent=project_query[0].key()))
       entity = get_future.get_result()
     else:
       get_future = db.get_async(db.Key.from_path('Project',
         project_name, parent=None))
       entity = get_future.get_result()
     self.response.headers['Content-Type'] = "application/json"
     self.response.out.write(json.dumps(entity, default=serialize))
   else:
     raise Exception('Missing parameters')
Example #7
def view_list(request, id, template='generic/view_list.html'):
    def load_suggestions_async(suggestions):
        suggestions = suggestions.get_result()
        from georemindme.funcs import prefetch_refprops
        from geoalert.models import Suggestion
        suggestions = prefetch_refprops(suggestions, Suggestion.user, Suggestion.poi)
        suggestions_loaded = []
        for suggestion in suggestions:
            suggestions_loaded.append({
                                    'instance': suggestion,
                                    'has_voted':  Vote.objects.user_has_voted(request.user, suggestion.key()) if request.user.is_authenticated() else False,
                                    'vote_counter': Vote.objects.get_vote_counter(suggestion.key())
                                   }
                                  )
        return suggestions_loaded
   
    list = ListSuggestion.objects.get_by_id_querier(id, request.user)
    if list is None:
        raise Http404
    from google.appengine.ext import db
    from geoalert.models import Event
    from geovote.models import Vote, Comment
    suggestions_async = db.get_async(list.keys)
    if 'print' in request.GET:
        top_comments = Comment.objects.get_top_voted(list, request.user)
        vote_counter = Vote.objects.get_vote_counter(list.key())
        return render_to_response('print/view_list.html',
                                {'list': list,
                                 'suggestions': load_suggestions_async(suggestions_async),
                                 'vote_counter': vote_counter,
                                 'top_comments': top_comments,
                                },
                                context_instance=RequestContext(request)
                              )
    from geovote.api import get_comments
    query_id, comments_async = get_comments(request.user, instance=list, async=True)
    has_voted = Vote.objects.user_has_voted(request.user, list.key())
    vote_counter = Vote.objects.get_vote_counter(list.key())
    #comments = get_comments_list(request.user, list.id)
    top_comments = Comment.objects.get_top_voted(list, request.user)
    user_follower = list.has_follower(request.user)
    if not request.user.is_authenticated():
        pos = template.rfind('.html')
        template = template[:pos] + '_anonymous' + template[pos:]
    return render_to_response(template,
                                {'list': list,
                                 'has_voted': has_voted,
                                 'vote_counter': vote_counter,
                                 'user_follower': user_follower,
                                 'suggestions': load_suggestions_async(suggestions_async),
                                 'comments': Comment.objects.load_comments_from_async(query_id, comments_async, request.user),
                                 'top_comments': top_comments
                                },
                                context_instance=RequestContext(request)
                              )
Example #8
  def increment_counters(self, key, amount):
    backup = key + '_backup'
    counter1_future = db.get_async(db.Key.from_path('Counter', key))
    counter2_future = db.get_async(db.Key.from_path('Counter', backup))

    counter1 = counter1_future.get_result()
    counter2 = counter2_future.get_result()
    if counter1 is None:
      counter1 = Counter(key_name=key, counter=0)
      counter2 = Counter(key_name=backup, counter=0)

    for i in range(0,amount):
      counter1.counter += 1
      counter2.counter += 1
      if counter1.counter == 5:
        raise Exception('Mock Exception')
      counter1_future = db.put_async(counter1)
      counter2_future = db.put_async(counter2)
      counter1_future.get_result()
      counter2_future.get_result()
Example #9
class UserCounterHelper(object):
    def get_by_id(self, userid, async=False):
        try:
            userid = long(userid)
        except:
            return None
        key = db.Key.from_path(User.kind(), userid, UserCounter.kind(),
                               'counters_%s' % userid)
        if async:
            return db.get_async(key)
        return UserCounter.get(key)
Example #10
  def increment_counters(self, key, amount):
    backup = key + '_backup'
    counter1_future = db.get_async(db.Key.from_path('Counter', key))
    counter2_future = db.get_async(db.Key.from_path('Counter', backup))

    counter1 = counter1_future.get_result()
    counter2 = counter2_future.get_result()
    if counter1 is None:
      counter1 = Counter(key_name=key, counter=0)
      counter2 = Counter(key_name=backup, counter=0)

    for i in range(0,amount):
      counter1.counter += 1
      counter2.counter += 1
      if counter1.counter == 5:
        raise Exception('Mock Exception')
      counter1_future = db.put_async(counter1)
      counter2_future = db.put_async(counter2)
      counter1_future.get_result()
      counter2_future.get_result()
Example #11
  def increment_counter(self, key, amount):
    get_future = db.get_async(db.Key.from_path('Counter', key))
    counter = get_future.get_result()
    if counter is None:
      counter = Counter(key_name=key, counter=0)

    for i in range(0,amount):
      counter.counter += 1
      if counter.counter == 5:
        raise Exception('Mock Exception')
      put_future = db.put_async(counter)
      put_future.get_result()
Example #12
    def testAsyncPutGetDelete(self):
        """Tests asynchronously putting, getting and deleting entities."""

        class Person(db.Model):
            name = db.StringProperty()

        person = Person(name="Arthur")
        async = db.put_async(person)
        key = async.get_result()

        self.assertEqual(key, async.get_result())

        async = db.get_async(key)
        person = async.get_result()

        self.assertEqual("Arthur", person.name)

        async = db.delete_async(key)
        async.get_result()

        self.assertRaises(datastore_errors.EntityNotFoundError, datastore.Get, key)
Example #13
    def testAsyncPutGetDelete(self):
        """Tests asynchronously putting, getting and deleting entities."""
        class Person(db.Model):
            name = db.StringProperty()

        person = Person(name="Arthur")
        async = db.put_async(person)
        key = async.get_result()

        self.assertEqual(key, async.get_result())

        async = db.get_async(key)
        person = async.get_result()

        self.assertEqual("Arthur", person.name)

        async = db.delete_async(key)
        async.get_result()

        self.assertRaises(datastore_errors.EntityNotFoundError, datastore.Get,
                          key)
Example #14
  def increment_counter(self, key, amount):
    get_future = db.get_async(db.Key.from_path('Counter', key))
    counter = get_future.get_result()
    if counter is None:
      counter = Counter(key_name=key, counter=0)

    for i in range(0,amount):
      counter.counter += 1
      if counter.counter == 5:
        raise Exception('Mock Exception')
      put_future = db.put_async(counter)
      put_future.get_result()
Example #15
 def __init__(self, top_key=None, already_have=[]):
     self.top_key = top_key
     self.cache = { x.key(): x for x in already_have }
     self.prefetch_query = None
     self.new_keys_fetched = False
     if top_key:
         keys_to_prefetch = MemcacheManager.get(self._collection_key)
         if keys_to_prefetch:
             keys_to_prefetch = set(keys_to_prefetch).difference(map(str, self.cache.keys()))
             self.prefetch_query = db.get_async(keys_to_prefetch)
         else:
             self.new_keys_fetched = len(already_have) > 0
Example #16
 def get_friends(self):
     from models_acc import UserFollowingIndex
     indexes = UserFollowingIndex.all().ancestor(self.key())
     # load the full list of everyone the user is following
     followings = []
     followings.extend([following for index in indexes for following in index.following])
     friends = []
     for follow in followings:
         key = db.GqlQuery('SELECT __key__ FROM UserFollowingIndex WHERE ANCESTOR IS :1 AND following =:2', follow, self.key()).get()
         if key is not None:
             friends.append(follow)
     return db.get_async(friends)
Example #17
 def store_stats_qr_scans(counters):
     sid_rpcs = list()
     for (service_identity_email, sid), sid_counters in counters.iteritems():
         sid_key = db.Key.from_path(ServiceInteractionDef.kind(), sid,
                                    parent=parent_key(get_service_user_from_service_identity_user(users.User(service_identity_email))))
         sid_rpcs.append((db.get_async(sid_key), sid_counters))
     for sid_rpc, sid_counters in sid_rpcs:
         sid = sid_rpc.get_result()
         sid.totalScanCount += sid_counters["total"]
         sid.scannedFromRogerthatCount += sid_counters["from_rogerthat"]
         sid.scannedFromOutsideRogerthatOnSupportedPlatformCount += sid_counters['supported']
         sid.scannedFromOutsideRogerthatOnUnsupportedPlatformCount += sid_counters['unsupported']
         put_rpcs['qr_scans'].append(db.put_async(sid))
Example #18
 def __init__(self, top_key=None, already_have=[]):
     self.top_key = top_key
     self.cache = {x.key(): x for x in already_have}
     self.prefetch_query = None
     self.new_keys_fetched = False
     if top_key:
         keys_to_prefetch = MemcacheManager.get(self._collection_key)
         if keys_to_prefetch:
             keys_to_prefetch = set(keys_to_prefetch).difference(
                 map(str, self.cache.keys()))
             self.prefetch_query = db.get_async(keys_to_prefetch)
         else:
             self.new_keys_fetched = len(already_have) > 0
Example #19
def get_existing_profiles_via_facebook_ids(facebook_ids, app_id=App.APP_ID_ROGERTHAT):
    fpps = (db.Key.from_path(FacebookProfilePointer.kind(), str(fid)) for fid in facebook_ids)
    i = 0
    lst = []
    qrys = []
    for fpp in fpps:
        i += 1
        lst.append(fpp)
        if i == 10:
            qrys.append(db.get_async(lst))
            lst = []
            i = 0
    if i > 0:
        qrys.append(db.get_async(lst))
    def list_sequential():
        for qry in qrys:
            for item in qry.get_result():
                yield item
    matches = []
    for fpp in list_sequential():
        if fpp and get_app_id_from_app_user(fpp.user) == app_id:
            matches.append((fpp.facebookId, fpp.user))
    return matches
Example #20
 def get_friends(self):
     from models_acc import UserFollowingIndex
     indexes = UserFollowingIndex.all().ancestor(self.key())
     # load the full list of everyone the user is following
     followings = []
     followings.extend(
         [following for index in indexes for following in index.following])
     friends = []
     for follow in followings:
         key = db.GqlQuery(
             'SELECT __key__ FROM UserFollowingIndex WHERE ANCESTOR IS :1 AND following =:2',
             follow, self.key()).get()
         if key is not None:
             friends.append(follow)
     return db.get_async(friends)
Example #21
    def store_broadcast_stats(counters):
        broadcast_stats_rpcs = list()
        for (broadcast_guid), broadcast_counters in counters.iteritems():
            service_identity_user = users.User(broadcast_counters["service_identity_user_email"])
            broadcast_statistic_key = BroadcastStatistic.create_key(broadcast_guid, service_identity_user)
            broadcast_stats_rpcs.append((db.get_async(broadcast_statistic_key), broadcast_counters, broadcast_statistic_key, broadcast_guid, service_identity_user))
        for broadcast_stat_rpc, broadcast_counters, broadcast_statistic_key, broadcast_guid, service_identity_user in broadcast_stats_rpcs:
            broadcast_statistic = broadcast_stat_rpc.get_result()
            if not broadcast_statistic:
                broadcast_statistic = BroadcastStatistic(key=broadcast_statistic_key, timestamp=now())

            broadcast_statistic.sent = (broadcast_statistic.sent or 0) + broadcast_counters[BROADCAST_STATS_SENT]
            broadcast_statistic.received = (broadcast_statistic.received or 0) + broadcast_counters[BROADCAST_STATS_RECEIVED]
            broadcast_statistic.read = (broadcast_statistic.read or 0) + broadcast_counters[BROADCAST_STATS_READ]

            put_rpcs.append(db.put_async(broadcast_statistic))
Example #22
class StoreNode(db.Model):
    value = db.TextProperty()
    # These aren't readable just yet
    collection = db.StringProperty()
    created = db.DateTimeProperty(auto_now_add=True)
    updated = db.DateTimeProperty(auto_now=True)

    @classmethod
    def get(cls, key, async=True):
        if async:
            return AsyncValue(db.get_async(key))
        else:
            model = cls.get_by_key_name(key)
            if model is None:
                raise KeyError
            return model.value
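The async branch above returns an AsyncValue wrapper that is not shown in the snippet. A minimal sketch of what such a wrapper might look like, assuming it simply defers resolving the datastore RPC and mirrors the KeyError behaviour of the synchronous branch:

class AsyncValue(object):
    """Sketch only: lazily resolves a db.get_async() RPC on first access."""

    def __init__(self, rpc):
        self._rpc = rpc

    def get(self):
        model = self._rpc.get_result()
        if model is None:
            raise KeyError
        return model.value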
Example #23
 def kick(self, registration_id, priority):
     settings = get_server_settings()
     rpc_item = urlfetch.create_rpc(5, None)
     collapse_key = "rogerthat" if priority == PRIORITY_NORMAL else "rogerthat_high_prio"
     registration_ids = [registration_id] if not isinstance(registration_id, list) else registration_id
     data = dict(registration_ids=registration_ids, collapse_key=collapse_key)
     if priority == PRIORITY_NORMAL:
         # There is no guarantee this message will ever reach the device
         # but in order to avoid throttling of kicks while the user is actively using
         # Rogerthat we add time_to_live = 0
         data['time_to_live'] = 0
         self.outstandingKicks.append((db.get_async(OutStandingGCMKick.createKey(registration_id)), registration_id))
     data = json.dumps(data)
     url = "https://android.googleapis.com/gcm/send"
     urlfetch.make_fetch_call(rpc=rpc_item, url=url,
                              payload=data, method="POST", headers={'Content-Type':'application/json', 'Authorization': 'key=%s' % settings.gcmKey},
                              allow_truncated=False, follow_redirects=False, validate_certificate=False)
     self.items.append((rpc_item, time.time(), registration_id))
Example #24
  def get(self):
    key1 = db.Key.from_path('Text', self.request.get('key1'))
    key2 = db.Key.from_path('Text', self.request.get('key2'))
    key3 = db.Key.from_path('Text', 'definitelyDoesNotExist')

    async_get = db.get_async([key1, key2, key3])
    text1, text2, text3 = async_get.get_result()

    if text3:
      text3_is_none = False
    else:
      text3_is_none = True

    self.response.out.write(json.dumps({
      'success' : True,
      'val1' : text1.text,
      'val2' : text2.text,
      'val3_is_None' : text3_is_none
    }))
Example #25
  def get(self):
    key1 = db.Key.from_path('Text', self.request.get('key1'))
    key2 = db.Key.from_path('Text', self.request.get('key2'))
    key3 = db.Key.from_path('Text', 'definitelyDoesNotExist')

    async_get = db.get_async([key1, key2, key3])
    text1, text2, text3 = async_get.get_result()

    if text3:
      text3_is_none = False
    else:
      text3_is_none = True

    self.response.out.write(json.dumps({
      'success' : True,
      'val1' : text1.text,
      'val2' : text2.text,
      'val3_is_None' : text3_is_none
    }))
Example #26
 def kick(self, registration_id, priority, push_data=None, is_gcm=False):
     if not push_data:
         push_data = PushData()
     collapse_key = "rogerthat" if priority == PRIORITY_NORMAL else "rogerthat_high_prio"
     priority_string = "normal" if priority == PRIORITY_NORMAL else "high"
     registration_ids = [registration_id] if not isinstance(registration_id, list) else registration_id
     data = {
         'registration_ids': registration_ids,
         'collapse_key': collapse_key,
         'priority': priority_string
     }
     data.update(push_data.to_dict())
     if priority == PRIORITY_NORMAL:
         # There is no guarantee this message will ever reach the device
         # but in order to avoid throttling of kicks while the user is actively using
         # Rogerthat we add time_to_live = 0
         data['time_to_live'] = 0
         self.outstandingKicks.append(
             (db.get_async(OutStandingFirebaseKick.createKey(registration_id)), registration_id))
     rpc_item = create_firebase_request(data, is_gcm=is_gcm)
     self.items.append((rpc_item, time.time(), registration_id, data, is_gcm))
Example #27
  def get(self):
    key = self.request.get('key')
    amount = self.request.get('amount')
    xg = self.request.get('xg')
    if xg is not None and xg == 'true':
      try:
        xg_on = db.create_transaction_options(xg=True)
        db.run_in_transaction_options(xg_on,
          self.increment_counters, key, int(amount))
        counter1_future = db.get_async(db.Key.from_path('Counter', key))
        counter2_future = db.get_async(db.Key.from_path('Counter', 
          key + '_backup'))

        counter1 = counter1_future.get_result()
        counter2 = counter2_future.get_result()
        status = {
          'success' : True,
          'counter' : counter1.counter,
          'backup' : counter2.counter
        }
      except Exception:
        counter1_future = db.get_async(db.Key.from_path('Counter', key))
        counter2_future = db.get_async(db.Key.from_path('Counter',
          key + '_backup'))

        counter1 = counter1_future.get_result()
        counter2 = counter2_future.get_result()
        status = {
          'success' : False,
          'counter' : counter1.counter,
          'backup' : counter2.counter
        }
    else:
      try:
        db.run_in_transaction(self.increment_counter, key, int(amount))
        counter_future = db.get_async(db.Key.from_path('Counter', key))
        counter = counter_future.get_result()
        status = { 'success' : True, 'counter' : counter.counter }
      except Exception:
        counter_future = db.get_async(db.Key.from_path('Counter', key))
        counter = counter_future.get_result()
        status = { 'success' : False, 'counter' : counter.counter }
    self.response.headers['Content-Type'] = "application/json"
    self.response.out.write(json.dumps(status))
Example #28
  def get(self):
    key = self.request.get('key')
    amount = self.request.get('amount')
    xg = self.request.get('xg')
    if xg is not None and xg == 'true':
      try:
        xg_on = db.create_transaction_options(xg=True)
        db.run_in_transaction_options(xg_on,
          self.increment_counters, key, int(amount))
        counter1_future = db.get_async(db.Key.from_path('Counter', key))
        counter2_future = db.get_async(db.Key.from_path('Counter', 
          key + '_backup'))

        counter1 = counter1_future.get_result()
        counter2 = counter2_future.get_result()
        status = {
          'success' : True,
          'counter' : counter1.counter,
          'backup' : counter2.counter
        }
      except Exception:
        counter1_future = db.get_async(db.Key.from_path('Counter', key))
        counter2_future = db.get_async(db.Key.from_path('Counter',
          key + '_backup'))

        counter1 = counter1_future.get_result()
        counter2 = counter2_future.get_result()
        status = {
          'success' : False,
          'counter' : counter1.counter,
          'backup' : counter2.counter
        }
    else:
      try:
        db.run_in_transaction(self.increment_counter, key, int(amount))
        counter_future = db.get_async(db.Key.from_path('Counter', key))
        counter = counter_future.get_result()
        status = { 'success' : True, 'counter' : counter.counter }
      except Exception:
        counter_future = db.get_async(db.Key.from_path('Counter', key))
        counter = counter_future.get_result()
        status = { 'success' : False, 'counter' : counter.counter }
    self.response.headers['Content-Type'] = "application/json"
    self.response.out.write(json.dumps(status))
Example #29
def change_suggestion_to_list(querier, timeline_id, status):
    """
        Accept or reject adding a suggestion to a list
    """
    if status not in (0, 1, 2):
        return False
    from google.appengine.api import datastore
    from google.appengine.ext import db
    timeline = datastore.Get(db.Key.from_path('UserTimelineBase', timeline_id))
    if timeline is None:
        return None
    if status == 1:
        list = db.get_async(timeline['list'])
        timeline['status'] = status
        p = datastore.PutAsync(timeline)
        list = list.get_result()
        if list is None:
            return None
        list.update(instances=[timeline['instance'].id()])
        p.get_result()
    else:
        timeline['status'] = status
        p = datastore.Put(timeline)
    return True
Example #30
 def _tx(index_key, user_key):
     index = db.get_async(index_key)
     index = index.get_result()
     index.keys.remove(user_key)
     index.count -= 1
     db.put_async([index])
Example #31
    def get(self):
        """
        Render the index page. Currently this renders a 'Coming soon' landing
        page that will eventually be replaced with a proper home page.
        """
        user = self.user

        if user:
            # Try to get rendered output from memcache
            rendered = memcache.get('dashboard-' + user.user_id)
            if rendered and not settings.DEBUG:
                return self.response.out.write(rendered)

            # Fetch following users
            following = user.following_users\
                            .order('name')\
                            .fetch(100)

            user_keys = [user.key()] + [u.key() for u in following]

            # Start async fetch of top recipes
            top_recipes = Recipe.all()\
                                .filter('owner IN', user_keys)\
                                .order('-likes_count')\
                                .run(limit=15)

            # Get and process interesting events
            interesting_events = UserAction.all()\
                                           .filter('owner IN', user_keys)\
                                           .order('-created')\
                                           .fetch(15)

            object_ids = UserAction.gather_object_ids(interesting_events)
            object_ids['users'] = [id for id in object_ids['users'] if id not in [user.key().id()] + user.following]

            # Start async fetch of relevant recipes
            recipes = db.get_async([Key.from_path('Recipe', id) for id in object_ids['recipes']])

            # Convert iterators to lists of items in memory and set up a map
            # of user id -> user for easy lookups
            following = list(following)
            top_recipes = list(top_recipes)

            user_map = {
                user.key().id(): user
            }

            for u in following:
                user_map[u.key().id()] = u

            if object_ids['users']:
                for u in UserPrefs.get_by_id(object_ids['users']):
                    user_map[u.key().id()] = u

            # Setup a map of recipe id -> recipe for easy lookups
            recipe_map = {}

            for r in recipes.get_result():
                recipe_map[r.key().id()] = r

            # Render and cache for 1 minute
            memcache.set('dashboard-' + user.user_id, self.render('dashboard.html', {
                'following': following,
                'user_map': user_map,
                'recipe_map': recipe_map,
                'top_recipes': top_recipes,
                'interesting_events': interesting_events
            }), 60)
        else:
            # Try to get rendered output from memcache
            rendered = memcache.get('index')
            if rendered and not settings.DEBUG:
                return self.response.out.write(rendered)

            recipes = Recipe.all()\
                            .order('-likes_count')\
                            .run(limit=15)

            # Render and cache for 15 minutes
            memcache.set('index', self.render('index.html', {
                'recipes': recipes
            }), 900)
Example #32
def view_list(request, id, template='generic/view_list.html'):
    def load_suggestions_async(suggestions):
        suggestions = suggestions.get_result()
        from georemindme.funcs import prefetch_refprops
        from geoalert.models import Suggestion
        suggestions = prefetch_refprops(suggestions, Suggestion.user,
                                        Suggestion.poi)
        suggestions_loaded = []
        for suggestion in suggestions:
            suggestions_loaded.append({
                'instance':
                suggestion,
                'has_voted':
                Vote.objects.user_has_voted(request.user, suggestion.key())
                if request.user.is_authenticated() else False,
                'vote_counter':
                Vote.objects.get_vote_counter(suggestion.key())
            })
        return suggestions_loaded

    list = ListSuggestion.objects.get_by_id_querier(id, request.user)
    if list is None:
        raise Http404
    from google.appengine.ext import db
    from geoalert.models import Event
    from geovote.models import Vote, Comment
    suggestions_async = db.get_async(list.keys)
    if 'print' in request.GET:
        top_comments = Comment.objects.get_top_voted(list, request.user)
        vote_counter = Vote.objects.get_vote_counter(list.key())
        return render_to_response(
            'print/view_list.html', {
                'list': list,
                'suggestions': load_suggestions_async(suggestions_async),
                'vote_counter': vote_counter,
                'top_comments': top_comments,
            },
            context_instance=RequestContext(request))
    from geovote.api import get_comments
    query_id, comments_async = get_comments(request.user,
                                            instance=list,
                                            async=True)
    has_voted = Vote.objects.user_has_voted(request.user, list.key())
    vote_counter = Vote.objects.get_vote_counter(list.key())
    #comments = get_comments_list(request.user, list.id)
    top_comments = Comment.objects.get_top_voted(list, request.user)
    user_follower = list.has_follower(request.user)
    if not request.user.is_authenticated():
        pos = template.rfind('.html')
        template = template[:pos] + '_anonymous' + template[pos:]
    return render_to_response(template, {
        'list':
        list,
        'has_voted':
        has_voted,
        'vote_counter':
        vote_counter,
        'user_follower':
        user_follower,
        'suggestions':
        load_suggestions_async(suggestions_async),
        'comments':
        Comment.objects.load_comments_from_async(query_id, comments_async,
                                                 request.user),
        'top_comments':
        top_comments
    },
                              context_instance=RequestContext(request))
Example #33
 def begin_getitem(self, commit):
     key = db.Key.from_path('Commit', commit)
     return db.get_async(key)
Example #34
 def get_async(cls, key):  # pragma: no cover
   return db.get_async(key)
Example #35
    def get(self):
        """
        Render the index page. Currently this renders a 'Coming soon' landing
        page that will eventually be replaced with a proper home page.
        """
        user = self.user

        if user:
            # Try to get rendered output from memcache
            rendered = memcache.get('dashboard-' + user.user_id)
            if rendered and not settings.DEBUG:
                return self.response.out.write(rendered)

            # Fetch following users
            following = user.following_users\
                            .order('name')\
                            .fetch(100)

            user_keys = [user.key()] + [u.key() for u in following]

            # Start async fetch of top recipes
            top_recipes = Recipe.all()\
                                .filter('owner IN', user_keys)\
                                .order('-likes_count')\
                                .run(limit=15)

            # Get and process interesting events
            interesting_events = UserAction.all()\
                                           .filter('owner IN', user_keys)\
                                           .order('-created')\
                                           .fetch(15)

            object_ids = UserAction.gather_object_ids(interesting_events)
            object_ids['users'] = [
                id for id in object_ids['users']
                if id not in [user.key().id()] + user.following
            ]

            # Start async fetch of relevant recipes
            recipes = db.get_async(
                [Key.from_path('Recipe', id) for id in object_ids['recipes']])

            # Convert iterators to lists of items in memory and set up a map
            # of user id -> user for easy lookups
            following = list(following)
            top_recipes = list(top_recipes)

            user_map = {user.key().id(): user}

            for u in following:
                user_map[u.key().id()] = u

            if object_ids['users']:
                for u in UserPrefs.get_by_id(object_ids['users']):
                    user_map[u.key().id()] = u

            # Setup a map of recipe id -> recipe for easy lookups
            recipe_map = {}

            for r in recipes.get_result():
                recipe_map[r.key().id()] = r

            # Render and cache for 1 minute
            memcache.set(
                'dashboard-' + user.user_id,
                self.render(
                    'dashboard.html', {
                        'following': following,
                        'user_map': user_map,
                        'recipe_map': recipe_map,
                        'top_recipes': top_recipes,
                        'interesting_events': interesting_events
                    }), 60)
        else:
            # Try to get rendered output from memcache
            rendered = memcache.get('index')
            if rendered and not settings.DEBUG:
                return self.response.out.write(rendered)

            recipes = Recipe.all()\
                            .order('-likes_count')\
                            .run(limit=15)

            # Render and cache for 15 minutes
            memcache.set('index',
                         self.render('index.html', {'recipes': recipes}), 900)
Example #36
 def begin_getitems(self, filesets):
     keys = [db.Key.from_path('FileSet', x) for x in filesets]
     return db.get_async(keys)
Example #37
    def post(self):
        global global_dict
        #global_dict = {}
        global global_sync
        global locks
        global last_write
        #global sync_lock

        #        starttime = time.time()
        results = json.loads(self.request.body)

        rumble = results.get("game", "ERROR")

        syncname = str(
            bool(results.get("melee") == "YES")) + "|" + structures.sync
        if syncname not in locks:
            locks[syncname] = threading.Lock()
        sync_lock = locks[syncname]

        global_sync[syncname] = global_sync.get(syncname, {})
        botsync = global_sync[syncname]

        bota = results.get("fname")
        botb = results.get("sname")
        #bota = rreplace(bota,"_"," ",1)
        #botb = rreplace(botb,"_"," ",1)
        logging.debug("Bots : " + bota + " vs. " + botb)

        bd = [[bota, rumble], [botb, rumble]]

        botHashes = [string.join(a, "|").encode('ascii') for a in bd]

        memget = [rumble]
        memget.extend(botHashes)  #botHashes.append(rumble)
        memdict = memcache.get_multi(memget)

        game = memdict.get(rumble, None)
        game_future = None
        if game is None:
            #game = structures.Rumble.get_by_key_name(rumble)
            game_future = db.get_async(db.Key.from_path('Rumble', rumble))

        newBot = False

        bots = [memdict.get(h, None) for h in botHashes]

        pairingsarray = [[], []]
        botFutures = [None, None]
        for i in [0, 1]:
            if bots[i] is None or bots[i].PairingsList is None:
                botFutures[i] = db.get_async(
                    db.Key.from_path('BotEntry', botHashes[i]))

        for i in [0, 1]:
            if botFutures[i] is not None:
                modelbot = botFutures[i].get_result()
                if modelbot is not None:
                    bots[i] = structures.CachedBotEntry(modelbot)
                    #logging.debug("retrieved from database")

        for i in [0, 1]:
            if bots[i] is None:
                modelbot = structures.BotEntry(key_name=botHashes[i],
                                               Name=bd[i][0],
                                               Battles=0,
                                               Pairings=0,
                                               APS=0.0,
                                               Survival=0.0,
                                               PL=0,
                                               Rumble=rumble,
                                               Active=False,
                                               PairingsList=zlib.compress(
                                                   pickle.dumps([]), 1))
                bots[i] = structures.CachedBotEntry(modelbot)
                newBot = True
            if isinstance(bots[i], structures.BotEntry):
                bots[i] = structures.CachedBotEntry(bots[i])

            try:
                pairingsarray[i] = pickle.loads(
                    zlib.decompress(bots[i].PairingsList))
                bots[i].PairingsList = None
            except:
                try:
                    pairsDicts = marshal.loads(
                        zlib.decompress(bots[i].PairingsList))

                    pairingsarray[i] = [
                        structures.ScoreSet() for _ in pairsDicts
                    ]
                    for s, d in zip(pairingsarray[i], pairsDicts):
                        s.__dict__.update(d)
                    bots[i].PairingsList = None
                except:
                    pairingsarray[i] = []

        if game_future is not None:
            game = game_future.get_result()

        if game is None:
            game = structures.Rumble(key_name=rumble,
                                     Name=rumble,
                                     Rounds=int(results["rounds"]),
                                     Field=results["field"],
                                     Melee=bool(results["melee"] == "YES"),
                                     Teams=bool(results["teams"] == "YES"),
                                     TotalUploads=0,
                                     MeleeSize=10,
                                     ParticipantsScores=db.Blob(
                                         zlib.compress(pickle.dumps([]))))
            self.response.out.write("CREATED NEW GAME TYPE " + rumble + "\n")

            logging.info("Created new game type: " + rumble)
        else:
            field = game.Field == results["field"]
            rounds = (game.Rounds == int(results["rounds"]))
            teams = game.Teams == bool(results["teams"] == "YES")
            melee = game.Melee == bool(results["melee"] == "YES")
            allowed = field and rounds and teams and melee
            if not allowed:
                errstr = "OK. ERROR. Incorrect " + rumble + " config: "
                errorReasons = []
                if not field:
                    errorReasons.append("field size ")
                if not rounds:
                    errorReasons.append("number of rounds ")
                if not teams:
                    errorReasons.append("teams ")
                if not melee:
                    errorReasons.append("melee ")
                logging.error(errstr + string.join(errorReasons, ", ") +
                              "  User: " + results["user"])

                return
        scores = None

        try:
            scores = pickle.loads(zlib.decompress(game.ParticipantsScores))
            game.ParticipantsScores = None
            if len(scores) == 0:
                scores = {}
        except:
            scoresdicts = marshal.loads(
                zlib.decompress(game.ParticipantsScores))
            game.ParticipantsScores = None
            scoreslist = [structures.LiteBot(loadDict=d) for d in scoresdicts]
            #for s,d in zip(scoreslist,scoresdicts):
            #    s.__dict__.update(d)
            scores = {s.Name: s for s in scoreslist}
            if len(scores) == 0:
                scores = {}

            #logging.debug("uncompressed scores: " + str(len(s)) + "   compressed: " + str(a))

        for i in [0, 1]:

            if not bots[i].Active or bots[i].Name not in scores:
                bots[i].Active = True
                scores[bots[i].Name] = structures.LiteBot(bots[i])
                newBot = True
                self.response.out.write("Added " + bd[i][0] + " to " + rumble +
                                        "\n")
                logging.info("added new bot!")

        #retrievetime = time.time() - starttime

        scorea = float(results["fscore"])
        scoreb = float(results["sscore"])

        if scorea + scoreb > 0:
            APSa = 100 * scorea / (scorea + scoreb)
        else:
            APSa = 50  #register a 0/0 as 50%
        #APSb = 100 - APSa

        survivala = float(results["fsurvival"])
        survivalb = float(results["ssurvival"])

        survivala = 100.0 * survivala / game.Rounds
        survivalb = 100.0 * survivalb / game.Rounds

        for b, pairings in zip(bots, pairingsarray):

            #b.PairingsList = zlib.compress(marshal.dumps([s.__dict__ for s in pairings]),1)

            if len(pairings) > 0:
                removes = []
                for p in pairings:
                    try:
                        p.APS = float(p.APS)
                        p.Survival = float(p.Survival)
                        p.Battles = int(p.Battles)
                    except:
                        removes.append(pairings.index(p))
                        continue

                removes.sort(reverse=True)
                for i in removes:
                    pairings.pop(i)

        apair = None
        for p in pairingsarray[0]:
            if p.Name == botb:
                apair = p
        if apair is None:
            apair = structures.ScoreSet(name=botb)
            pairingsarray[0].append(apair)

        bpair = None
        for p in pairingsarray[1]:
            if p.Name == bota:
                bpair = p
        if bpair is None:
            bpair = structures.ScoreSet(name=bota)
            pairingsarray[1].append(bpair)

        #participantsSet = set(game.Participants)

        for b, pairings in zip(bots, pairingsarray):
            i = 0
            while i < len(pairings):
                if pairings[i].Name == b.Name:
                    pairings.pop(i)
                    continue
                if not hasattr(pairings[i], "Alive"):
                    pairings[i].Alive = True

                if pairings[i].Alive and pairings[i].Name not in scores:
                    pairings[i].Alive = False

                i += 1
            #b.Pairings = i

        aBattles = apair.Battles

        #rolling average with a half-life of 10k
        maxPerPair = 10000 / len(bots)
        if aBattles > maxPerPair:
            aBattles = maxPerPair

        #bBattles = bpair.Battles

        inv_ab = 1.0 / (aBattles + 1.0)

        apair.APS *= float(aBattles) * inv_ab
        apair.APS += APSa * inv_ab
        apair.__dict__["Min_APS"] = min(APSa,
                                        apair.__dict__.get("Min_APS", 100))
        #bpair.APS *= float(bBattles)/(bBattles + 1)
        #bpair.APS += APSb/(bBattles+1)
        bpair.APS = 100 - apair.APS
        bpair.__dict__["Min_APS"] = min(100 - APSa,
                                        bpair.__dict__.get("Min_APS", 100))

        apair.Survival *= float(aBattles) * inv_ab
        apair.Survival += survivala * inv_ab

        bpair.Survival *= float(aBattles) * inv_ab
        bpair.Survival += survivalb * inv_ab

        apair.Battles += 1
        bpair.Battles = apair.Battles

        apair.LastUpload = datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S")
        bpair.LastUpload = apair.LastUpload

        for b, pairings in zip(bots, pairingsarray):
            aps = 0.0
            survival = 0.0
            pl = 0
            battles = 0
            alivePairings = 0
            if len(pairings) > 0:

                for p in pairings:
                    if p.Alive:
                        aps += p.APS
                        survival += p.Survival
                        if p.APS > 50:
                            pl += 1
                        else:
                            pl -= 1

                        battles += p.Battles
                        alivePairings += 1

                aps /= alivePairings
                survival /= alivePairings
                b.APS = aps
                b.Survival = survival
                b.PL = pl
                b.Battles = battles

            b.PairingsList = db.Blob(
                zlib.compress(pickle.dumps(pairings, pickle.HIGHEST_PROTOCOL),
                              1))
            b.LastUpload = apair.LastUpload
            b.Pairings = alivePairings

        game.TotalUploads += 1

        #self.response.out.write("<" + str(bots[0].Battles) + " " + str(bots[1].Battles) + ">")
        game.LastUpload = apair.LastUpload
        game.AvgBattles = game.AvgBattles * 0.99 + 0.005 * (bots[0].Battles +
                                                            bots[1].Battles)
        if game.Uploaders is None:
            uploaders = None
        else:
            uploaders = pickle.loads(zlib.decompress(game.Uploaders))
        if uploaders is None or len(uploaders) == 0:
            uploaders = {}
        uploaderName = results["user"]

        try:
            uploader = uploaders[uploaderName]
            uploader.latest = apair.LastUpload
            uploader.total += 1
        except KeyError:
            uploader = structures.User(name=uploaderName)
            uploaders[uploaderName] = uploader
        game.Uploaders = zlib.compress(pickle.dumps(uploaders, -1), 1)

        for b in bots:
            try:
                bUploaders = b.Uploaders
                if uploaderName not in bUploaders:
                    bUploaders.append(uploaderName)
            except:
                b.__dict__["Uploaders"] = [uploaderName]

        with sync_lock:
            for b in bots:
                key = None
                if isinstance(b, structures.BotEntry):
                    key = b.key().name()
                else:
                    key = b.key_name
                botsync[key] = botsync.get(key, 0) + 1

            minSize = min(10, len(scores) / 2)
            wrote = False

            logging.debug("botsync: " + str(len(botsync)))

            if len(botsync) > minSize:
                syncset = botsync.keys()

                syncbotsDict = memcache.get_multi(syncset)

                syncbots = []
                for sb in syncset:
                    b = syncbotsDict.get(sb, None)

                    if b is None or b.PairingsList is None:
                        syncbotsDict.pop(sb, 1)
                    else:
                        syncbots.append(b)

                    botsync.pop(sb, 1)

                try:
                    thisput = []
                    while len(syncbots) > 0:
                        b = syncbots.pop()

                        key = None
                        if isinstance(b, structures.BotEntry):
                            key = b.key().name()
                        else:
                            key = b.key_name

                        putb = structures.BotEntry(key_name=key)
                        putb.init_from_cache(b)
                        thisput.append(putb)

                    db.put(thisput)

                    logging.info("wrote " + str(len(thisput)) +
                                 " results to database")
                    for b in thisput:
                        s = b.key().name()
                        botsync.pop(s, 1)
                        syncbotsDict.pop(s, 1)
                    wrote = True

                except Exception, e:
                    logging.error('Failed to write data: ' + str(e))
Example #38
 def begin_getitems(self, commits):
     keys = [db.Key.from_path('Commit', x) for x in commits]
     return db.get_async(keys)
Example #39
 def _tx(index_key, user_key):
     index = db.get_async(index_key)
     index = index.get_result()
     index.keys.remove(user_key)
     index.count -= 1
     db.put_async([index])
Example #40
 def get_async(cls, key):  # pragma: no cover
     return db.get_async(key)
Example #41
 def begin_getitems(self, metricnames):
     keys = [db.Key.from_path('Metric', x) for x in metricnames]
     return db.get_async(keys)
Example #42
 def begin_getitem(self, commit):
     key = db.Key.from_path('Commit', commit)
     return db.get_async(key)
Example #43
 def no_tx_get_async(i):
   return db.get_async(keys[i:i+cls._MAX_STATES_IN_MEMORY])
Example #44
    def store_service_user_stats(counters):
        sid_rpcs = list()
        for (service_identity_user_email), sid_counters in counters.iteritems():
            service_identity_user = users.User(service_identity_user_email)
            sid_key = ServiceIdentityStatistic.create_key(service_identity_user)
            sid_rpcs.append((db.get_async(sid_key), sid_counters, sid_key, service_identity_user))
        for sid_rpc, sid_counters, sid_key, service_identity_user in sid_rpcs:
            sid = sid_rpc.get_result()
            new_sid = False
            if not sid:
                new_sid = True
                sid = ServiceIdentityStatistic(key=sid_key)
                sid.users_gained = list()
                sid.users_lost = list()
                sid.last_ten_users_gained = list()
                sid.last_ten_users_lost = list()
                sid.recommends_via_rogerthat = list()
                sid.recommends_via_email = list()
                sid.mip_labels = list()

            now_ = datetime.datetime.utcnow()
            today = int("%d%02d%02d" % (now_.year, now_.month, now_.day))
            if today != sid.last_entry_day:

                if new_sid:
                    add_days = 1
                else:
                    tmp = str(sid.last_entry_day)
                    start = datetime.date(int(tmp[0:4]), int(tmp[4:6]), int(tmp[6:8]))
                    end = datetime.date(now_.year, now_.month, now_.day)
                    delta = end - start
                    add_days = delta.days

                sid.last_entry_day = today

                def do(lst):
                    for _ in xrange(add_days):
                        lst.append(0)
                        if len(lst) > 1000:
                            lst.pop(0)

                do(sid.users_gained)
                do(sid.users_lost)
                do(sid.recommends_via_rogerthat)
                do(sid.recommends_via_email)
                for i in xrange(len(sid.mip_labels)):
                    do(getattr(sid, 'mip_%s' % i))

            gained = sid_counters[SERVICE_STATS_TYPE_GAINED]
            if new_sid:
                sid.number_of_users = count_users_connected_to_service_identity(service_identity_user)

            sid.last_ten_users_gained.extend(gained)
            sid.last_ten_users_gained = sid.last_ten_users_gained[-10:]
            sid.users_gained[-1] = sid.users_gained[-1] + len(gained)

            lost = sid_counters[SERVICE_STATS_TYPE_LOST]
            sid.last_ten_users_lost.extend(lost)
            sid.last_ten_users_lost = sid.last_ten_users_lost[-10:]
            sid.users_lost[-1] = sid.users_lost[-1] + len(lost)

            recommendsViaRogerthat = sid_counters[SERVICE_STATS_TYPE_RECOMMEND_VIA_ROGERTHAT]
            recommendsViaEmail = sid_counters[SERVICE_STATS_TYPE_RECOMMEND_VIA_EMAIL]

            sid.recommends_via_rogerthat[-1] = sid.recommends_via_rogerthat[-1] + len(recommendsViaRogerthat)
            sid.recommends_via_email[-1] = sid.recommends_via_email[-1] + len(recommendsViaEmail)

            if not new_sid:
                sid.number_of_users = sid.number_of_users + len(gained) - len(lost)

            sid.gained_last_week = sum(sid.users_gained[-7:])
            sid.lost_last_week = sum(sid.users_lost[-7:])

            for x in sid_counters[SERVICE_STATS_TYPE_MIP]:
                if x not in sid.mip_labels:
                    sid.mip_labels.append(x)
                    i = sid.mip_labels.index(x)
                    l = list()
                    l.append(1)
                    setattr(sid, 'mip_%s' % i, l)
                else:
                    i = sid.mip_labels.index(x)
                    l = getattr(sid, 'mip_%s' % i)
                    l[-1] = l[-1] + 1
                    setattr(sid, 'mip_%s' % i, l)

            put_rpcs['service_user_stats'].append(db.put_async(sid))
Example #45
 def get_async(cls, key):
     return db.get_async(key)
Example #46
 def no_tx_get_async(i):
     return db.get_async(keys[i:i + cls._MAX_STATES_IN_MEMORY])
Example #47
    def get(self):
        user = self.user

        # Try to get rendered output from memcache
        rendered = memcache.get('dashboard-' + user.user_id)
        if rendered and not settings.DEBUG:
            return self.response.out.write(rendered)

        # Fetch following users
        following = user.following_users\
                        .order('name')\
                        .fetch(100)

        user_keys = [user.key()] + [u.key() for u in following]

        # Start async fetch of top recipes
        top_recipes = Recipe.all()\
                            .filter('owner IN', user_keys)\
                            .order('-grade')\
                            .run(limit=15)

        # Get and process interesting events
        interesting_events = UserAction.all()\
                                       .filter('owner IN', user_keys)\
                                       .order('-created')\
                                       .fetch(15)

        object_ids = UserAction.gather_object_ids(interesting_events)
        object_ids['users'] = [
            id for id in object_ids['users']
            if id not in [user.key().id()] + user.following
        ]

        # Start async fetch of relevant recipes
        recipes = db.get_async(
            [Key.from_path('Recipe', id) for id in object_ids['recipes']])

        # Start async fetch of relevant brews
        brews = db.get_async(
            [Key.from_path('Brew', id) for id in object_ids['brews']])

        # Convert iterators to lists of items in memory and set up a map
        # of user id -> user for easy lookups
        following = list(following)
        top_recipes = list(top_recipes)

        user_map = {user.key().id(): user}

        for u in following:
            user_map[u.key().id()] = u

        if object_ids['users']:
            for u in UserPrefs.get_by_id(object_ids['users']):
                user_map[u.key().id()] = u

        # Setup a map of brew id -> brew for easy lookups
        brew_map = {}
        brew_recipe_ids = set()

        for b in brews.get_result():
            brew_recipe_ids.add(b.recipe_key.id())
            brew_map[b.key().id()] = b

        # Async fetch of any recipes brews reference that weren't
        # included in the recipe fetch above...
        brew_recipes = db.get_async([
            Key.from_path('Recipe', id) for id in brew_recipe_ids
            if id not in object_ids['recipes']
        ])

        # Setup a map of recipe id -> recipe for easy lookups
        recipe_map = {}

        for r in recipes.get_result():
            recipe_map[r.key().id()] = r

        for r in brew_recipes.get_result():
            recipe_map[r.key().id()] = r

        # Render and cache for 1 minute
        memcache.set(
            'dashboard-' + user.user_id,
            self.render(
                'dashboard.html', {
                    'following': following,
                    'user_map': user_map,
                    'recipe_map': recipe_map,
                    'brew_map': brew_map,
                    'top_recipes': top_recipes,
                    'interesting_events': interesting_events
                }), self.CACHE_TIME)
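Example #47 demonstrates the main payoff of db.get_async(): the recipe and brew fetches are started before the synchronous user-map work, and get_result() is only called once the entities are actually needed, so datastore latency overlaps local processing. Stripped down to that pattern alone, a sketch might look like this (the function and argument names are placeholders, not part of the handler):

    from google.appengine.ext import db

    def fetch_dashboard_data(recipe_keys, brew_keys, build_user_map):
        # Hypothetical distillation of the pattern used above.
        recipes_rpc = db.get_async(recipe_keys)   # both RPCs start immediately...
        brews_rpc = db.get_async(brew_keys)
        user_map = build_user_map()               # ...so this synchronous work overlaps the round trips
        recipes = recipes_rpc.get_result()        # block only when the entities are needed
        brews = brews_rpc.get_result()
        return user_map, recipes, brews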
Example #48
0
 def begin_getitems(self, metricnames):
     keys = [db.Key.from_path('Metric', x) for x in metricnames]
     return db.get_async(keys)
Example #49
0
 def begin_getitems(self, filesets):
     keys = [db.Key.from_path('FileSet', x) for x in filesets]
     return db.get_async(keys)
Example #50
0
 def begin_getitems(self, commits):
     keys = [db.Key.from_path('Commit', commit) for commit in commits]
     return db.get_async(keys)
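Examples #48-#50 all follow the same begin/end convention: begin_getitems() builds the keys and returns the in-flight RPC, and a matching step elsewhere is expected to resolve it. That resolving step is not shown in the snippets; a hypothetical companion might look like this:

    # Hypothetical end step for the begin_getitems helpers above: resolve the RPC
    # and pair each fetched entity with the name it was requested under.
    def end_getitems(names, rpc):
        return dict(zip(names, rpc.get_result()))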
Example #51
0
 def get(self, seguid_str=""):
   """
   Return all sequence id mappings for a list of 
   comma separated SEGUIDs as JSON.
   
     eg. http://seguid-engine.appspot.com/seguid/7P65lsNr3DqnqDYPw8AIyhSKSOw,NetfrtErWuBipcsdLZvTy/rTS80
     
     Returns:
     {"NetfrtErWuBipcsdLZvTy/rTS80": ["sp|P14693", "ref|NP_011951.1", "gb|AAB68885.1"], "result": "success", "7P65lsNr3DqnqDYPw8AIyhSKSOw": ["sp|P82805", "ref|NP_198909.1", "gb|AAL58947.1"]}
         
   
   If a SEGUID in the list doesn't exist, return a mapping to an
   empty list for that SEGUID. If none of the SEGUIDs exist, return 404.
   """
   if (not seguid_str) or \
      (len(seguid_str) > 27 and (',' not in seguid_str)):
     # if we don't get an acceptable query string, fail
     self.response.status = 400 # Bad Request
     self.response.headers['Content-Type'] = 'application/json'
     self.response.out.write('{"result":"malformed request"}')
     return
   
   # convert a comma separated list of seguids into a Python list
   # or if only one SEGUID is provided, a single item list 
   if len(seguid_str) > 27 and (',' in seguid_str):
     seguids = seguid_str.split(',')
   else:
     seguids = [seguid_str]
   
   #################################################################
   # Retrieve SEGUID entities
   #
   # There are three implementations here:
   # 
   # 1. a normal query not using the key or key_name
   # 2. a get by a list of keys in one operation
   # 3. a set of get_async requests, by key
   #
   # It would be interesting to benchmark each approach, and maybe
   # a few others, with different sized requests.
   #
   
   """
   # 1. non-async query of seguid entities
   out = {'result':'seguids not found'}
   seguid_entities = []
   for s in seguids:
     se = Seguid.all().filter("seguid =", s).get()
     if se:
       out[s] = se.ids
       out['result'] = 'success'
     else:
       out[s] = []
   """
   
   """
   # 2. non-async, single-get operation
   out = {'result':'seguids not found'}
   seguid_entities = []
   skeys = []
   for s in seguids:
     skeys.append(db.Key.from_path(Seguid.kind(), 'seguid:'+s))
   # grab every Seguid by key in a single get operation
   seguid_entities = db.get(skeys)
    for s, se in zip(seguids, seguid_entities):
      if se:
        out[s] = se.ids
        out['result'] = 'success'
      else:
        out[s] = []
   """
   
    # 3. async get seguids by key
    out = {'result':'seguids not found'}
    seguid_rpcs = []
    for s in seguids:
      # convert a key_name into a Key so that we can issue an
      # async get
      seguid_key = db.Key.from_path(Seguid.kind(), 'seguid:'+s)
      seguid_rpcs.append(db.get_async(seguid_key))
    # drain the async lookups, pairing each RPC with the SEGUID it was issued for
    for s, rpc in zip(seguids, seguid_rpcs):
      se = rpc.get_result()
      if se:
        out[s] = se.ids
        out['result'] = 'success'
      else:
        out[s] = []
   
   #
   #################################################################
     
   if out['result'] == 'success':
     # success
     self.response.status = 200
     self.response.headers['Content-Type'] = 'application/json'
     self.response.out.write(simplejson.dumps(out))
     return
   else:
     # fail
     self.response.status = 404 # Not Found
     self.response.headers['Content-Type'] = 'application/json'
      self.response.out.write(simplejson.dumps(out))
     return
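The comment block inside Example #51 suggests benchmarking the three retrieval strategies (per-entity query, single batch get, per-key async get). A rough timing sketch under the same assumptions the handler already makes (the Seguid model, db, and a list of test SEGUIDs are available); nothing below is part of the original handler:

    import time

    def _time_seguid_strategies(seguids):
        # Hypothetical micro-benchmark of the three approaches discussed in the handler.
        timings = {}

        start = time.time()                       # 1. one synchronous query per SEGUID
        for s in seguids:
            Seguid.all().filter("seguid =", s).get()
        timings['query_per_seguid'] = time.time() - start

        keys = [db.Key.from_path(Seguid.kind(), 'seguid:' + s) for s in seguids]

        start = time.time()                       # 2. one batch get by key
        db.get(keys)
        timings['batch_get'] = time.time() - start

        start = time.time()                       # 3. one async get per key, drained afterwards
        rpcs = [db.get_async(k) for k in keys]
        for rpc in rpcs:
            rpc.get_result()
        timings['async_get_per_key'] = time.time() - start

        return timings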