def _render(self, team_number):
        team = Team.get_by_id("frc" + team_number)

        if not team:
            return self.redirect("/error/404")

        event_team_keys_future = EventTeam.query(EventTeam.team == team.key).fetch_async(1000, keys_only=True)
        award_keys_future = Award.query(Award.team == team.key).fetch_async(1000, keys_only=True)

        event_teams_futures = ndb.get_multi_async(event_team_keys_future.get_result())
        awards_futures = ndb.get_multi_async(award_keys_future.get_result())

        event_keys = [event_team_future.get_result().event for event_team_future in event_teams_futures]
        events_futures = ndb.get_multi_async(event_keys)

        awards_by_event = {}
        for award_future in awards_futures:
            award = award_future.get_result()
            if award.event.id() not in awards_by_event:
                awards_by_event[award.event.id()] = [award]
            else:
                awards_by_event[award.event.id()].append(award)

        event_awards = []
        current_event = None
        matches_upcoming = None
        short_cache = False
        for event_future in events_futures:
            event = event_future.get_result()
            if event.now:
                current_event = event

                team_matches_future = Match.query(Match.event == event.key, Match.team_key_names == team.key_name)\
                  .fetch_async(500, keys_only=True)
                matches = ndb.get_multi(team_matches_future.get_result())
                matches_upcoming = MatchHelper.upcomingMatches(matches)

            if event.within_a_day:
                short_cache = True

            if event.key_name in awards_by_event:
                sorted_awards = AwardHelper.organizeAwards(awards_by_event[event.key_name])['list']
            else:
                sorted_awards = []
            event_awards.append((event, sorted_awards))
        event_awards = sorted(event_awards, key=lambda (e, _): e.start_date if e.start_date else datetime.datetime(e.year, 12, 31))

        years = sorted(set(et.get_result().year for et in event_teams_futures if et.get_result().year is not None))

        template_values = {'team': team,
                           'event_awards': event_awards,
                           'years': years,
                           'current_event': current_event,
                           'matches_upcoming': matches_upcoming}

        if short_cache:
            self._cache_expiration = self.SHORT_CACHE_EXPIRATION

        path = os.path.join(os.path.dirname(__file__), '../templates/team_history.html')
        return template.render(path, template_values)
Example #2
def _PrewarmGets(data):
  """Prepares the cache so that fetching is faster later.

  The add_point request handler does a LOT of gets, and it's possible for
  each to take seconds.

  However, NDB does automatic in-context caching:
  https://developers.google.com/appengine/docs/python/ndb/cache#incontext
  This means that an async get() issued at the start caches the result, so we
  can prewarm everything we'll need throughout the request up front.

  Args:
    data: The request json.
  """
  # Prewarm lookups of masters, bots, and tests.
  master_keys = {ndb.Key('Master', r['master']) for r in data}
  bot_keys = {ndb.Key('Master', r['master'], 'Bot', r['bot']) for r in data}
  test_keys = set()
  for row in data:
    start = ['Master', row['master'], 'Bot', row['bot']]
    test_parts = row['test'].split('/')
    for part in test_parts:
      if not part:
        break
      start += ['Test', part]
      test_keys.add(ndb.Key(*start))

  ndb.get_multi_async(list(master_keys) + list(bot_keys) + list(test_keys))
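
The payoff of the prewarm pattern above comes later in the same request: NDB's in-context cache serves subsequent synchronous gets without another datastore round trip. A minimal sketch, with made-up row data:

data = [{'master': 'ChromiumPerf', 'bot': 'linux-release', 'test': 'sunspider/Total'}]
_PrewarmGets(data)  # fires async gets; results land in the in-context cache

# Later in the same request, this is served from the context cache,
# not from another datastore round trip:
master = ndb.Key('Master', 'ChromiumPerf').get()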
    def update(self, event_key):
        """
        Updates EventTeams for an event.
        Returns a tuple of (teams, event_teams, event_team_keys_to_delete)
        An EventTeam is valid iff:
        a) the team played a match at the event,
        b) the team received an award at the event,
        c) or the event has not yet occurred.
        """
        event = Event.get_by_id(event_key)

        # Add teams from Matches and Awards
        team_ids = set()
        match_key_futures = Match.query(
            Match.event == event.key).fetch_async(1000, keys_only=True)
        award_key_futures = Award.query(
            Award.event == event.key).fetch_async(1000, keys_only=True)
        match_futures = ndb.get_multi_async(match_key_futures.get_result())
        award_futures = ndb.get_multi_async(award_key_futures.get_result())

        for match_future in match_futures:
            match = match_future.get_result()
            for team in match.team_key_names:
                team_ids.add(team)
        for award_future in award_futures:
            award = award_future.get_result()
            for team_key in award.team_list:
                team_ids.add(team_key.id())

        # Create or update EventTeams
        teams = [Team(id=team_id,
                      team_number=int(team_id[3:]))
                 for team_id in team_ids]

        if teams:
            event_teams = [EventTeam(id=event_key + "_" + team.key.id(),
                                     event=event.key,
                                     team=team.key,
                                     year=event.year)
                           for team in teams]
        else:
            event_teams = None

        # Delete EventTeams for teams who did not participate in the event
        # Only runs if event is over
        existing_event_teams_keys = EventTeam.query(
            EventTeam.event == event.key).fetch(1000, keys_only=True)
        existing_event_teams = ndb.get_multi(existing_event_teams_keys)
        existing_team_ids = set()
        for et in existing_event_teams:
            existing_team_ids.add(et.team.id())

        et_keys_to_delete = set()
        if event.end_date is not None and event.end_date < datetime.datetime.now():
            for team_id in existing_team_ids.difference(team_ids):
                et_key_name = "{}_{}".format(event.key_name, team_id)
                et_keys_to_delete.add(ndb.Key(EventTeam, et_key_name))
            ndb.delete_multi(et_keys_to_delete)

        return teams, event_teams, et_keys_to_delete
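
A hedged usage sketch of the updater above; the enclosing class name (EventTeamUpdater) and the event key are assumptions for illustration:

updater = EventTeamUpdater()  # hypothetical enclosing class
teams, event_teams, et_keys_to_delete = updater.update('2014casj')
if event_teams:
    ndb.put_multi(event_teams)  # persist the recomputed EventTeams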
    def get(self):
        team_keys_future = Team.query().order(Team.team_number).fetch_async(keys_only=True)
        social_media_keys_future = Media.query(Media.year == None).fetch_async(keys_only=True)

        team_futures = ndb.get_multi_async(team_keys_future.get_result())
        social_futures = ndb.get_multi_async(social_media_keys_future.get_result())

        socials_by_team = defaultdict(dict)
        for social_future in social_futures:
            social = social_future.get_result()
            for reference in social.references:
                socials_by_team[reference.id()][social.media_type_enum] = social

        if team_futures:
            with cloudstorage.open(self.TEAMS_FILENAME_PATTERN, 'w') as teams_file:
                writer = csv.writer(teams_file, delimiter=',')
                for team_future in team_futures:
                    team = team_future.get_result()
                    team_row = [team.key.id(), team.nickname, team.name, team.city, team.state_prov, team.country, team.website, team.rookie_year]
                    for social_type in MediaType.social_types:
                        social = socials_by_team[team.key.id()].get(social_type, None)
                        team_row.append(social.social_profile_url if social is not None else None)
                    self._writerow_unicode(writer, team_row)

        self.response.out.write("Done backing up teams!")
    def get(self):
        suggestions = (
            Suggestion.query()
            .filter(Suggestion.review_state == Suggestion.REVIEW_PENDING)
            .filter(Suggestion.target_model == "media")
            .fetch(limit=50)
        )

        # Quick and dirty way to group images together
        suggestions = sorted(
            suggestions, key=lambda x: 0 if x.contents["media_type_enum"] in MediaType.image_types else 1
        )

        reference_keys = []
        existing_preferred_keys_futures = []
        for suggestion in suggestions:
            reference_key = suggestion.contents["reference_key"]
            reference = Media.create_reference(suggestion.contents["reference_type"], reference_key)
            reference_keys.append(reference)

            if "details_json" in suggestion.contents:
                suggestion.details = json.loads(suggestion.contents["details_json"])
                if "image_partial" in suggestion.details:
                    suggestion.details["thumbnail"] = suggestion.details["image_partial"].replace("_l", "_m")

            # Find existing preferred images
            existing_preferred_keys_futures.append(
                Media.query(
                    Media.media_type_enum.IN(MediaType.image_types),
                    Media.references == reference,
                    Media.preferred_references == reference,
                    Media.year == suggestion.contents["year"],
                ).fetch_async(keys_only=True)
            )

        reference_futures = ndb.get_multi_async(reference_keys)
        existing_preferred_futures = map(lambda x: ndb.get_multi_async(x.get_result()), existing_preferred_keys_futures)

        references = map(lambda r: r.get_result(), reference_futures)
        existing_preferred = map(lambda l: map(lambda x: x.get_result(), l), existing_preferred_futures)

        suggestions_and_references_and_preferred = zip(suggestions, references, existing_preferred)

        self.template_values.update(
            {
                "suggestions_and_references_and_preferred": suggestions_and_references_and_preferred,
                "max_preferred": Media.MAX_PREFERRED,
            }
        )

        self.response.out.write(
            jinja2_engine.render("suggestions/suggest_team_media_review_list.html", self.template_values)
        )
    def _render(self, district_abbrev, year=None, explicit_year=False):
        district_type = DistrictType.abbrevs[district_abbrev]

        event_keys = Event.query(Event.year == year, Event.event_district_enum == district_type).fetch(None, keys_only=True)
        if not event_keys:
            self.abort(404)

        # needed for valid_years
        all_cmp_event_keys_future = Event.query(Event.event_district_enum == district_type, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)

        # needed for valid_districts
        district_cmp_keys_future = Event.query(Event.year == year, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)

        event_futures = ndb.get_multi_async(event_keys)
        event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)
        if year == 2014:  # TODO: only 2014 has accurate rankings calculations
            team_futures = ndb.get_multi_async(set([ndb.Key(Team, et_key.id().split('_')[1]) for et_key in event_team_keys_future.get_result()]))

        events = [event_future.get_result() for event_future in event_futures]
        EventHelper.sort_events(events)

        district_cmp_futures = ndb.get_multi_async(district_cmp_keys_future.get_result())

        if year == 2014:  # TODO: only 2014 has accurate rankings calculations
            team_totals = DistrictHelper.calculate_rankings(events, team_futures, year)
        else:
            team_totals = None

        valid_districts = set()
        for district_cmp_future in district_cmp_futures:
            district_cmp = district_cmp_future.get_result()
            cmp_dis_type = district_cmp.event_district_enum
            if cmp_dis_type is None:
                logging.warning("District event {} has unknown district type!".format(district_cmp.key.id()))
            else:
                valid_districts.add((DistrictType.type_names[cmp_dis_type], DistrictType.type_abbrevs[cmp_dis_type]))
        valid_districts = sorted(valid_districts, key=lambda (name, _): name)

        self.template_values.update({
            'explicit_year': explicit_year,
            'year': year,
            'valid_years': sorted(set([int(event_key.id()[:4]) for event_key in all_cmp_event_keys_future.get_result()])),
            'valid_districts': valid_districts,
            'district_name': DistrictType.type_names[district_type],
            'district_abbrev': district_abbrev,
            'events': events,
            'team_totals': team_totals,
        })

        path = os.path.join(os.path.dirname(__file__), '../templates/district_details.html')
        return template.render(path, self.template_values)
    def _render(self, district_abbrev, year=None):
        self._set_district(district_abbrev)

        if self.year < 2009:
            return json.dumps([], ensure_ascii=True)

        event_keys = Event.query(Event.year == self.year, Event.event_district_enum == self.district).fetch(
            None, keys_only=True
        )
        if not event_keys:
            return json.dumps([], ensure_ascii=True)

        event_futures = ndb.get_multi_async(event_keys)
        event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)

        team_futures = ndb.get_multi_async(
            set([ndb.Key(Team, et_key.id().split("_")[1]) for et_key in event_team_keys_future.get_result()])
        )

        events = [event_future.get_result() for event_future in event_futures]
        EventHelper.sort_events(events)

        team_totals = DistrictHelper.calculate_rankings(events, team_futures, self.year)

        rankings = []

        current_rank = 1
        for key, points in team_totals:
            point_detail = {}
            point_detail["rank"] = current_rank
            point_detail["team_key"] = key
            point_detail["event_points"] = {}
            for event in points["event_points"]:
                event_key = event[0].key_name
                point_detail["event_points"][event_key] = event[1]
                event_details = Event.get_by_id(event_key)
                point_detail["event_points"][event[0].key_name]["district_cmp"] = (
                    True if event_details.event_type_enum == EventType.DISTRICT_CMP else False
                )

            if "rookie_bonus" in points:
                point_detail["rookie_bonus"] = points["rookie_bonus"]
            else:
                point_detail["rookie_bonus"] = 0
            point_detail["point_total"] = points["point_total"]
            rankings.append(point_detail)
            current_rank += 1

        return json.dumps(rankings)
def Prewarm(keys):
    """Prewarms the NDB in-context cache by doing async_get for the keys.

  For requests like /add_point which can get/set dozens of keys, contention
  occasionally causes the gets to take several seconds. But they will be
  cached in context by NDB if they are requested at the start of the request.

  Args:
    keys: List of string keys.
  """
    to_get = []
    for key in keys:
        to_get.append(ndb.Key("CachedPickledString", _NamespaceKey(key, datastore_hooks.EXTERNAL)))
        to_get.append(ndb.Key("CachedPickledString", _NamespaceKey(key, datastore_hooks.INTERNAL)))
    ndb.get_multi_async(to_get)
    @ndb.tasklet
    def get_events_and_matches_async():
        if return_valid_years:
            event_team_keys_query = EventTeam.query(EventTeam.team == team.key)
        else:
            event_team_keys_query = EventTeam.query(EventTeam.team == team.key, EventTeam.year == year)
        event_team_keys = yield event_team_keys_query.fetch_async(1000, keys_only=True)
        event_teams = yield ndb.get_multi_async(event_team_keys)
        event_keys = []
        for event_team in event_teams:
            if return_valid_years:
                # valid_years is a "global" variable (defined below). Adding to it here
                # avoids propagating the years up through the tasklet call chain.
                valid_years.add(event_team.year)
            if not return_valid_years or event_team.year == year:
                event_keys.append(event_team.event)
        events, matches = yield ndb.get_multi_async(event_keys), get_matches_async(event_keys)
        raise ndb.Return((events, matches))
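
Because get_events_and_matches_async is an ndb tasklet, calling it returns a future immediately; the caller can start other async work before blocking on the result:

events_and_matches_future = get_events_and_matches_async()
# ... kick off other async queries here; they run concurrently ...
events, matches = events_and_matches_future.get_result()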
    def _render(self, team_key, event_key):
        match_keys_future = Match.query(Match.event == ndb.Key(Event, event_key), Match.team_key_names == team_key).fetch_async(None, keys_only=True)
        match_futures = ndb.get_multi_async(match_keys_future.get_result())

        matches = [ModelToDict.matchConverter(match_future.get_result()) for match_future in match_futures]

        return json.dumps(matches, ensure_ascii=True)
def event_updated(affected_refs):
    event_keys = _filter(affected_refs['key'])
    years = _filter(affected_refs['year'])
    event_district_keys = _filter(affected_refs['district_key'])

    event_team_keys_future = EventTeam.query(EventTeam.event.IN(list(event_keys))).fetch_async(None, keys_only=True)
    events_future = ndb.get_multi_async(event_keys)

    queries_and_keys = []
    for event_key in event_keys:
        queries_and_keys.append(EventQuery(event_key.id()))
        queries_and_keys.append(EventDivisionsQuery(event_key.id()))

    for year in years:
        queries_and_keys.append(EventListQuery(year))

    for event_district_key in event_district_keys:
        queries_and_keys.append(DistrictEventsQuery(event_district_key.id()))

    if event_keys:
        for et_key in event_team_keys_future.get_result():
            team_key = et_key.id().split('_')[1]
            year = int(et_key.id()[:4])
            queries_and_keys.append(TeamEventsQuery(team_key))
            queries_and_keys.append(TeamYearEventsQuery(team_key, year))
            queries_and_keys.append(TeamYearEventTeamsQuery(team_key, year))

    events_with_parents = filter(lambda e: e.get_result() is not None and e.get_result().parent_event is not None, events_future)
    parent_keys = set([e.get_result().parent_event for e in events_with_parents])
    for parent_key in parent_keys:
        queries_and_keys.append(EventDivisionsQuery(parent_key.id()))

    return queries_and_keys
    @ndb.tasklet
    def get_matches_async(event_keys):
        if not event_keys:
            raise ndb.Return([])
        match_keys = yield Match.query(
            Match.event.IN(event_keys), Match.team_key_names == team.key_name).fetch_async(500, keys_only=True)
        matches = yield ndb.get_multi_async(match_keys)
        raise ndb.Return(matches)
Example #13
    @ndb.synctasklet  # assumed decorator: the handler body yields futures
    def get(self):
        result = {
            'streams': {}
        }
        streams_result = result['streams']

        activity_ids = self.request.GET.getall('activity_id')
        resolution = self.request.get('resolution')
        stream_keys = []
        stream_types = []
        activity_ids2 = []
        for activity_id in activity_ids:
            streams_result[activity_id] = {}
            for stream_type in ('latlng', 'altitude'):
                stream_key = ndb.Key(
                    model.Stream,
                    model.Stream.make_key_string(activity_id, stream_type, resolution=resolution))
                stream_keys.append(stream_key)
                stream_types.append(stream_type)
                activity_ids2.append(activity_id)

        streams = yield ndb.get_multi_async(stream_keys)

        for activity_id, stream_type, stream in zip(activity_ids2, stream_types, streams):
            activity = streams_result[activity_id]
            if stream is None:
                activity[stream_type] = {}
            else:
                activity[stream_type] = stream.to_dict()

        # self.response.cache_expires(60*60)
        # self.response.cache_control = 'private'
        # self.response.vary = 'Cookie'
        raise ndb.Return(result)
Example #14
    def get(self):
        team_key = self.request.get("team_key")

        self._require_registration()

        if not team_key:
            self.redirect("/", abort=True)

        team_future = Team.get_by_id_async(team_key)
        team = team_future.get_result()
        if not team:
            self.redirect("/", abort=True)

        media_key_futures = Media.query(Media.references == team.key, Media.year == None).fetch_async(500, keys_only=True)
        media_futures = ndb.get_multi_async(media_key_futures.get_result())
        medias = [media_future.get_result() for media_future in media_futures]
        social_medias = MediaHelper.get_socials(medias)

        self.template_values.update({
            "status": self.request.get("status"),
            "team": team,
            "social_medias": social_medias,
        })

        self.response.out.write(jinja2_engine.render('suggestions/suggest_team_social_media.html', self.template_values))
Example #15
  @ndb.synctasklet  # assumed decorator: restores plain return semantics for the yielding body
  def update(self, request):
    user_key = model.User.gen_key(tap.endpoints.get_user_id())
    comment_key = ndb.Key(model.Comment, request.key)
    user, comment = yield ndb.get_multi_async((user_key, comment_key))

    if not user:
      raise endpoints.UnauthorizedException()
    if not comment:
      raise endpoints.NotFoundException()
    if user.key != comment.author_key:
      raise endpoints.ForbiddenException()

    comment.body      = request.body
    comment.update_at = datetime.utcnow()
    _comment_key = yield comment.put_async()

    raise ndb.Return(message.CommentResponse(
      issue         = comment.issue_key.string_id(),
      body          = comment.body,
      key           = comment.key.string_id(),
      project       = comment.project_key.integer_id(),
      time_at       = comment.time_at,
      author        = comment.author_key.string_id(),
      author_name   = comment.author_name,
      update_at     = comment.update_at if hasattr(comment, "update_at") else None,
    ))
Example #16
    def get(self):
        team_key = self.request.get("team_key")
        year_str = self.request.get("year")

        self._require_registration()

        if not team_key or not year_str:
            self.redirect("/", abort=True)

        year = int(year_str)
        team_future = Team.get_by_id_async(team_key)
        team = team_future.get_result()
        if not team:
            self.redirect("/", abort=True)

        media_key_futures = Media.query(Media.references == team.key, Media.year == year).fetch_async(500, keys_only=True)
        media_futures = ndb.get_multi_async(media_key_futures.get_result())
        medias = [media_future.get_result() for media_future in media_futures]
        medias_by_slugname = MediaHelper.group_by_slugname(medias)

        self.template_values.update({
            "status": self.request.get("status"),
            "team": team,
            "year": year,
            "medias_by_slugname": medias_by_slugname,
        })

        self.response.out.write(jinja2_engine.render('suggestions/suggest_team_media.html', self.template_values))
    def get(self):
        self._validate_tba_app_id()
        memcache_key = "csv_teams_all"
        output = memcache.get(memcache_key)

        if output is None:
            team_keys = Team.query().order(Team.team_number).fetch(10000, keys_only=True)
            team_futures = ndb.get_multi_async(team_keys)

            sio = StringIO.StringIO()
            writer = csv.writer(sio, delimiter=',')
            writer.writerow(['team_number','name','nickname','location','website'])

            for team_future in team_futures:
                team = team_future.get_result()
                row = [team.team_number, team.name, team.nickname, team.location, team.website]
                row_utf8 = [unicode(e).encode('utf-8') for e in row]
                writer.writerow(row_utf8)

            output = sio.getvalue()

            if tba_config.CONFIG["memcache"]:
                memcache.set(memcache_key, output, 86400)

        self.response.headers["content-type"] = "text/csv"
        self.response.out.write(output)

        self._track_call_defer('teams/list')
Example #18
 @ndb.synctasklet  # assumed decorator: restores plain return semantics for the yielding body
 def search(self, request):
   if len(request.query.encode("utf-8")) < 3:
     raise endpoints.BadRequestException("Bad query")
   query = search.Query(
     query_string = request.query,
     options = search.QueryOptions(
       limit  = 20,
       cursor = search.Cursor(web_safe_string=request.pagination),
       ids_only = True,
     ),
   )
   key_list = list()
   documents = UserSearchIndex.search_index.search(query)
   for document in documents:
     key_list.append(ndb.Key(model.User, document.doc_id))
   items = list()
   if key_list:
     entities = yield ndb.get_multi_async(key_list)
     for user in entities:
       if user is None:
         continue
       items.append(message.UserResponse(key=user.user_id,
                                     name=user.name,
                                     language=user.language))
   if documents.cursor:
     cursor_string = documents.cursor.web_safe_string
   else:
     cursor_string = None
   raise ndb.Return(message.UserResponseCollection(
     items = items,
     pagination = cursor_string,
   ))
Example #19
  def post(self, namespace, timestamp):
    digests = []
    now = utils.timestamp_to_datetime(long(timestamp))
    expiration = config.settings().default_expiration
    try:
      digests = payload_to_hashes(self, namespace)
      # Requests all the entities at once.
      futures = ndb.get_multi_async(
          model.entry_key(namespace, binascii.hexlify(d)) for d in digests)

      to_save = []
      while futures:
        # Return opportunistically the first entity that can be retrieved.
        future = ndb.Future.wait_any(futures)
        futures.remove(future)
        item = future.get_result()
        if item and item.next_tag_ts < now:
          # Update the timestamp. Add a bit of pseudo randomness.
          item.expiration_ts, item.next_tag_ts = model.expiration_jitter(
              now, expiration)
          to_save.append(item)
      if to_save:
        ndb.put_multi(to_save)
      logging.info(
          'Timestamped %d entries out of %s', len(to_save), len(digests))
    except Exception as e:
      logging.error('Failed to stamp entries: %s\n%d entries', e, len(digests))
      raise
Example #20
  @classmethod
  @ndb.tasklet
  def get_output_async(cls, output_key, number_chunks):
    """Returns the stdout for a single command as a ndb.Future."""
    # TODO(maruel): Save number_chunks locally in this entity.
    if not number_chunks:
      raise ndb.Return(None)

    number_chunks = min(number_chunks, cls.FETCH_MAX_CHUNKS)

    # TODO(maruel): Always get one more than necessary, in case number_chunks
    # is invalid. If there's an unexpected TaskOutputChunk entity present,
    # continue fetching for more incrementally.
    parts = []
    for f in ndb.get_multi_async(
        _output_key_to_output_chunk_key(output_key, i)
        for i in xrange(number_chunks)):
      chunk = yield f
      parts.append(chunk.chunk if chunk else None)

    # Trim ending empty chunks.
    while parts and not parts[-1]:
      parts.pop()

    # parts is now guaranteed to not end with an empty chunk.
    # Replace any missing chunk.
    for i in xrange(len(parts)):
      if not parts[i]:
        parts[i] = '\x00' * cls.CHUNK_SIZE
    raise ndb.Return(''.join(parts))
Example #21
    @ndb.tasklet
    def _query_async(self):
        year = self._query_args[0]
        all_cmp_event_keys = yield Event.query(
            Event.year == int(year),
            Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(keys_only=True)
        events = yield ndb.get_multi_async(all_cmp_event_keys)
        raise ndb.Return(events)
    def get(self):
        suggestions = Suggestion.query().filter(
            Suggestion.review_state == Suggestion.REVIEW_PENDING).filter(
            Suggestion.target_model == "event_media").fetch(limit=50)

        # Quick and dirty way to group images together
        suggestions = sorted(suggestions, key=lambda x: 0 if x.contents['media_type_enum'] in MediaType.image_types else 1)

        reference_keys = []
        for suggestion in suggestions:
            reference_key = suggestion.contents['reference_key']
            reference = Media.create_reference(
                suggestion.contents['reference_type'],
                reference_key)
            reference_keys.append(reference)

            if 'details_json' in suggestion.contents:
                suggestion.details = json.loads(suggestion.contents['details_json'])
                if 'image_partial' in suggestion.details:
                    suggestion.details['thumbnail'] = suggestion.details['image_partial'].replace('_l', '_m')

        reference_futures = ndb.get_multi_async(reference_keys)
        references = map(lambda r: r.get_result(), reference_futures)

        suggestions_and_references = zip(suggestions, references)

        self.template_values.update({
            "suggestions_and_references": suggestions_and_references,
        })

        self.response.out.write(jinja2_engine.render('suggestions/suggest_event_media_review_list.html', self.template_values))
Example #23
    @classmethod
    @ndb.tasklet
    def process_parsed_feed(cls, parsed_feed, feed, overflow, overflow_reason=OVERFLOW_REASON.BACKLOG):
        keys_by_guid = {guid_for_item(item): ndb.Key(cls, guid_for_item(item), parent=feed.key) for item in parsed_feed.entries}
        entries = yield ndb.get_multi_async(keys_by_guid.values())
        old_guids = [x.key.id() for x in entries if x]
        new_guids = filter(lambda x: x not in old_guids, keys_by_guid.keys())
        new_entries_by_guid = {x: cls(key=keys_by_guid.get(x), guid=x, creating=True) for x in new_guids}
        new_entries = yield ndb.put_multi_async(new_entries_by_guid.values())

        published = overflow
        futures = []
        for item in parsed_feed.entries:
            entry = new_entries_by_guid.get(guid_for_item(item))
            if not entry:
                continue

            futures.append((entry, prepare_entry_from_item(parsed_feed, item, feed, overflow, overflow_reason, published)))

        for entry, future in futures:
            entry_kwargs = yield future
            if not entry_kwargs:
                continue

            entry_kwargs.pop('parent')
            entry_kwargs['creating'] = False
            entry.populate(**entry_kwargs)

        saved_entries = yield ndb.put_multi_async(new_entries_by_guid.values())

        raise ndb.Return((new_guids, old_guids))
    def getWeekEvents(self):
        """
        Get events this week
        In general, if an event is currently going on, it shows up in this query
        An event shows up in this query iff:
        a) The event is within_a_day
        OR
        b) The event.start_date is on or within 4 days after the closest Wednesday
        """
        today = datetime.datetime.today()

        # Make sure all events to be returned are within range
        two_weeks_of_events_keys_future = Event.query().filter(
          Event.start_date >= (today - datetime.timedelta(days=7))).filter(
          Event.start_date <= (today + datetime.timedelta(days=7))).order(
          Event.start_date).fetch_async(50, keys_only=True)

        events = []
        diff_from_wed = 2 - today.weekday()  # 2 is Wednesday. diff_from_wed ranges from 2 (Monday) to -4 (Sunday)
        closest_wednesday = today + datetime.timedelta(days=diff_from_wed)

        two_weeks_of_event_futures = ndb.get_multi_async(two_weeks_of_events_keys_future.get_result())
        for event_future in two_weeks_of_event_futures:
            event = event_future.get_result()
            if event.within_a_day:
                events.append(event)
            else:
                offset = event.start_date.date() - closest_wednesday.date()
                if (offset == datetime.timedelta(0)) or (offset > datetime.timedelta(0) and offset < datetime.timedelta(4)):
                    events.append(event)

        EventHelper.sort_events(events)
        return events
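
A quick check of the closest-Wednesday arithmetic above (the date is arbitrary): for Friday 2019-03-08, today.weekday() is 4, so diff_from_wed is -2 and the closest Wednesday is 2019-03-06; a non-live event qualifies only if it starts on that Wednesday or within the next 3 days.

import datetime

today = datetime.datetime(2019, 3, 8)  # a Friday; weekday() == 4
diff_from_wed = 2 - today.weekday()    # -2
closest_wednesday = today + datetime.timedelta(days=diff_from_wed)
assert closest_wednesday.date() == datetime.date(2019, 3, 6)

offset = datetime.date(2019, 3, 9) - closest_wednesday.date()
assert datetime.timedelta(0) <= offset < datetime.timedelta(4)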
    def get(self):
        suggestions = Suggestion.query().filter(
            Suggestion.review_state == Suggestion.REVIEW_PENDING).filter(
            Suggestion.target_model == "media")

        reference_keys = []
        for suggestion in suggestions:
            reference_keys.append(Media.create_reference(
                suggestion.contents['reference_type'],
                suggestion.contents['reference_key']))
            if 'details_json' in suggestion.contents:
                suggestion.details = json.loads(suggestion.contents['details_json'])
                if 'image_partial' in suggestion.details:
                    suggestion.details['thumbnail'] = suggestion.details['image_partial'].replace('_l', '_m')

        reference_futures = ndb.get_multi_async(reference_keys)
        references = map(lambda r: r.get_result(), reference_futures)

        suggestions_and_references = zip(suggestions, references)

        self.template_values.update({
            "suggestions_and_references": suggestions_and_references,
        })

        path = os.path.join(os.path.dirname(__file__), '../../templates/suggest_team_media_review_list.html')
        self.response.out.write(template.render(path, self.template_values))
Example #26
    def testGetMultiAsyncWithContext(self):
        """Test is_new and is_dirty after getting models with ndb.get_multi_async with models in context cache"""

        # Create first model
        model1 = self._createModel(1, put=True)
        del model1

        # Create second model
        model2 = self._createModel(2, put=True)
        del model2

        key1 = ndb.Key(helper_models.TestModel, 1)
        key2 = ndb.Key(helper_models.TestModel, 2)

        keys = [key1, key2]
        futures = ndb.get_multi_async(keys)

        models = []

        for future in futures:
            model = future.get_result()
            self.assertFalse(model.is_new)
            self.assertFalse(model.is_dirty)
            models.append(model)

        self.assertEqual(2, len(models))
Example #27
  @ndb.synctasklet  # assumed decorator: restores plain return semantics for the yielding body
  def create(self, request):
    user_key = model.User.gen_key(tap.endpoints.get_user_id())
    issue_key = ndb.Key(model.Issue, request.issue)
    project_key, _will_start_at, _user_id, _name = model.Issue.parse_key(issue_key)
    user, issue, project = yield ndb.get_multi_async((user_key, issue_key, project_key))

    if not user:
      raise endpoints.UnauthorizedException()
    if not issue:
      raise endpoints.BadRequestException()
    if not project:
      raise endpoints.BadRequestException()
    if user.key not in project.member:
      raise endpoints.ForbiddenException()

    time_at = datetime.utcnow()
    comment = model.Comment(
      key  = model.Comment.gen_key(issue_key, time_at, user_key, user.name),
      body = request.body,
    )
    _comment_key = yield comment.put_async()

    raise ndb.Return(message.CommentResponse(
      issue         = comment.issue_key.string_id(),
      body          = comment.body,
      key           = comment.key.string_id(),
      project       = comment.project_key.integer_id(),
      time_at       = comment.time_at,
      author        = comment.author_key.string_id(),
      author_name   = comment.author_name,
      update_at     = comment.update_at if hasattr(comment, "update_at") else None,
    ))
    def doAwardInsights(self, year):
        """
        Calculate award insights for a given year. Returns a list of Insights.
        """
        # Get all Blue Banner, Division Finalist, and Championship Finalist awards
        blue_banner_award_keys_future = Award.query(
            Award.year == year,
            Award.award_type_enum.IN(AwardType.BLUE_BANNER_AWARDS),
            Award.event_type_enum.IN({EventType.REGIONAL, EventType.DISTRICT, EventType.DISTRICT_CMP, EventType.CMP_DIVISION, EventType.CMP_FINALS})
        ).fetch_async(10000, keys_only=True)
        cmp_finalist_award_keys_future = Award.query(
            Award.year == year,
            Award.award_type_enum == AwardType.FINALIST,
            Award.event_type_enum.IN({EventType.CMP_DIVISION, EventType.CMP_FINALS})
        ).fetch_async(10000, keys_only=True)

        award_futures = ndb.get_multi_async(
            set(blue_banner_award_keys_future.get_result()).union(
            set(cmp_finalist_award_keys_future.get_result()))
        )

        insights = []
        insights += self._calculateBlueBanners(award_futures, year)
        insights += self._calculateChampionshipStats(award_futures, year)
        insights += self._calculateRegionalStats(award_futures, year)
        insights += self._calculateSuccessfulElimTeamups(award_futures, year)

        return insights
Example #29
def get_futures_from_keys(fd, keys):
    if isinstance(keys, list):
        futures = ndb.get_multi_async(keys)
    else:
        futures = keys.get_async()
    return futures
Example #30
@ndb.tasklet
def _get_count_async(name, key_gen, use_memcache=True):
    """Retrieve the value for a given sharded counter.

    Args:
        name: The name of the counter.
        key_gen: Callable that returns the keys of all shards for the counter.
        use_memcache: Whether to read/write the cached total in memcache.
    Returns:
        Integer; the cumulative count of all sharded counters for the given
            counter name.
    """
    # Only cache if the counter exists at all.
    do_cache = False

    total = None

    if use_memcache:
        total = memcache.get(name)

    if total is None:
        total = 0
        all_keys = key_gen()
        counters = yield ndb.get_multi_async(all_keys, use_cache=False, use_memcache=False)
        for counter in counters:
            if counter is not None:
                total += counter.count
                do_cache = True

        if do_cache and use_memcache:
            memcache.set(name, total)

    raise ndb.Return(total)
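
A hedged usage sketch for driving the sharded-counter tasklet above; the CounterShard kind and key scheme are hypothetical:

# Hypothetical shard-key generator for a counter with 20 shards.
def make_key_gen(name, num_shards=20):
    def key_gen():
        return [ndb.Key('CounterShard', '%s-%d' % (name, i))
                for i in xrange(num_shards)]
    return key_gen

total_future = _get_count_async('page-views', make_key_gen('page-views'))
total = total_future.get_result()  # blocks until all shard gets complete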
    def getWeekEvents(self):
        """
        Get events this week
        In general, if an event is currently going on, it shows up in this query
        An event shows up in this query iff:
        a) The event is within_a_day
        OR
        b) The event.start_date is on or within 4 days after the closest Wednesday/Monday (pre-2020/post-2020)
        """
        event_keys = memcache.get('EventHelper.getWeekEvents():event_keys')
        if event_keys is not None:
            return ndb.get_multi(event_keys)

        today = datetime.datetime.today()

        # Make sure all events to be returned are within range
        two_weeks_of_events_keys_future = Event.query().filter(
          Event.start_date >= (today - datetime.timedelta(days=7))).filter(
          Event.start_date <= (today + datetime.timedelta(days=7))).order(
          Event.start_date).fetch_async(keys_only=True)

        events = []

        days_diff = 0
        # Before 2020, event weeks start on Wednesdays
        if today.year < 2020:
            days_diff = 2  # 2 is Wednesday. diff_from_week_start ranges from 2 (Monday) to -4 (Sunday)
        diff_from_week_start = days_diff - today.weekday()
        closest_week_start = today + datetime.timedelta(days=diff_from_week_start)

        two_weeks_of_event_futures = ndb.get_multi_async(two_weeks_of_events_keys_future.get_result())
        for event_future in two_weeks_of_event_futures:
            event = event_future.get_result()
            if event.within_a_day:
                events.append(event)
            else:
                offset = event.start_date.date() - closest_week_start.date()
                if (offset == datetime.timedelta(0)) or (offset > datetime.timedelta(0) and offset < datetime.timedelta(4)):
                    events.append(event)

        EventHelper.sort_events(events)
        memcache.set('EventHelper.getWeekEvents():event_keys', [e.key for e in events], 60*60)
        return events
Example #32
    def _add_alliance_status(self, event_key, alliances):
        captain_team_keys = []
        for alliance in alliances:
            if alliance['picks']:
                captain_team_keys.append(alliance['picks'][0])

        event_team_keys = [
            ndb.Key(EventTeam, "{}_{}".format(event_key, team_key))
            for team_key in captain_team_keys
        ]
        captain_eventteams_future = ndb.get_multi_async(event_team_keys)
        for captain_future, alliance in zip(captain_eventteams_future,
                                            alliances):
            captain = captain_future.get_result()
            if captain and captain.status and 'alliance' in captain.status and 'playoff' in captain.status:
                alliance['status'] = captain.status['playoff']
            else:
                alliance['status'] = 'unknown'
        return alliances
Example #33
    def paginate(tickets, pages_info, current_page):
        """Paginates the results in self.tickets, and self.pages_info"""
        page_buttons_each_side = TicketsManager.MAX_TICKETS_PER_PAGE // 2
        num_tickets = len(tickets)
        num_pages = (num_tickets // TicketsManager.MAX_TICKETS_PER_PAGE) + 1
        current_page = min(num_pages - 1, max(0, current_page))

        pages_info["current"] = current_page
        pages_info["previous"] = max(0, current_page - 1)
        pages_info["last"] = num_pages - 1
        pages_info["next"] = min(num_pages - 1, current_page + 1)
        pages_info["relevant"] = range(
            max(0, current_page - page_buttons_each_side),
            min(num_pages - 1, current_page + page_buttons_each_side) + 1)

        first_ticket = current_page * TicketsManager.MAX_TICKETS_PER_PAGE
        last_ticket = first_ticket + TicketsManager.MAX_TICKETS_PER_PAGE
        tickets = tickets[first_ticket:last_ticket]
        return ndb.get_multi_async(tickets)
Example #34
def _get_snapshot_as_dict_future(keys):
    """Gets post-processed entities referenced by keys.

  Returns:
    list of ndb.Future returning the to_dict() value (instead of the entity
    itself) of the entities present or None if the entity doesn't exist.
  """
    def _fix_future(future, out):
        """Converts a ndb.Future to a StatisticsFramework entity into a dict of the
    snapshot.
    """
        result = future.get_result()
        out.set_result(result.to_dict() if result else None)

    tmp = ndb.get_multi_async(keys, use_cache=False, use_memcache=False)
    out = [ndb.Future() for _ in xrange(len(tmp))]
    for i, f in enumerate(tmp):
        f.add_immediate_callback(_fix_future, f, out[i])
    return out
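
Since the helper above returns plain ndb.Future objects, callers consume them like any other async result; a minimal sketch:

futures = _get_snapshot_as_dict_future(keys)
snapshots = [f.get_result() for f in futures]  # each a dict, or None if the entity is absent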
Example #35
    def get(self):
        team_key = self.request.get("team_key")
        year_str = self.request.get("year")

        self._require_registration()

        if not team_key or not year_str:
            self.redirect("/", abort=True)

        year = int(year_str)
        team_future = Team.get_by_id_async(team_key)
        team = team_future.get_result()
        if not team:
            self.redirect("/", abort=True)

        media_key_futures = Media.query(Media.references == team.key,
                                        Media.year == year).fetch_async(
                                            500, keys_only=True)
        social_media_future = media_query.TeamSocialMediaQuery(
            team.key.id()).fetch_async()

        media_futures = ndb.get_multi_async(media_key_futures.get_result())
        medias = [media_future.get_result() for media_future in media_futures]
        medias_by_slugname = MediaHelper.group_by_slugname(medias)

        social_medias = sorted(social_media_future.get_result(),
                               key=MediaHelper.social_media_sorter)
        social_medias = filter(
            lambda m: m.media_type_enum == MediaType.INSTAGRAM_PROFILE,
            social_medias)  # we only allow IG media, so only show IG profile

        self.template_values.update({
            "medias_by_slugname": medias_by_slugname,
            "social_medias": social_medias,
            "status": self.request.get("status"),
            "team": team,
            "year": year,
        })

        self.response.out.write(
            jinja2_engine.render('suggestions/suggest_team_media.html',
                                 self.template_values))
Example #36
    @classmethod
    @ndb.tasklet
    def _CreateCertificatesFromJsonEvents(cls, json_events):
        """Creates Certificate entities associated with an event."""
        certs = itertools.chain.from_iterable(
            cls._GenerateCertificatesFromJsonEvent(event)
            for event in json_events)
        unique_cert_map = {cert.key: cert for cert in certs}
        existing_certs = yield ndb.get_multi_async(unique_cert_map.keys())
        unknown_certs = [
            cert
            for cert, existing in zip(unique_cert_map.values(), existing_certs)
            if existing is None
        ]

        for cert_entity in unknown_certs:
            # Insert a row into the Certificate table. Allow the timestamp to be
            # generated within InsertBigQueryRow(). The Blockable.recorded_dt Property
            # is set to auto_now_add, but this isn't filled in until persist time.
            cert_entity.InsertBigQueryRow(constants.BLOCK_ACTION.FIRST_SEEN)

        yield ndb.put_multi_async(unknown_certs)
Example #37
@ndb.tasklet
def _update_builders_async(new_builds, now):
    """Creates/updates model.Builder entities."""
    keys = sorted(
        {model.Builder.make_key(nb.build.proto.builder)
         for nb in new_builds})
    builders = yield ndb.get_multi_async(keys)

    to_put = []
    for key, builder in zip(keys, builders):
        if not builder:
            # Register it!
            to_put.append(model.Builder(key=key, last_scheduled=now))
        else:
            since_last_update = now - builder.last_scheduled
            update_probability = since_last_update.total_seconds() / 3600.0
            if _should_update_builder(update_probability):
                builder.last_scheduled = now
                to_put.append(builder)
    if to_put:
        yield ndb.put_multi_async(to_put)
Example #38
@ndb.synctasklet  # assumed decorator: the body yields futures but the route needs a plain response
def tq_feed_poll():
    """Poll some feeds"""
    if not request.headers.get('X-AppEngine-QueueName'):
        raise ndb.Return(jsonify_error(message='Not a Task call'))

    keys = request.form.get('keys')
    if not keys:
        logger.info('Task Queue poll no keys')
        raise ndb.Return(jsonify_error(code=500))

    success = 0
    errors = 0
    entries_created = 0
    ndb_keys = [ndb.Key(urlsafe=key) for key in keys.split(',')]
    feeds = yield ndb.get_multi_async(ndb_keys)
    feeds = filter(lambda x: not getattr(x, 'use_external_poller', False), feeds)
    logger.info('Got %d feed(s) for polling', len(feeds))
    futures = []

    for i, feed in enumerate(feeds):
        if not feed:
            errors += 1
            logger.info("Couldn't find feed for key: %s", ndb_keys[i])
            continue
        futures.append((i, feed.process_feed(None, None)))

    for i, future in futures:
        parsed_feed = None
        try:
            parsed_feed, num_new_entries = yield future
            entries_created += num_new_entries
            success += 1
        except:
            errors += 1
            feed = feeds[i]
            logger.exception('Failed to update feed:%s, i=%s' % (feed.feed_url, i))

    yield write_epoch_to_stat(Stat, 'poll_job')
    logger.info('Polled feeds entries_created: %s success: %s errors: %s', entries_created, success, errors)

    raise ndb.Return(jsonify(status='ok'))
Example #39
@ndb.tasklet
def _populate_tag_index_entry_bucket_id(indexes):
    """Populates indexes[i].entries[j].bucket_id."""
    to_migrate = {
        i
        for i, idx in enumerate(indexes)
        if any(not e.bucket_id for e in idx.entries)
    }
    if not to_migrate:
        return

    build_ids = sorted({
        e.build_id
        for i in to_migrate for e in indexes[i].entries if not e.bucket_id
    })
    builds = yield ndb.get_multi_async(
        ndb.Key(model.Build, bid) for bid in build_ids)
    bucket_ids = {
        build_id: build.bucket_id if build else None
        for build_id, build in zip(build_ids, builds)
    }

    @ndb.transactional_tasklet
    def txn_async(key):
        idx = yield key.get_async()
        new_entries = []
        for e in idx.entries:
            e.bucket_id = e.bucket_id or bucket_ids[e.build_id]
            if e.bucket_id:
                new_entries.append(e)
            else:  # pragma: no cover | pycoverage is confused
                # Such build does not exist.
                # Note: add_to_tag_index_async adds new entries with bucket_id.
                # This code runs only for old TagIndeEntries, so there is no race.
                pass
        idx.entries = new_entries
        yield idx.put_async()
        raise ndb.Return(idx)

    futs = [(i, txn_async(indexes[i].key)) for i in to_migrate]
    for i, fut in futs:
        indexes[i] = fut.get_result()
Example #40
@ndb.tasklet
def get_buckets_async(bucket_ids=None):
    """Returns configured buckets.

  If bucket_ids is None, returns all buckets.
  Otherwise returns only specified buckets.
  If a bucket does not exist, returns a None map value.

  Returns:
    {bucket_id: project_config_pb2.Bucket} dict.
  """
    if bucket_ids is not None:
        bucket_ids = list(bucket_ids)
        keys = [Bucket.make_key(*parse_bucket_id(bid)) for bid in bucket_ids]
        buckets = yield ndb.get_multi_async(keys)
        raise ndb.Return({
            bid: b.config if b else None
            for bid, b in zip(bucket_ids, buckets)
        })
    else:
        buckets = yield Bucket.query().fetch_async()
        raise ndb.Return({b.bucket_id: b.config for b in buckets})
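
Given the contract documented above, a caller can request a mix of existing and missing buckets (the bucket ids here are made up):

buckets = get_buckets_async(['chromium/try', 'chromium/no-such-bucket']).get_result()
# => {'chromium/try': <project_config_pb2.Bucket>, 'chromium/no-such-bucket': None}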
Example #41
	def fetch_radial_cities(self,ghash,precision=3,n=2):
		'''
		Fetches cities around the specified ghash
		Sorts the cities by distance from the center ghash
		@param ghash: the ghash of the center point
		@type ghash: str
		'''
		# calc the center geo_point
		center_geo_point = geohash.decode(ghash)
		# create a list of ghashes around the center point
		ghash = utils.chop_ghash(ghash, precision)
		ghash_list = utils.create_ghash_list(ghash, n)
		# get a list of all the city keys in the range of ghashes
		city_keys_set = set([])
		for ghash in ghash_list:
			city_keys = models.City.query(
						models.City.ghash >= ghash,
						models.City.ghash <= ghash+"{"
						).iter(
							batch_size = 50,
							keys_only = True)
			city_keys_set.update(city_keys)
		
		city_futures = ndb.get_multi_async(city_keys_set)
		cities = (c.get_result() for c in city_futures)
		
		cities_list = []
		# calculate the distance from the center ghash
		for city in cities:
			# package the city for the radius list
			city_dict = city.package_for_radius_list()
			# calc distance from center point
			distance = utils.distance_between_points(center_geo_point, city.geo_point)
			# add the distance 
			city_dict['distance'] = distance
			cities_list.append(city_dict)
		
		# sort the list of cities by distance
		cities_list = sorted(cities_list,key=lambda c: c['distance'])
		return cities_list[:10]
Example #42
    def get(self):
        suggestions = Suggestion.query().filter(
            Suggestion.review_state == Suggestion.REVIEW_PENDING).filter(
            Suggestion.target_model == "social-media").fetch(limit=50)

        reference_keys = []
        for suggestion in suggestions:
            reference_key = suggestion.contents['reference_key']
            reference = Media.create_reference(
                suggestion.contents['reference_type'],
                reference_key)
            reference_keys.append(reference)

        reference_futures = ndb.get_multi_async(reference_keys)
        references = map(lambda r: r.get_result(), reference_futures)
        suggestions_and_references = zip(suggestions, references)

        self.template_values.update({
            "suggestions_and_references": suggestions_and_references,
        })

        self.response.out.write(jinja2_engine.render('suggestions/suggest_team_social_review.html', self.template_values))
Example #43
    def get(self):
        suggestions = Suggestion.query().filter(
            Suggestion.review_state == Suggestion.REVIEW_PENDING).filter(
                Suggestion.target_model == "event_media").fetch(limit=50)

        # Quick and dirty way to group images together
        suggestions = sorted(
            suggestions,
            key=lambda x: 0
            if x.contents['media_type_enum'] in MediaType.image_types else 1)

        reference_keys = []
        for suggestion in suggestions:
            reference_key = suggestion.contents['reference_key']
            reference = Media.create_reference(
                suggestion.contents['reference_type'], reference_key)
            reference_keys.append(reference)

            if 'details_json' in suggestion.contents:
                suggestion.details = json.loads(
                    suggestion.contents['details_json'])
                if 'image_partial' in suggestion.details:
                    suggestion.details['thumbnail'] = suggestion.details[
                        'image_partial'].replace('_l', '_m')

        reference_futures = ndb.get_multi_async(reference_keys)
        references = map(lambda r: r.get_result(), reference_futures)

        suggestions_and_references = zip(suggestions, references)

        self.template_values.update({
            "suggestions_and_references":
            suggestions_and_references,
        })

        self.response.out.write(
            jinja2_engine.render(
                'suggestions/suggest_event_media_review_list.html',
                self.template_values))
Example #44
    def getWeekEvents(self):
        """
        Get events this week
        In general, if an event is currently going on, it shows up in this query
        An event shows up in this query iff:
        a) The event is within_a_day
        OR
        b) The event.start_date is on or within 4 days after the closest Wednesday
        """
        today = datetime.datetime.today()

        # Make sure all events to be returned are within range
        two_weeks_of_events_keys_future = Event.query().filter(
            Event.start_date >= (today - datetime.timedelta(days=7))).filter(
                Event.start_date <= (today +
                                     datetime.timedelta(days=7))).order(
                                         Event.start_date).fetch_async(
                                             50, keys_only=True)

        events = []
        diff_from_wed = 2 - today.weekday()  # 2 is Wednesday. diff_from_wed ranges from 2 (Monday) to -4 (Sunday)
        closest_wednesday = today + datetime.timedelta(days=diff_from_wed)

        two_weeks_of_event_futures = ndb.get_multi_async(
            two_weeks_of_events_keys_future.get_result())
        for event_future in two_weeks_of_event_futures:
            event = event_future.get_result()
            if event.within_a_day:
                events.append(event)
            else:
                offset = event.start_date.date() - closest_wednesday.date()
                if (offset == datetime.timedelta(0)) or (
                        offset > datetime.timedelta(0)
                        and offset < datetime.timedelta(4)):
                    events.append(event)

        EventHelper.sort_events(events)
        return events
Example #45
    def get(self):

        @ndb.tasklet
        def get_projects_for_user_async(user_key, admin_permissions):
            # base projects query
            projects = Task.query().filter(Task.is_top_level == True)
            # filter projects if user is not an admin
            if not admin_permissions:
                projects = projects.filter(Task.users == user_key)
            # sort projects by creation date
            projects = projects.order(Task.creation_time)
            project_list = yield projects.map_async(lambda x: x.key)
            raise ndb.Return(project_list)

        # run async query to get projects
        admin_permissions = is_admin(self.user_entity)
        user_key = self.user_entity.key
        projects = get_projects_for_user_async(user_key, admin_permissions)
        # add projects to the template context
        context = {'projects': ndb.get_multi_async(projects.get_result())}
        # render and return login page
        return self.render_response('projects.html', context)
Example #46
def event_updated(affected_refs):
    event_keys = _filter(affected_refs['key'])
    years = _filter(affected_refs['year'])
    event_district_keys = _filter(affected_refs['district_key'])

    event_team_keys_future = EventTeam.query(
        EventTeam.event.IN(list(event_keys))).fetch_async(None, keys_only=True)
    events_future = ndb.get_multi_async(event_keys)

    queries_and_keys = []
    for event_key in event_keys:
        queries_and_keys.append(EventQuery(event_key.id()))
        queries_and_keys.append(EventDivisionsQuery(event_key.id()))

    for year in years:
        queries_and_keys.append(EventListQuery(year))

    for event_district_key in event_district_keys:
        queries_and_keys.append(DistrictEventsQuery(event_district_key.id()))

    if event_keys:
        for et_key in event_team_keys_future.get_result():
            team_key = et_key.id().split('_')[1]
            year = int(et_key.id()[:4])
            queries_and_keys.append(TeamEventsQuery(team_key))
            queries_and_keys.append(TeamYearEventsQuery(team_key, year))
            queries_and_keys.append(TeamYearEventTeamsQuery(team_key, year))

    events_with_parents = [
        e for e in events_future
        if e.get_result() is not None and e.get_result().parent_event is not None]
    parent_keys = {e.get_result().parent_event for e in events_with_parents}
    for parent_key in parent_keys:
        queries_and_keys.append(EventDivisionsQuery(parent_key.id()))

    return queries_and_keys
Example #47
	def calc_major_cities(self):
		'''
		Finds the cities with more than min_artists artists.
		'''
		
		min_artists = 20
		popular_city_keys = []
		for key in models.City.query().iter(keys_only=True):
			artist_count = models.Artist.query(models.Artist.cities.city_key == key).count()
			if artist_count > min_artists:
				popular_city_keys.append((key,artist_count))
		# sort the keys, cities with the most artists first
		popular_city_keys = sorted(popular_city_keys, key=lambda x: x[1], reverse=True)
		# grab only the city keys
		city_keys = (x[0] for x in popular_city_keys)
		# fetch them!
		city_futures = ndb.get_multi_async(city_keys)
		cities = (f.get_result() for f in city_futures)
		
		to_send = (c.to_dict() for c in cities)
		return to_send
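The per-city count() calls above block one at a time; below is a sketch of the same tally with overlapping RPCs via Query.count_async(), assuming the same models module and threshold:

def calc_major_city_counts(min_artists=20):
    # Kick off every count first so the datastore RPCs overlap, then collect.
    pending = [(key, models.Artist.query(models.Artist.cities.city_key == key).count_async())
               for key in models.City.query().iter(keys_only=True)]
    counts = [(key, f.get_result()) for key, f in pending]
    # Keep only the popular cities, most artists first.
    return sorted(((k, n) for k, n in counts if n > min_artists),
                  key=lambda x: x[1], reverse=True)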
Example #48
    def get(self):
        query = self.request.get('q')
        location = None
        if self.request.get('nearby'):
            location = api.get_geo_point(self.request)

        user = api.get_user(self.request)
        if not user:
            api.write_error(self.response, 403, 'Unknown or missing user')
            return

        object_keys = search_objects(query, location, user.uuid)
        results = []
        result_futures = ndb.get_multi_async(object_keys)
        ndb.Future.wait_all(result_futures)
        for handle in result_futures:
            data = {}
            try:
                obj = handle.get_result()
                if isinstance(obj, Message) and api.is_user_allowed_message_view(user, obj):
                    data['message'] = get_message_json(obj)
                elif isinstance(obj, Group) and api.is_user_allowed_group_view(user, obj):
                    data['group'] = get_group_json(obj)
                elif isinstance(obj, User):
                    data['user'] = get_user_json(obj)
                elif isinstance(obj, Card):
                    data['card'] = get_card_json(obj)
            except Exception:  # skip results that fail to load or serialize
                pass

            if data:
                results.append(data)

        api.write_message(self.response, 'success', extra={'results': results})
Example #49
@ndb.tasklet  # uses yield / ndb.Return, so it must run as a tasklet
def _get_count_async(name, key_gen, use_memcache=True):
    """Retrieve the value for a given sharded counter.

    Args:
        name: The name of the counter.
        key_gen: Callable returning the keys of all shards for this counter.
        use_memcache: Whether to read/write the cached total in memcache.
    Returns:
        Integer; the cumulative count of all sharded counters for the given
            counter name.
    """

    # only cache if it exists at all
    do_cache = False
    cached = False

    total = None

    if use_memcache:
        total = memcache.get(name)
        cached = total is not None  # only a hit counts as cached

    if total is None:
        total = 0
        all_keys = key_gen()
        counters = yield ndb.get_multi_async(all_keys,
                                             use_cache=False,
                                             use_memcache=False)
        for counter in counters:
            if counter is not None:
                total += counter.count
                do_cache = True

        if do_cache and use_memcache:
            memcache.set(name, total)

    logging.info("Value of {} is {} (cached: {})".format(name, total, cached))
    raise ndb.Return(total)
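For reference, key_gen is just a callable producing all shard keys. A hypothetical generator for a counter stored in a fixed number of shard entities (the CounterShard kind and id scheme are assumptions, not from the source):

from google.appengine.ext import ndb

def make_key_gen(name, num_shards=20):
    # Assumed layout: shard entities with string ids "<name>-0" .. "<name>-<num_shards - 1>".
    def key_gen():
        return [ndb.Key('CounterShard', '{}-{}'.format(name, i))
                for i in range(num_shards)]
    return key_gen

# total = _get_count_async('page-views', make_key_gen('page-views')).get_result()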
Example #50
@ndb.synctasklet  # needed to drive the yields below; the decorator appears to have been elided
def tq_feed_post_job():
    """Post entries for a batch of feeds."""
    if not request.headers.get('X-AppEngine-QueueName'):
        raise ndb.Return(jsonify_error(message='Not a Task call'))

    keys = request.form.get('keys')
    if not keys:
        logger.info('Task Queue post no keys')
        raise ndb.Return(jsonify_error(code=500))

    success = 0
    errors = 0
    num_posted = 0
    ndb_keys = [ndb.Key(urlsafe=key) for key in keys.split(',')]
    feeds = yield ndb.get_multi_async(ndb_keys)
    logger.info('Got %d feed(s) for posting', len(feeds))
    futures = []

    for feed in feeds:
        futures.append((feed, Entry.publish_for_feed(feed)))

    for feed, future in futures:
        try:
            num_posts = yield future
            if num_posts is not None:
                num_posted += num_posts
            success += 1
        except Exception:
            errors += 1
            if feed:
                logger.exception('Failed to publish feed: %s' % (feed.feed_url, ))
            else:
                logger.exception('Failed to publish non-existent feed')

    logger.info('Post Feeds success:%s errors: %s num_posted: %s', success, errors, num_posted)
    raise ndb.Return(jsonify(status='ok'))
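The handler above expects a comma-separated list of urlsafe keys in the keys form field; a sketch of a matching enqueue call (the task URL and queue name are placeholders, not from the source):

from google.appengine.api import taskqueue

def enqueue_feed_posts(feed_keys):
    # Batch the urlsafe keys into a single task payload.
    taskqueue.add(
        url='/_ah/tasks/feed_post',  # hypothetical route wired to tq_feed_post_job
        params={'keys': ','.join(k.urlsafe() for k in feed_keys)},
        queue_name='default')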
Example #51
    def post(self, namespace, timestamp):
        saved = 0
        digests = []
        now = utils.timestamp_to_datetime(long(timestamp))
        expiration = config.settings().default_expiration
        try:
            digests = _payload_to_hashes(self, namespace)
            # Requests all the entities at once.
            fetch_futures = ndb.get_multi_async(
                model.get_entry_key(namespace, binascii.hexlify(d))
                for d in digests)

            save_futures = []
            while fetch_futures:
                # Return opportunistically the first entity that can be retrieved.
                fetch_futures, done = _throttle_futures(
                    fetch_futures,
                    len(fetch_futures) - 1)
                for f in done:
                    item = f.get_result()
                    if item and item.next_tag_ts < now:
                        # Update the timestamp. Add a bit of pseudo randomness.
                        item.expiration_ts, item.next_tag_ts = model.expiration_jitter(
                            now, expiration)
                        save_futures.append(item.put_async())
                        saved += 1
                save_futures, _ = _throttle_futures(save_futures, 100)

            for f in save_futures:
                f.get_result()
            logging.info('Timestamped %d entries out of %d', saved,
                         len(digests))
        except Exception as e:
            logging.error('Failed to stamp entries: %s\n%d entries', e,
                          len(digests))
            raise
Example #52
  @classmethod
  @ndb.tasklet  # first arg is cls and the body yields, so both decorators are implied
  def _GetBundlesToUpload(cls, json_events):
    """Determine which bundles in this event upload require uploading.

    Args:
      json_events: The list of json events provided in this event upload.

    Returns:
      list<Key>, The keys of SantaBundles that require upload.
    """
    all_bundle_keys = [
        cls._GetBundleKeyFromJsonEvent(json_event)
        for json_event in json_events]
    unique_bundle_keys = list(set(filter(None, all_bundle_keys)))

    # NOTE: We're relying on a race condition here. All the Bundle
    # entity creations may not have finished by this point _but_ if we see that
    # some don't exist, we know we're the first to create them. If we're first
    # to create them, we should proactively request that they be uploaded.
    existing_bundles = yield ndb.get_multi_async(unique_bundle_keys)
    bundles_to_upload = [
        bundle_key
        for bundle_key, bundle in zip(unique_bundle_keys, existing_bundles)
        if not bundle or not bundle.has_been_uploaded]
    raise ndb.Return(bundles_to_upload)
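The zip of the input keys with the get_multi_async results is the standard way to detect missing entities, since get_multi_async preserves input order; a minimal generic sketch of that pattern:

from google.appengine.ext import ndb

@ndb.tasklet
def missing_keys_async(keys):
    # Each result lines up with its key; None means the entity does not exist.
    entities = yield ndb.get_multi_async(keys)
    raise ndb.Return([k for k, e in zip(keys, entities) if e is None])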
Example #53
    def post(self):
        """
        Add audio fingerprint hash records to the database in response to POST
        requests containing JSON encoded data in the body. Body data should
        be a dict containing the database key id for the song being added and
        a list of tuples containing a hash id and list of absolute offsets
        in the song: {"song_id": <int>, "hashes": [(<int>, [<int>, ...]), ...]}
        """
        entity = Key(urlsafe=API_ENTITY_KEY).get()
        if self.request.headers['API_KEY'] != entity.api_key:
            self.error(401)
            return
        body_data = json.loads(self.request.body)
        song_id_key = body_data["song_id"]
        hashes = body_data["hashes"]
        skey = Key(Songs, song_id_key).id()

        logging.info("POST /hashes - length: {}".format(len(hashes)))

        updates = []
        records = ndb.get_multi_async([Key(Hashes, k) for k, _ in hashes])
        for f, (fp_key, offsets) in zip(records, hashes):
            fp = f.get_result() or Hashes(id=fp_key, song_list=[])
            new_entries = [(skey, o) for o in offsets
                           if (skey, o) not in fp.song_list]

            if new_entries:
                fp.song_list.extend(new_entries)
                updates.append(fp)

        if updates:
            Future.wait_all(ndb.put_multi_async(updates))
            logging.info("Handled {} records.".format(len(updates)))

        self.response.headers.add_header('Content-Type', 'application/json')
        self.response.out.write(json.dumps(len(hashes)))
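A sketch of a client call matching the documented body format (the URL and API key are placeholders):

import json
import urllib2

body = json.dumps({"song_id": 42, "hashes": [[1234, [0, 17, 903]]]})
req = urllib2.Request('https://example.com/hashes', body,
                      {'API_KEY': 'my-secret-key',
                       'Content-Type': 'application/json'})
print(urllib2.urlopen(req).read())  # echoes the number of hashes handled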
Example #54
    def _render(self, district_abbrev, year=None, explicit_year=False):
        district_type = DistrictType.abbrevs[district_abbrev]

        event_keys = Event.query(Event.year == year, Event.event_district_enum == district_type).fetch(None, keys_only=True)
        if not event_keys:
            self.abort(404)

        # needed for district teams
        district_key = '{}{}'.format(year, district_abbrev)
        district_teams_future = DistrictTeamsQuery(district_key).fetch_async()

        # needed for valid_years
        all_cmp_event_keys_future = Event.query(Event.event_district_enum == district_type, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)

        # needed for valid_districts
        district_cmp_keys_future = Event.query(Event.year == year, Event.event_type_enum == EventType.DISTRICT_CMP).fetch_async(None, keys_only=True)

        # Needed for active team statuses
        live_events = EventHelper.getWeekEvents()
        live_eventteams_futures = []
        for event in live_events:
            live_eventteams_futures.append(EventTeamsQuery(event.key_name).fetch_async())

        event_futures = ndb.get_multi_async(event_keys)
        event_team_keys_future = EventTeam.query(EventTeam.event.IN(event_keys)).fetch_async(None, keys_only=True)
        team_futures = ndb.get_multi_async(set([ndb.Key(Team, et_key.id().split('_')[1]) for et_key in event_team_keys_future.get_result()]))

        events = [event_future.get_result() for event_future in event_futures]
        EventHelper.sort_events(events)

        district_cmp_futures = ndb.get_multi_async(district_cmp_keys_future.get_result())

        team_totals = DistrictHelper.calculate_rankings(events, team_futures, year)

        valid_districts = set()
        for district_cmp_future in district_cmp_futures:
            district_cmp = district_cmp_future.get_result()
            cmp_dis_type = district_cmp.event_district_enum
            if cmp_dis_type is None:
                logging.warning("District event {} has unknown district type!".format(district_cmp.key.id()))
            else:
                valid_districts.add((DistrictType.type_names[cmp_dis_type], DistrictType.type_abbrevs[cmp_dis_type]))
        valid_districts = sorted(valid_districts, key=lambda (name, _): name)

        teams = TeamHelper.sortTeams(district_teams_future.get_result())

        num_teams = len(teams)
        middle_value = (num_teams + 1) / 2  # integer division; round up so column A gets the odd team
        teams_a, teams_b = teams[:middle_value], teams[middle_value:]

        # Currently Competing Team Status
        live_events_with_teams = EventTeamStatusHelper.buildEventTeamStatus(live_events, live_eventteams_futures, teams)
        live_events_with_teams.sort(key=lambda x: x[0].name)

        self.template_values.update({
            'explicit_year': explicit_year,
            'year': year,
            'valid_years': sorted(set([int(event_key.id()[:4]) for event_key in all_cmp_event_keys_future.get_result()])),
            'valid_districts': valid_districts,
            'district_name': DistrictType.type_names[district_type],
            'district_abbrev': district_abbrev,
            'events': events,
            'team_totals': team_totals,
            'teams_a': teams_a,
            'teams_b': teams_b,
            'live_events_with_teams': live_events_with_teams,
        })

        path = os.path.join(os.path.dirname(__file__), '../templates/district_details.html')
        return template.render(path, self.template_values)
Example #55
    def get(self):
        self._require_registration()

        user = self.user_bundle.account.key
        favorites = Favorite.query(ancestor=user).fetch()
        subscriptions = Subscription.query(ancestor=user).fetch()

        team_keys = set()
        team_fav = {}
        team_subs = {}
        event_keys = set()
        event_fav = {}
        event_subs = {}
        events = []
        match_keys = set()
        match_event_keys = set()
        match_fav = {}
        match_subs = {}
        for item in favorites + subscriptions:
            if item.model_type == ModelType.TEAM:
                team_keys.add(ndb.Key(Team, item.model_key))
                if type(item) == Favorite:
                    team_fav[item.model_key] = item
                elif type(item) == Subscription:
                    team_subs[item.model_key] = item
            elif item.model_type == ModelType.MATCH:
                match_keys.add(ndb.Key(Match, item.model_key))
                match_event_keys.add(
                    ndb.Key(Event,
                            item.model_key.split('_')[0]))
                if type(item) == Favorite:
                    match_fav[item.model_key] = item
                elif type(item) == Subscription:
                    match_subs[item.model_key] = item
            elif item.model_type == ModelType.EVENT:
                if item.model_key.endswith('*'):  # All year events wildcard
                    event_year = int(item.model_key[:-1])
                    events.append(
                        Event(  # add fake event for rendering
                            id=item.model_key,
                            short_name='ALL EVENTS',
                            event_short=item.model_key,
                            year=event_year,
                            start_date=datetime.datetime(event_year, 1, 1),
                            end_date=datetime.datetime(event_year, 1, 1)))
                else:
                    event_keys.add(ndb.Key(Event, item.model_key))
                if type(item) == Favorite:
                    event_fav[item.model_key] = item
                elif type(item) == Subscription:
                    event_subs[item.model_key] = item

        team_futures = ndb.get_multi_async(team_keys)
        event_futures = ndb.get_multi_async(event_keys)
        match_futures = ndb.get_multi_async(match_keys)
        match_event_futures = ndb.get_multi_async(match_event_keys)

        teams = sorted(
            [team_future.get_result() for team_future in team_futures],
            key=lambda x: x.team_number)
        team_fav_subs = []
        for team in teams:
            fav = team_fav.get(team.key.id(), None)
            subs = team_subs.get(team.key.id(), None)
            team_fav_subs.append((team, fav, subs))

        events += [event_future.get_result() for event_future in event_futures]
        EventHelper.sort_events(events)

        event_fav_subs = []
        for event in events:
            fav = event_fav.get(event.key.id(), None)
            subs = event_subs.get(event.key.id(), None)
            event_fav_subs.append((event, fav, subs))

        matches = [match_future.get_result() for match_future in match_futures]
        match_events = [
            match_event_future.get_result()
            for match_event_future in match_event_futures
        ]
        MatchHelper.natural_sort_matches(matches)

        match_fav_subs_by_event = {}
        for event in match_events:
            match_fav_subs_by_event[event.key.id()] = (event, [])

        for match in matches:
            event_key = match.key.id().split('_')[0]
            fav = match_fav.get(match.key.id(), None)
            subs = match_subs.get(match.key.id(), None)
            match_fav_subs_by_event[event_key][1].append((match, fav, subs))

        event_match_fav_subs = sorted(
            match_fav_subs_by_event.values(),
            key=lambda x: EventHelper.distantFutureIfNoStartDate(x[0]))
        event_match_fav_subs = sorted(
            event_match_fav_subs,
            key=lambda x: EventHelper.distantFutureIfNoEndDate(x[0]))

        self.template_values['team_fav_subs'] = team_fav_subs
        self.template_values['event_fav_subs'] = event_fav_subs
        self.template_values['event_match_fav_subs'] = event_match_fav_subs
        self.template_values['status'] = self.request.get('status')
        self.template_values['year'] = datetime.datetime.now().year

        self.response.out.write(
            jinja2_engine.render('mytba.html', self.template_values))
Example #56
    def get(self):
        self._require_registration()

        user = self.user_bundle.account.key
        now = datetime.datetime.now()
        team_favorites_future = Favorite.query(
            Favorite.model_type == ModelType.TEAM,
            ancestor=user).fetch_async()

        favorite_team_keys = map(lambda f: ndb.Key(Team, f.model_key),
                                 team_favorites_future.get_result())
        favorite_teams_future = ndb.get_multi_async(favorite_team_keys)

        favorite_teams = [
            team_future.get_result() for team_future in favorite_teams_future
        ]

        favorite_teams_events_futures = []
        for team in favorite_teams:
            favorite_teams_events_futures.append(
                TeamYearEventsQuery(team.key_name, now.year).fetch_async())

        past_events_by_event = {}
        live_events_by_event = {}
        future_events_by_event = {}
        for team, events_future in zip(favorite_teams,
                                       favorite_teams_events_futures):
            events = events_future.get_result()
            if not events:
                continue
            EventHelper.sort_events(events)  # Sort by date
            for event in events:
                if event.within_a_day:
                    if event.key_name not in live_events_by_event:
                        live_events_by_event[event.key_name] = (event, [])
                    live_events_by_event[event.key_name][1].append(team)
                elif event.start_date < now:
                    if event.key_name not in past_events_by_event:
                        past_events_by_event[event.key_name] = (event, [])
                    past_events_by_event[event.key_name][1].append(team)
                else:
                    if event.key_name not in future_events_by_event:
                        future_events_by_event[event.key_name] = (event, [])
                    future_events_by_event[event.key_name][1].append(team)
                    break  # Only find one next event for each team

        past_events = []
        past_eventteams = []
        for past_event, past_eventteam in past_events_by_event.itervalues():
            past_events.append(past_event)
            past_eventteams.append(past_eventteam)
        past_events_with_teams = EventTeamStatusHelper.buildEventTeamStatus(
            past_events, past_eventteams, favorite_teams)
        past_events_with_teams.sort(key=lambda x: x[0].name)
        past_events_with_teams.sort(
            key=lambda x: EventHelper.distantFutureIfNoStartDate(x[0]))
        past_events_with_teams.sort(
            key=lambda x: EventHelper.distantFutureIfNoEndDate(x[0]))

        live_events = []
        live_eventteams = []
        for live_event, live_eventteam in live_events_by_event.itervalues():
            live_events.append(live_event)
            live_eventteams.append(live_eventteam)
        live_events_with_teams = EventTeamStatusHelper.buildEventTeamStatus(
            live_events, live_eventteams, favorite_teams)
        live_events_with_teams.sort(key=lambda x: x[0].name)

        future_events_with_teams = []
        for event_key, data in future_events_by_event.iteritems():
            future_events_with_teams.append(
                (data[0], TeamHelper.sortTeams(data[1])))
        future_events_with_teams.sort(key=lambda x: x[0].name)
        future_events_with_teams.sort(
            key=lambda x: EventHelper.distantFutureIfNoStartDate(x[0]))
        future_events_with_teams.sort(
            key=lambda x: EventHelper.distantFutureIfNoEndDate(x[0]))

        # Resolve futures before rendering
        for _, teams_and_statuses_future in past_events_with_teams:
            for team_and_status_future in teams_and_statuses_future:
                team_and_status_future[1] = team_and_status_future[
                    1].get_result()
        for _, teams_and_statuses_future in live_events_with_teams:
            for team_and_status_future in teams_and_statuses_future:
                team_and_status_future[1] = team_and_status_future[
                    1].get_result()

        self.template_values.update({
            'past_events_with_teams':
            past_events_with_teams,
            'live_events_with_teams':
            live_events_with_teams,
            'future_events_with_teams':
            future_events_with_teams,
        })

        path = os.path.join(os.path.dirname(__file__),
                            '../templates/mytba_live.html')
        self.response.out.write(template.render(path, self.template_values))
Example #57
    def _render(self):
        new_search = not self._year or (not self._award_types and not self._seed and not self._playoff_level and not self._cad_model)
        if new_search:
            result_models = []
            num_results = 0
            result_expressions = None
        else:
            # Construct query string
            sort_options_expressions = []
            returned_expressions = []
            partial_queries = []

            search_index = search.Index(name=SearchHelper.TEAM_AWARDS_INDEX)

            partial_queries.append('year={}'.format(self._year))
            award_filter = ' OR '.join(['award={}'.format(award_type) for award_type in self._award_types])
            if award_filter:
                # parenthesize so the ORs don't mix with the ANDed clauses
                partial_queries.append('({})'.format(award_filter))

            if self._seed:
                seed_field_name = 'seed_{}'.format(self._seed)
                partial_queries.append('{}>0'.format(seed_field_name))
                returned_expressions.append(search.FieldExpression(
                    name='seed_count', expression=seed_field_name))

                if self._sort_field == 'seed':
                    sort_options_expressions.append(
                        search.SortExpression(
                            expression=seed_field_name,
                            direction=search.SortExpression.DESCENDING))

            if self._playoff_level:
                comp_level_name = 'comp_level_{}'.format(self.PLAYOFF_MAP[self._playoff_level])
                partial_queries.append('{}>0'.format(comp_level_name))
                returned_expressions.append(search.FieldExpression(
                    name='comp_level_count', expression=comp_level_name))

                if self._sort_field == 'playoff_level':
                    sort_options_expressions.append(
                        search.SortExpression(
                            expression=comp_level_name,
                            direction=search.SortExpression.DESCENDING))

            if self._cad_model:
                partial_queries.append('has_cad=1')

            query_string = ' AND '.join(partial_queries)

            # Tiebreak sorting by number
            sort_options_expressions.append(
                search.SortExpression(
                    expression='number',
                    direction=search.SortExpression.ASCENDING))

            # Perform query
            query = search.Query(
                query_string=query_string,
                options=search.QueryOptions(
                    limit=self.PAGE_SIZE,
                    number_found_accuracy=10000,  # Larger than the number of possible results
                    offset=self.PAGE_SIZE * self._page,
                    sort_options=search.SortOptions(
                        expressions=sort_options_expressions
                    ),
                    returned_expressions=returned_expressions
                )
            )

            docs = search_index.search(query)
            num_results = docs.number_found
            model_keys = []
            result_expressions = defaultdict(lambda: defaultdict(float))
            for result in docs.results:
                team_key = result.doc_id.split('_')[0]
                model_keys.append(ndb.Key('Team', team_key))
                for expression in result.expressions:
                    result_expressions[team_key][expression.name] = expression.value

            model_futures = ndb.get_multi_async(model_keys)

            result_models = [model_future.get_result() for model_future in model_futures]

        self.template_values.update({
            'valid_years': self.VALID_YEARS,
            'valid_award_types': self.VALID_AWARD_TYPES,
            'num_special_awards': len(SORT_ORDER),
            'valid_seeds': self.VALID_SEEDS,
            'seed': self._seed,
            'playoff_level': self._playoff_level,
            'page_size': self.PAGE_SIZE,
            'max_results': self.MAX_RESULTS,
            'page': self._page,
            'year': self._year,
            'award_types': self._award_types,
            'cad_model': self._cad_model,
            'new_search': new_search,
            'num_results': num_results,
            'capped_num_results': min(self.MAX_RESULTS, num_results),
            'result_models': result_models,
            'result_expressions': result_expressions,
            'sort_field': self._sort_field,
        })

        return jinja2_engine.render('advanced_search.html', self.template_values)
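For illustration, with a year, two award types, a seed filter, and the CAD flag set, the pieces above join into a query string like this:

partial_queries = ['year=2016', '(award=1 OR award=2)', 'seed_1>0', 'has_cad=1']
query_string = ' AND '.join(partial_queries)
# -> 'year=2016 AND (award=1 OR award=2) AND seed_1>0 AND has_cad=1'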
Example #58
 def _query_async(self):
     team_key = self._query_args[0]
     event_teams = yield EventTeam.query(EventTeam.team == ndb.Key(Team, team_key)).fetch_async()
     event_keys = map(lambda event_team: event_team.event, event_teams)
     events = yield ndb.get_multi_async(event_keys)
     raise ndb.Return(events)
Example #59
 def _query_async(self):
     event_key = self._query_args[0]
     event = yield Event.get_by_id_async(event_key)
     divisions = yield ndb.get_multi_async(event.divisions)
     raise ndb.Return(divisions)
Example #60
  def get(self):
    cursor_str = self.request.get('cursor')
    limit = int(self.request.get('limit', 100))
    sort = self.request.get('sort', self.SORT_CHOICES[0][0])
    state = self.request.get('state', self.STATE_CHOICES[0][0][0])
    counts = self.request.get('counts', '').strip()
    task_tags = [
      line for line in self.request.get('task_tag', '').splitlines() if line
    ]

    if not any(sort == i[0] for i in self.SORT_CHOICES):
      self.abort(400, 'Invalid sort')
    if not any(any(state == i[0] for i in j) for j in self.STATE_CHOICES):
      self.abort(400, 'Invalid state')

    if sort != 'created_ts':
      # Zap all filters in this case to reduce the number of required indexes.
      # Revisit according to the user requests.
      state = 'all'

    now = utils.utcnow()
    # "Temporarily" disable the count. This is too slow on the prod server
    # (>10s). The fix is to have the web page do a XHR query to get the values
    # asynchronously.
    counts_future = None
    if counts == 'true':
      counts_future = self._get_counts_future(now)

    try:
      if task_tags:
        # Enforce created_ts when tags are used.
        sort = 'created_ts'
      query = task_result.get_result_summaries_query(
          None, None, sort, state, task_tags)
      tasks, cursor_str = datastore_utils.fetch_page(query, limit, cursor_str)

      # Prefetch the TaskRequest all at once, so that ndb's in-process cache has
      # it instead of fetching them one at a time indirectly when using
      # TaskResultSummary.request_key.get().
      futures = ndb.get_multi_async(t.request_key for t in tasks)

      # Evaluate the counts to print the filtering columns with the associated
      # numbers.
      state_choices = self._get_state_choices(counts_future)
    except ValueError as e:
      self.abort(400, str(e))

    def safe_sum(items):
      return sum(items, datetime.timedelta())

    def avg(items):
      if not items:
        return 0.
      return safe_sum(items) / len(items)

    def median(items):
      if not items:
        return 0.
      middle = len(items) / 2
      if len(items) % 2:
        return items[middle]
      return (items[middle-1]+items[middle]) / 2

    gen = (t.duration_now(now) for t in tasks)
    durations = sorted(t for t in gen if t is not None)
    gen = (t.pending_now(now) for t in tasks)
    pendings = sorted(t for t in gen if t is not None)
    total_cost_usd = sum(t.cost_usd for t in tasks)
    total_cost_saved_usd = sum(
        t.cost_saved_usd for t in tasks if t.cost_saved_usd)
    # Include the overhead in the total amount of time saved, since it's
    # overhead saved.
    # In theory, t.duration_as_seen_by_server should always be set when
    # t.deduped_from is set, but there are some broken entities in the datastore.
    total_saved = safe_sum(
        t.duration_as_seen_by_server for t in tasks
        if t.deduped_from and t.duration_as_seen_by_server)
    duration_sum = safe_sum(durations)
    total_saved_percent = (
        (100. * total_saved.total_seconds() / duration_sum.total_seconds())
        if duration_sum else 0.)

    try_link = '/tasklist?l=%d' % limit
    if task_tags:
      try_link += '&f=' + '&f='.join(task_tags)
    params = {
      'cursor': cursor_str,
      'duration_average': avg(durations),
      'duration_median': median(durations),
      'duration_sum': duration_sum,
      'has_pending': any(t.is_pending for t in tasks),
      'has_running': any(t.is_running for t in tasks),
      'is_admin': acl.is_admin(),
      'is_privileged_user': acl.is_privileged_user(),
      'limit': limit,
      'now': now,
      'pending_average': avg(pendings),
      'pending_median': median(pendings),
      'pending_sum': safe_sum(pendings),
      'show_footer': bool(pendings or durations),
      'sort': sort,
      'sort_choices': self.SORT_CHOICES,
      'state': state,
      'state_choices': state_choices,
      'task_tag': '\n'.join(task_tags),
      'tasks': tasks,
      'total_cost_usd': total_cost_usd,
      'total_cost_saved_usd': total_cost_saved_usd,
      'total_saved': total_saved,
      'total_saved_percent': total_saved_percent,
      'try_link': try_link,
      'xsrf_token': self.generate_xsrf_token(),
    }
    # TODO(maruel): If admin or if the user is task's .user, show the Cancel
    # button. Do not show otherwise.
    self.response.write(template.render('swarming/user_tasks.html', params))

    # Do not let dangling futures linger around.
    ndb.Future.wait_all(futures)
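The prefetch above works because ndb retains each fetched entity in its in-context cache, so later synchronous key.get() calls for the same keys are served from memory; a generic sketch of the pattern:

from google.appengine.ext import ndb

def prewarm(keys):
    # Fetch everything concurrently; results land in the in-context cache.
    futures = ndb.get_multi_async(keys)
    ndb.Future.wait_all(futures)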