Example #1
def get_tasks(task_name, task_tags, cursor_str, limit, sort, state):
    """Returns TaskResultSummary entities for this query.

  This function is synchronous.

  Arguments:
    task_name: search for task name whole word.
    task_tags: list of search for one or multiple task tags.
    cursor_str: query-dependent string encoded cursor to continue a previous
        search.
    limit: Maximum number of items to return.
    sort: get_result_summary_query() argument. Only used if both task_name and
        task_tags are empty.
    state: get_result_summary_query() argument. Only used if both task_name and
        task_tags are empty.

  Returns:
    tuple(list of tasks, str encoded cursor, updated sort, updated state)
  """
    # TODO(vadimsh): Use tags with get_result_summary_query. Will require existing
    # entities to be updated first to include 'tags' fields (otherwise they'll
    # disappear from the search).
    if task_tags:
        # Tag based search. Override the flags.
        sort = 'created_ts'
        state = 'all'
        # Only the TaskRequest has the tags. So first query all the keys to
        # requests; then fetch the TaskResultSummary.
        order = _sort_property(sort)
        query = task_request.TaskRequest.query().order(order)
        task_tags = task_tags[:]
        tags_filter = task_request.TaskRequest.tags == task_tags.pop(0)
        while task_tags:
            tags_filter = ndb.AND(
                tags_filter, task_request.TaskRequest.tags == task_tags.pop(0))
        query = query.filter(tags_filter)
        cursor = datastore_query.Cursor(urlsafe=cursor_str)
        requests, cursor, more = query.fetch_page(limit,
                                                  start_cursor=cursor,
                                                  keys_only=True)
        keys = [
            task_pack.request_key_to_result_summary_key(k) for k in requests
        ]
        tasks = ndb.get_multi(keys)
        cursor_str = cursor.urlsafe() if cursor and more else None
    elif task_name:
        # Whole-word search on the task name. Override the flags.
        sort = 'created_ts'
        state = 'all'
        tasks, cursor_str = search_by_name(task_name, cursor_str, limit)
    else:
        # Normal listing.
        query = get_result_summary_query(sort, state, None)
        cursor = datastore_query.Cursor(urlsafe=cursor_str)
        tasks, cursor, more = query.fetch_page(limit, start_cursor=cursor)
        cursor_str = cursor.urlsafe() if cursor and more else None

    return tasks, cursor_str, sort, state
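Example #1, like most of the snippets below, boils down to one idiom: decode the client-supplied urlsafe string into a datastore_query.Cursor, pass it to fetch_page() as start_cursor, and hand a new cursor back to the client only while more results remain. A minimal sketch of that round trip, assuming a hypothetical MyModel entity and fetch_next_page() helper that are not part of the example above:

from google.appengine.datastore import datastore_query
from google.appengine.ext import ndb


class MyModel(ndb.Model):
    # Hypothetical model, used only to illustrate the cursor round trip.
    created_ts = ndb.DateTimeProperty(auto_now_add=True)


def fetch_next_page(urlsafe_cursor, limit=100):
    # Decode the urlsafe string only when one was supplied; None means
    # "start from the beginning of the result set".
    start_cursor = (datastore_query.Cursor(urlsafe=urlsafe_cursor)
                    if urlsafe_cursor else None)
    items, next_cursor, more = MyModel.query().order(
        -MyModel.created_ts).fetch_page(limit, start_cursor=start_cursor)
    # Only return a cursor when there is (probably) more data, so the caller
    # can tell the last page apart from an intermediate one.
    return items, next_cursor.urlsafe() if next_cursor and more else None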
Example #2
    def get_question_ids_linked_to_skill_ids(cls, question_count, skill_ids,
                                             start_cursor):
        """Fetches the list of question ids linked to the skill in batches.

        Args:
            question_count: int. The number of questions to be returned.
            skill_ids: list(str). The ids of skills for which the linked
                question ids are to be retrieved.
            start_cursor: str. The starting point from which the batch of
                questions are to be returned. This value should be urlsafe.

        Returns:
            list(str), str|None. The question ids linked to given skills and the
                next cursor value to be used for the next page (or None if no
                more pages are left). The returned next cursor value is urlsafe.
        """
        if start_cursor != '':
            cursor = datastore_query.Cursor(urlsafe=start_cursor)
            question_skill_link_models, next_cursor, more = cls.query(
                cls.skill_id.IN(skill_ids)).order(cls.key).fetch_page(
                    question_count, start_cursor=cursor)
        else:
            question_skill_link_models, next_cursor, more = cls.query(
                cls.skill_id.IN(skill_ids)).order(
                    cls.key).fetch_page(question_count)
        question_ids = [
            model.question_id for model in question_skill_link_models
        ]
        next_cursor_str = (next_cursor.urlsafe() if
                           (next_cursor and more) else None)
        return question_ids, next_cursor_str
Example #3
def fetch_exploration_task_history_page(exploration,
                                        urlsafe_start_cursor=None):
    """Fetches a page from the given exploration's history of resolved tasks.

    Args:
        exploration: exp_domain.Exploration. The exploration to fetch the
            history page for.
        urlsafe_start_cursor: str or None. Starting point for the search. When
            None, the starting point is the very beginning of the history
            results (i.e. starting from the most recently resolved task entry).

    Returns:
        tuple. Contains the following 3 items:
            results: list(improvements_domain.TaskEntry). The query results.
            urlsafe_cursor: str or None. a query cursor pointing to the "next"
                batch of results. If there are no more results, this might be
                None.
            more: bool. Indicates whether there are (likely) more results after
                this batch. If False, there are no more results; if True, there
                are probably more results.
    """
    start_cursor = (urlsafe_start_cursor
                    and datastore_query.Cursor(urlsafe=urlsafe_start_cursor))
    results, cursor, more = (improvements_models.TaskEntryModel.query(
        improvements_models.TaskEntryModel.entity_type == (
            improvements_models.TASK_ENTITY_TYPE_EXPLORATION),
        improvements_models.TaskEntryModel.entity_id == exploration.id,
        improvements_models.TaskEntryModel.status == (
            improvements_models.TASK_STATUS_RESOLVED
        )).order(-improvements_models.TaskEntryModel.resolved_on).fetch_page(
            feconf.MAX_TASK_MODELS_PER_HISTORY_PAGE,
            start_cursor=start_cursor))
    return ([get_task_entry_from_model(model) for model in results], cursor
            and cursor.urlsafe(), more)
Example #4
    def get_question_ids_linked_to_skill_ids(cls, question_count, skill_ids,
                                             start_cursor):
        """Fetches the list of question ids linked to the skill in batches.

        Args:
            question_count: int. The number of questions to be returned.
            skill_ids: list(str). The ids of skills for which the linked
                question ids are to be retrieved.
            start_cursor: str. The starting point from which the batch of
                questions are to be returned. This value should be urlsafe.

        Returns:
            list(str), str|None. The question ids linked to given skills and the
                next cursor value to be used for the next page (or None if no
                more pages are left). The returned next cursor value is urlsafe.
        """
        if start_cursor != '':
            cursor = datastore_query.Cursor(urlsafe=start_cursor)
            question_skill_link_models, next_cursor, more = cls.query(
                cls.skill_id.IN(skill_ids)
                # Order by cls.key is needed alongside cls.last_updated so as to
                # resolve conflicts, if any.
                # Reference SO link: https://stackoverflow.com/q/12449197
            ).order(-cls.last_updated, cls.key).fetch_page(question_count,
                                                           start_cursor=cursor)
        else:
            question_skill_link_models, next_cursor, more = cls.query(
                cls.skill_id.IN(skill_ids)).order(
                    -cls.last_updated, cls.key).fetch_page(question_count)
        question_ids = [
            model.question_id for model in question_skill_link_models
        ]
        next_cursor_str = (next_cursor.urlsafe() if
                           (next_cursor and more) else None)
        return question_ids, next_cursor_str
Example #5
    def get_skill_opportunities(cls, page_size, urlsafe_start_cursor):
        """Returns a list of skill opportunities available for adding questions.

        Args:
            page_size: int. The maximum number of entities to be returned.
            urlsafe_start_cursor: str or None. If provided, the list of
                returned entities starts from this datastore cursor.
                Otherwise, the returned entities start from the beginning
                of the full list of entities.

        Returns:
            3-tuple of (results, cursor, more) as described in fetch_page() at:
            https://developers.google.com/appengine/docs/python/ndb/queryclass,
            where:
                results: list(SkillOpportunityModel)|None. A list
                    of query results.
                cursor: str or None. A query cursor pointing to the next
                    batch of results. If there are no more results, this might
                    be None.
                more: bool. If True, there are (probably) more results after
                    this batch. If False, there are no further results after
                    this batch.
        """
        if urlsafe_start_cursor:
            start_cursor = datastore_query.Cursor(urlsafe=urlsafe_start_cursor)
        else:
            start_cursor = None

        results, cursor, more = cls.get_all().order(cls.created_on).fetch_page(
            page_size, start_cursor=start_cursor)
        return (results, (cursor.urlsafe() if cursor else None), more)
Example #6
def _Migrate(status, cursor=None):
    if cursor:
        cursor = datastore_query.Cursor(urlsafe=cursor)
    query = job.Job.query(job.Job.task == None)
    jobs, next_cursor, more = query.fetch_page(_BATCH_SIZE,
                                               start_cursor=cursor)

    # Because individual job instances might fail to be persisted for some reason
    # (e.g. entities exceeding the entity size limit) we'll perform the updates
    # one at a time. This is not an ideal state, since we'll want to be able to
    # migrate all jobs to an alternative structure in the future, but we recognise
    # that partial success is better than total failure.
    for j in jobs:
        try:
            j.put()
            status['count'] += 1
        except datastore_errors.BadRequestError as e:
            logging.error('Failed migrating job %s: %s', j.job_id, e)
            status['errors'] += 1

    if more:
        stored_object.Set(_STATUS_KEY, status)
        deferred.defer(_Migrate, status, next_cursor.urlsafe())
    else:
        stored_object.Set(_STATUS_KEY, None)
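The deferred chaining in this example is a common way to walk a large dataset in batches: process one page, then re-enqueue the same handler with the urlsafe cursor so the next run resumes where this one stopped. A compact sketch of that pattern, assuming a hypothetical Record model, batch size, and process_in_batches() function (none of these names come from the example):

from google.appengine.datastore import datastore_query
from google.appengine.ext import deferred
from google.appengine.ext import ndb

_BATCH_SIZE = 100


class Record(ndb.Model):
    # Hypothetical model; stands in for whatever entity is being migrated.
    processed = ndb.BooleanProperty(default=False)


def process_in_batches(urlsafe_cursor=None):
    start_cursor = (datastore_query.Cursor(urlsafe=urlsafe_cursor)
                    if urlsafe_cursor else None)
    records, next_cursor, more = Record.query().fetch_page(
        _BATCH_SIZE, start_cursor=start_cursor)
    for record in records:
        record.processed = True
    ndb.put_multi(records)
    if more:
        # Chain the next batch; the urlsafe cursor survives the task queue
        # round trip as a plain string.
        deferred.defer(process_in_batches, next_cursor.urlsafe())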
Example #7
    def fetch_page(cls, page_size, cursor):
        """Fetches a list of all query_models sorted by creation date.

        Args:
            page_size: int. The maximum number of entities to be returned.
            cursor: str or None. The list of returned entities starts from this
                datastore cursor.

        Returns:
            3-tuple of (query_models, next_cursor, more) as described in
            fetch_page() at:
            https://developers.google.com/appengine/docs/python/ndb/queryclass,
            where:
                query_models: List of UserQueryModel instances.
                next_cursor: str or None. A query cursor pointing to the next
                    batch of results. If there are no more results, this might
                    be None.
                more: bool. If True, there are probably more results after
                    this batch. If False, there are no further results after
                    this batch.
        """
        cursor = datastore_query.Cursor(urlsafe=cursor)
        query_models, next_cursor, more = (
            cls.query().order(-cls.created_on).fetch_page(page_size,
                                                          start_cursor=cursor))
        next_cursor = next_cursor.urlsafe() if (next_cursor and more) else None
        return query_models, next_cursor, more
Example #8
 def get(self):
     cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
     repo = self.request.get('repo')
     number = int(self.request.get('number', 0)) or None
     count = int(self.request.get('count', 500))
     if repo and number:
         q = models.GithubWebhookRaw.query(
             models.GithubWebhookRaw.repo == repo,
             models.GithubWebhookRaw.number == number)
     else:
         q = models.GithubWebhookRaw.query()
     q = q.order(models.GithubWebhookRaw.timestamp)
     events, next_cursor, more = q.fetch_page(count, start_cursor=cursor)
     out = []
     for event in events:
         out.append({
             'repo': event.repo,
             'event': event.event,
             'guid': event.guid,
             'timestamp': str(event.timestamp),
             'body': json.loads(event.body)
         })
     resp = {'next': more and next_cursor.urlsafe(), 'calls': out}
     self.response.headers['content-type'] = 'text/json'
     self.response.write(json.dumps(resp, indent=4, sort_keys=True))
Example #9
def fetch_page(query, batch_size, cursor_str, **kwargs):
  """Fetches a page from a query.

  Arguments:
    query: ndb.Query.
    batch_size: Maximum number of items to return.
    cursor_str: query-dependent string encoded cursor to continue a previous
        search.

  Returns:
  - items
  - str encoded cursor if relevant or None.
  """
  assert isinstance(query, ndb.Query), query
  if not 0 < batch_size <= 1000 or not isinstance(batch_size, int):
    raise ValueError(
        'batch_size must be between 1 and 1000, got %r' % batch_size)
  if cursor_str:
    if not isinstance(cursor_str, basestring):
      raise ValueError(
          'cursor must be a valid string, got %r' % cursor_str)
    cursor = datastore_query.Cursor(urlsafe=cursor_str)
  else:
    cursor = None
  items, cursor, more = query.fetch_page(
      batch_size, start_cursor=cursor, **kwargs)
  if not more:
    return items, None
  return items, cursor.urlsafe()
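A possible call site for the fetch_page() helper above, sketched under the assumption that the helper is importable as-is and that Event and list_events() are hypothetical names defined only for this illustration:

from google.appengine.ext import ndb


class Event(ndb.Model):
    # Hypothetical model used only for this illustration.
    created_ts = ndb.DateTimeProperty(auto_now_add=True)


def list_events(request_cursor_str):
    # Most recent events first; the cursor string comes straight from the
    # previous response (or is empty/None on the first request).
    query = Event.query().order(-Event.created_ts)
    items, next_cursor_str = fetch_page(query, 100, request_cursor_str)
    # next_cursor_str is None on the last page; otherwise the client echoes it
    # back on its next request to resume where this page ended.
    return items, next_cursor_str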
Example #10
    def get(self):
        """
        1) Filter by subject
        2) Filter by Completion
        """
        user = users.get_current_user()
        if check_creds(user, self.check_u(), admin=True):
            admin = hold_creds(user, self.check_u())
            cursor_str = self.request.get('c', None)
            cursor = None
            if cursor_str:
                cursor = datastore_query.Cursor(urlsafe=cursor_str)

            var1 = self.request.get('q')
            # var2 = self.request.get('o')
            subject_list = m.Standard.admin_get_subject_list()
            results, new_cursor, more = self.organise(var1, subject_list,
                                                      cursor)

            if more:
                urlcursor = new_cursor.urlsafe()
            else:
                urlcursor = None

            self.render('standards/all_standard_page_admin.html',
                        var=results,
                        subject_list=subject_list,
                        q=var1,
                        urlcursor=urlcursor,
                        admin=admin)
        else:
            self.redirect('/ouch')
Example #11
 def post(self):
     """Query for tests, and put ones with no new data on the delete queue."""
     datastore_hooks.SetPrivilegedRequest()
     cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
     tests, next_cursor, more = graph_data.TestMetadata.query().fetch_page(
         _TESTS_TO_CHECK_AT_ONCE, keys_only=True, start_cursor=cursor)
     if more:
         taskqueue.add(url='/delete_old_tests',
                       params={'cursor': next_cursor.urlsafe()},
                       queue_name=_TASK_QUEUE_NAME)
     for test in tests:
         # Delete this test if:
         # 1) It has no Rows newer than the cutoff
         # 2) It has no descendant tests
         no_new_rows = False
         last_row = graph_data.Row.query(
             graph_data.Row.parent_test == utils.OldStyleTestKey(
                 test)).order(-graph_data.Row.timestamp).get()
         if last_row:
             if last_row.timestamp < datetime.datetime.today(
             ) - _CUTOFF_DATE:
                 no_new_rows = True
         else:
             no_new_rows = True
         descendants = list_tests.GetTestDescendants(test, keys_only=True)
         descendants.remove(test)
         if not descendants and no_new_rows:
             taskqueue.add(
                 url='/delete_test_data',
                 params={
                     'test_path':
                     utils.TestPath(test),  # For manual inspection.
                     'test_key': test.urlsafe(),
                 },
                 queue_name=_DELETE_TASK_QUEUE_NAME)
Example #12
    def _fetch_page_sorted_by_last_updated(cls, query, page_size,
                                           urlsafe_start_cursor):
        """Fetches a page of entities sorted by their last_updated attribute in
        descending order (newly updated first).

        Args:
            query: ndb.Query.
            page_size: int. The maximum number of entities to be returned.
            urlsafe_start_cursor: str or None. If provided, the list of returned
                entities starts from this datastore cursor. Otherwise,
                the returned entities start from the beginning of the full
                list of entities.

        Returns:
            3-tuple of (results, cursor, more) as described in fetch_page() at:
            https://developers.google.com/appengine/docs/python/ndb/queryclass,
            where:
                results: List of query results.
                cursor: str or None. A query cursor pointing to the next batch
                    of results. If there are no more results, this will be None.
                more: bool. If True, there are (probably) more results after
                    this batch. If False, there are no further results after
                    this batch.
        """
        if urlsafe_start_cursor:
            start_cursor = datastore_query.Cursor(urlsafe=urlsafe_start_cursor)
        else:
            start_cursor = None

        results, cursor, more = query.order(-cls.last_updated).fetch_page(
            page_size, start_cursor=start_cursor)
        return (results, cursor.urlsafe() if cursor else None, more)
Example #13
    def get(self):
        limit = int(self.request.get('limit', 100))
        cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
        sort_by = self.request.get('sort_by', '__key__')
        if sort_by not in self.ACCEPTABLE_BOTS_SORTS:
            self.abort(400, 'Invalid sort_by query parameter')

        if sort_by[0] == '-':
            order = datastore_query.PropertyOrder(
                sort_by[1:], datastore_query.PropertyOrder.DESCENDING)
        else:
            order = datastore_query.PropertyOrder(
                sort_by, datastore_query.PropertyOrder.ASCENDING)

        now = utils.utcnow()
        cutoff = now - datetime.timedelta(
            seconds=config.settings().bot_death_timeout_secs)

        num_bots_busy_future = bot_management.BotInfo.query(
            bot_management.BotInfo.is_busy == True).count_async()
        num_bots_dead_future = bot_management.BotInfo.query(
            bot_management.BotInfo.last_seen_ts < cutoff).count_async()
        num_bots_quarantined_future = bot_management.BotInfo.query(
            bot_management.BotInfo.quarantined == True).count_async()
        num_bots_total_future = bot_management.BotInfo.query().count_async()
        fetch_future = bot_management.BotInfo.query().order(
            order).fetch_page_async(limit, start_cursor=cursor)

        # TODO(maruel): self.request.host_url should be the default AppEngine url
        # version and not the current one. It is only an issue when
        # version-dot-appid.appspot.com urls are used to access this page.
        version = bot_code.get_bot_version(self.request.host_url)
        bots, cursor, more = fetch_future.get_result()
        # Prefetch the tasks. We don't actually use the value here, it'll be
        # implicitly used by ndb local's cache when refetched by the html template.
        tasks = filter(None, (b.task for b in bots))
        ndb.get_multi(tasks)
        num_bots_busy = num_bots_busy_future.get_result()
        num_bots_dead = num_bots_dead_future.get_result()
        num_bots_quarantined = num_bots_quarantined_future.get_result()
        num_bots_total = num_bots_total_future.get_result()
        params = {
            'bots': bots,
            'current_version': version,
            'cursor': cursor.urlsafe() if cursor and more else '',
            'is_admin': acl.is_admin(),
            'is_privileged_user': acl.is_privileged_user(),
            'limit': limit,
            'now': now,
            'num_bots_alive': num_bots_total - num_bots_dead,
            'num_bots_busy': num_bots_busy,
            'num_bots_dead': num_bots_dead,
            'num_bots_quarantined': num_bots_quarantined,
            'sort_by': sort_by,
            'sort_options': self.SORT_OPTIONS,
            'xsrf_token': self.generate_xsrf_token(),
        }
        self.response.write(
            template.render('swarming/restricted_botslist.html', params))
Example #14
	def exportDb(self, cursor=None, backupkey=None, endcursor=None, kind=None, *args, **kwargs):
		global backupKey
		assert safeStringComparison(backupKey, backupkey)
		if cursor:
			c = datastore_query.Cursor(urlsafe=cursor)
		else:
			c = None
		if endcursor:
			endCursor = datastore_query.Cursor(urlsafe=endcursor)
		else:
			endCursor = None
		q = datastore.Query(kind, cursor=c, end_cursor=endCursor)
		logging.error((cursor, backupkey, endcursor, kind))
		r = []
		for res in q.Run(limit=5):
			r.append(self.genDict(res))
		return (pickle.dumps({"cursor": str(q.GetCursor().urlsafe()), "values": r}).encode("HEX"))
Example #15
  def post(self, uuid):
    # Prepare the query
    cursor = self.parsed_json.get(santa_const.RULE_DOWNLOAD.CURSOR)

    if self.host.rule_sync_dt is None:
      logging.info('%s clean rule sync', 'Continuing' if cursor else 'Starting')

    # pylint:disable=g-explicit-bool-comparison
    query = santa_db.SantaRule.query(
        santa_db.SantaRule.in_effect == True,
        santa_db.SantaRule.updated_dt >= self.host.rule_sync_dt,
        santa_db.SantaRule.host_id.IN(['', uuid])
    ).order(santa_db.SantaRule.updated_dt, santa_db.SantaRule.key)
    # pylint:enable=g-explicit-bool-comparison

    # Fetch
    rules, next_cursor, more = query.fetch_page(
        settings.SANTA_RULE_BATCH_SIZE,
        start_cursor=datastore_query.Cursor(urlsafe=cursor))

    # Process the received rules.
    response_rules = []
    for rule in rules:
      creation_timestamp = common_utils.ToUtcTimestamp(rule.updated_dt)
      rule_dict = {
          santa_const.RULE_DOWNLOAD.SHA256: rule.key.parent().id(),
          santa_const.RULE_DOWNLOAD.RULE_TYPE: rule.rule_type,
          santa_const.RULE_DOWNLOAD.POLICY: rule.policy,
          santa_const.RULE_DOWNLOAD.CUSTOM_MSG: rule.custom_msg,
          santa_const.RULE_DOWNLOAD.CREATION_TIME: creation_timestamp}

      if rule.rule_type == common_const.RULE_TYPE.PACKAGE:
        # For Bundles, each binary member should have a separate rule generated
        # with a policy type matching that of the PACKAGE rule.
        binary_ids = rule.bundle_binary_ids
        binary_count = len(binary_ids)
        logging.debug('Syncing %s bundle rules', binary_ids)
        for id_ in binary_ids:
          dict_ = rule_dict.copy()
          dict_.update({
              santa_const.RULE_DOWNLOAD.SHA256: id_,
              santa_const.RULE_DOWNLOAD.RULE_TYPE:
                  common_const.RULE_TYPE.BINARY,
              santa_const.RULE_DOWNLOAD.FILE_BUNDLE_BINARY_COUNT: binary_count,
              santa_const.RULE_DOWNLOAD.FILE_BUNDLE_HASH: rule.key.parent().id()
          })
          response_rules.append(dict_)
      else:
        response_rules.append(rule_dict)

    # Prepare the response, include the cursor if there are more rules.
    response = {santa_const.RULE_DOWNLOAD.RULES: response_rules}
    if more:
      response[santa_const.RULE_DOWNLOAD.CURSOR] = next_cursor.urlsafe()

    self.respond_json(response)
Example #16
    def get(self, bot_id):
        # pagination is currently for tasks, not events.
        limit = int(self.request.get('limit', 100))
        cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
        bot_future = bot_management.get_info_key(bot_id).get_async()
        run_results, cursor, more = task_result.TaskRunResult.query(
            task_result.TaskRunResult.bot_id == bot_id).order(
                -task_result.TaskRunResult.started_ts).fetch_page(
                    limit, start_cursor=cursor)

        events_future = bot_management.get_events_query(bot_id).fetch_async(
            100)

        now = utils.utcnow()
        bot = bot_future.get_result()
        # Calculate the time this bot was idle.
        idle_time = datetime.timedelta()
        run_time = datetime.timedelta()
        if run_results:
            run_time = run_results[0].duration_now(now) or datetime.timedelta()
            if not cursor and run_results[0].state != task_result.State.RUNNING:
                # Add idle time since last task completed. Do not do this when a cursor
                # is used since it's not representative.
                idle_time = now - run_results[0].ended_ts
            for index in xrange(1, len(run_results)):
                # .started_ts will always be set by definition but .ended_ts may be None
                # if the task was abandoned. We can't count idle time since the bot may
                # have been busy running *another task*.
                # TODO(maruel): One option is to add a third value "broken_time".
                # Looking at timestamps specifically could help too, e.g. comparing
                # ended_ts of this task vs the next one to see if the bot was assigned
                # two tasks simultaneously.
                if run_results[index].ended_ts:
                    idle_time += (run_results[index - 1].started_ts -
                                  run_results[index].ended_ts)
                    duration = run_results[index].duration
                    if duration:
                        run_time += duration

        params = {
            'bot': bot,
            'bot_id': bot_id,
            'current_version': bot_code.get_bot_version(self.request.host_url),
            'cursor': cursor.urlsafe() if cursor and more else None,
            'events': events_future.get_result(),
            'idle_time': idle_time,
            'is_admin': acl.is_admin(),
            'limit': limit,
            'now': now,
            'run_results': run_results,
            'run_time': run_time,
            'xsrf_token': self.generate_xsrf_token(),
        }
        self.response.write(
            template.render('swarming/restricted_bot.html', params))
Example #17
    def get(self):
        """Fixes rows for one or more tests and queues the next task to fix more.

    Request parameters:
      ancestor: A slash-separated path to the ancestor to start from.
      cursor: An urlsafe string for a datastore_query.Cursor object.

    Outputs:
      Some indication of the results.
    """
        # Get the ancestor of the tests to change, and abort if not given.
        ancestor = self.request.get('ancestor')
        if not ancestor:
            self.ReportError('Missing ancestor parameter.')
            return
        ancestor_key = utils.TestKey(ancestor)

        # Get the query cursor if given.
        urlsafe_cursor = self.request.get('cursor')
        cursor = None
        if urlsafe_cursor:
            cursor = datastore_query.Cursor(urlsafe=urlsafe_cursor)
        more = False

        test_query = graph_data.Test.query(ancestor=ancestor_key)
        test_query = test_query.filter(graph_data.Test.has_rows == True)
        keys, next_cursor, more = test_query.fetch_page(_NUM_TESTS,
                                                        keys_only=True,
                                                        start_cursor=cursor)

        futures = []
        for key in keys:
            futures.extend(_FixTest(key))
        ndb.Future.wait_all(futures)

        if not futures:
            cursor = next_cursor

        urlsafe_cursor = cursor.urlsafe() if cursor else ''
        if more or futures:
            taskqueue.add(queue_name=_QUEUE_NAME,
                          url='/shrink_timestamp_revisions',
                          params={
                              'cursor': urlsafe_cursor or '',
                              'ancestor': ancestor
                          })
            logging.info('Task added, cursor: %s', urlsafe_cursor)

        # Display some information, to verify that something is happening.
        self.RenderHtml(
            'result.html',
            {'results': [{
                'name': 'cursor',
                'value': urlsafe_cursor
            }]})
Example #18
    def cursor(self, cursor, endCursor=None):
        """
			Sets the start cursor for this query.

			The result set will only include results behind that cursor.
			The cursor is generated by an earlier query with exactly the same configuration.

			Its safe to use client-supplied cursors, a cursor can't be abused to access entities
			which don't match the current filters.

			:param cursor: The cursor key to set to the Query.
			:type cursor: str | datastore_query.Cursor

			:returns: Returns the query itself for chaining.
			:rtype: server.db.Query
		"""
        if isinstance(cursor, basestring):
            cursor = datastore_query.Cursor(urlsafe=cursor)
        elif isinstance(cursor, datastore_query.Cursor) or cursor is None:
            pass
        else:
            raise ValueError(
                "Cursor must be String, datastore_query.Cursor or None")
        if endCursor is not None:
            if isinstance(endCursor, basestring):
                endCursor = datastore_query.Cursor(urlsafe=endCursor)
            elif isinstance(endCursor, datastore_query.Cursor):
                pass
            else:
                raise ValueError(
                    "endCursor must be String, datastore_query.Cursor or None")

        qo = self.datastoreQuery.__query_options
        self.datastoreQuery.__query_options = datastore_query.QueryOptions(
            keys_only=qo.keys_only,
            produce_cursors=qo.produce_cursors,
            start_cursor=cursor,
            end_cursor=endCursor or qo.end_cursor,
            projection=qo.projection)
        self._origCursor = cursor
        return (self)
Example #19
    def getListData(self, list_id, query, start=None, limit=50):
        """See ListDataReader.getListData for specification."""
        start_cursor = datastore_query.Cursor(urlsafe=start)
        entities, next_cursor, more = query.fetch_page(
            limit, start_cursor=start_cursor)

        next_cursor = next_cursor.urlsafe() if more else FINAL_BATCH

        col_funcs = [(c.col_id, c.getValue) for c in getList(list_id).columns]
        items = [toListItemDict(entity, col_funcs) for entity in entities]
        return ListData(items[:limit], next_cursor)
Example #20
    def _fetch_page_sorted_by_last_updated(cls, query, page_size,
                                           urlsafe_start_cursor):
        if urlsafe_start_cursor:
            start_cursor = datastore_query.Cursor(urlsafe=urlsafe_start_cursor)
        else:
            start_cursor = None

        results, cursor, more = query.order(-cls.last_updated).fetch_page(
            page_size, start_cursor=start_cursor)
        return (results, cursor.urlsafe() if cursor else None, more)
Example #21
 def search(cls, query_string=None, cursor=None, limit=None):
     limit = limit or 200
     start_cursor = datastore_query.Cursor(urlsafe=cursor) \
             if cursor else None
     query = cls.query()
     if query_string:
         # TODO: Support query language.
         query = query.filter(cls.email == query_string)
     query = query.order(-cls.created)
     results, next_cursor, has_more = \
             query.fetch_page(limit, start_cursor=start_cursor)
     return (results, next_cursor, has_more)
Example #22
  def _Migrate(self, query, status):
    cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
    jobs, next_cursor, more = query.fetch_page(_BATCH_SIZE, start_cursor=cursor)
    ndb.put_multi(jobs)

    if more:
      status['count'] += len(jobs)
      stored_object.Set(_STATUS_KEY, status)
      params = {'cursor': next_cursor.urlsafe()}
      taskqueue.add(url='/api/migrate', params=params)
    else:
      stored_object.Set(_STATUS_KEY, None)
Example #23
 def get(self):
     limit = int(self.request.get('limit', 100))
     cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
     errors_found, cursor, more = models.Error.query().order(
         -models.Error.created_ts).fetch_page(limit, start_cursor=cursor)
     params = {
         'cursor': cursor.urlsafe() if cursor and more else None,
         'errors': errors_found,
         'limit': limit,
         'now': utils.utcnow(),
     }
     self.response.out.write(
         template.render('ereporter2/errors.html', params))
Example #24
 def get(self):
   now = utils.utcnow()
   limit = int(self.request.get('limit', 1000))
   cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
   q = bot_management.BotInfo.query().order(bot_management.BotInfo.key)
   bots, cursor, more = q.fetch_page(limit, start_cursor=cursor)
   data = {
     'cursor': cursor.urlsafe() if cursor and more else None,
     'death_timeout': config.settings().bot_death_timeout_secs,
     'items': [b.to_dict_with_now(now) for b in bots],
     'limit': limit,
     'now': now,
   }
   self.send_response(utils.to_json_encodable(data))
Example #25
  def convertProposals(self, request, *args, **kwargs):
    """Start tasks to convert proposals for all organizations.

    POST Args:
      program_key: the key of the program whose proposals should be converted
      org_cursor: the cursor indicating at which org we currently are
    """
    params = dicts.merge(request.POST, request.GET)

    if 'program_key' not in params:
      logging.error("missing program_key in params: '%s'", params)
      return responses.terminateTask()

    program = GSoCProgram.get_by_key_name(params['program_key'])

    if not program:
      logging.error("invalid program_key in params: '%s'", params)
      return responses.terminateTask()

    query = soc_org_model.SOCOrganization.query(
        soc_org_model.SOCOrganization.program ==
            ndb.Key.from_old_key(program.key()),
        soc_org_model.SOCOrganization.status == org_model.Status.ACCEPTED)

    org_cursor = params.get('org_cursor')
    start_cursor = (
        datastore_query.Cursor(urlsafe=urllib.unquote_plus(org_cursor))
        if org_cursor else None)

    # Add a task for a single organization
    organizations, next_cursor, _ = query.fetch_page(
        1, start_cursor=start_cursor)

    if organizations:
      organization = organizations[0]
      logging.info(
          'Enqueuing task to accept proposals for %s.', organization.name)
      # Compounded accept/reject taskflow
      taskqueue.add(
        url='/tasks/gsoc/accept_proposals/accept',
        params={
          'org_key': organization.key.id(),
        })

      # Enqueue a new task to do the next organization
      params['org_cursor'] = next_cursor.urlsafe()
      taskqueue.add(url=request.path, params=params)

    # Exit this task successfully
    return responses.terminateTask()
Example #26
    def get_all_translation_opportunities(cls, page_size, urlsafe_start_cursor,
                                          language_code):
        """Returns a list of opportunities available for translation in a
        specific language.

        Args:
            page_size: int. The maximum number of entities to be returned.
            urlsafe_start_cursor: str or None. If provided, the list of
                returned entities starts from this datastore cursor.
                Otherwise, the returned entities start from the beginning
                of the full list of entities.
            language_code: str. The language for which translation opportunities
                are to be fetched.

        Returns:
            3-tuple of (results, cursor, more) as described in fetch_page() at:
            https://developers.google.com/appengine/docs/python/ndb/queryclass,
            where:
                results: list(ExplorationOpportunitySummaryModel)|None. A list
                    of query results.
                cursor: str or None. A query cursor pointing to the next
                    batch of results. If there are no more results, this might
                    be None.
                more: bool. If True, there are (probably) more results after
                    this batch. If False, there are no further results after
                    this batch.
        """
        if urlsafe_start_cursor:
            start_cursor = datastore_query.Cursor(urlsafe=urlsafe_start_cursor)
        else:
            start_cursor = datastore_query.Cursor()

        results, cursor, more = cls.query(
            cls.incomplete_translation_language_codes == language_code).order(
                cls.incomplete_translation_language_codes).fetch_page(
                    page_size, start_cursor=start_cursor)
        return (results, (cursor.urlsafe() if cursor else None), more)
Example #27
  def list(self, request):
    """Provides list of known bots.

    Deleted bots will not be listed.
    """
    logging.info('%s', request)
    now = utils.utcnow()
    cursor = datastore_query.Cursor(urlsafe=request.cursor)
    q = bot_management.BotInfo.query().order(bot_management.BotInfo.key)
    bots, cursor, more = q.fetch_page(request.limit, start_cursor=cursor)
    return swarming_rpcs.BotList(
        cursor=cursor.urlsafe() if cursor and more else None,
        death_timeout=config.settings().bot_death_timeout_secs,
        items=[message_conversion.bot_info_to_rpc(bot, now) for bot in bots],
        now=now)
Example #28
 def exportDb(self, cursor=None, key=None, *args, **kwargs):
     if not self._checkKey(key, export=True):
         raise errors.Forbidden()
     if cursor:
         c = datastore_query.Cursor(urlsafe=cursor)
     else:
         c = None
     q = datastore.Query(None, cursor=c)
     r = []
     for res in q.Run(limit=5):
         r.append(self.genDict(res))
     return (pickle.dumps({
         "cursor": str(q.GetCursor().urlsafe()),
         "values": r
     }).encode("HEX"))
Example #29
 def list(self, request):
     """Provides list of bots."""
     now = utils.utcnow()
     cursor = datastore_query.Cursor(urlsafe=request.cursor)
     q = bot_management.BotInfo.query().order(bot_management.BotInfo.key)
     bots, cursor, more = q.fetch_page(request.limit, start_cursor=cursor)
     return swarming_rpcs.BotList(
         cursor=cursor.urlsafe() if cursor and more else None,
         death_timeout=config.settings().bot_death_timeout_secs,
         items=[
             message_conversion.bot_info_from_dict(
                 bot.to_dict_with_now(now)) for bot in bots
         ],
         limit=request.limit,
         now=now)
Example #30
 def get(self, bot_id):
   limit = int(self.request.get('limit', 100))
   cursor = datastore_query.Cursor(urlsafe=self.request.get('cursor'))
   run_results, cursor, more = task_result.TaskRunResult.query(
       task_result.TaskRunResult.bot_id == bot_id).order(
           -task_result.TaskRunResult.started_ts).fetch_page(
               limit, start_cursor=cursor)
   now = utils.utcnow()
   data = {
     'cursor': cursor.urlsafe() if cursor and more else None,
     'items': run_results,
     'limit': limit,
     'now': now,
   }
   self.send_response(utils.to_json_encodable(data))