def UnExpandIPV6(ip_address):
    """Unexpands a full ipv6 address to a shorthand ipv6 address

  Inputs:
    ip_address: string of long ipv6 address

  Raises:
    InvalidInputError: Not a valid IP address.
    InvalidInputError: Not a valid IPV6 address.

  Outputs:
    string: string of short ipv6 address
  """
    try:
        ipv6_address = IPy.IP(ip_address)
    except ValueError:
        raise errors.InvalidInputError('%s is not a valid IP address' %
                                       ip_address)
    if (ipv6_address.version() != 6):
        raise errors.InvalidInputError('"%s" is not a valid IPV6 address.' %
                                       (ipv6_address))

    new_address = str(ipv6_address)
    new_address_parts = new_address.split(':')

    # Replace each all-zero group with an empty string so that the join below
    # collapses it, e.g. 4321:0:1:2:3:4:567:89ab becomes 4321::1:2:3:4:567:89ab.
    while '0' in new_address_parts:
        new_address_parts[new_address_parts.index('0')] = ''

    return u':'.join(new_address_parts)
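A brief usage sketch, not part of the original module; it assumes IPy and errors are imported as in the snippet, and the exact short form depends on IPy's str() output:

UnExpandIPV6('4321:0000:0001:0002:0003:0004:0567:89ab')  # e.g. u'4321::1:2:3:4:567:89ab'
try:
    UnExpandIPV6('192.168.1.1')  # an IPv4 address is rejected
except errors.InvalidInputError:
    pass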
Example 2
def fix_max_builds(max_builds):
    max_builds = max_builds or 10
    if not isinstance(max_builds, int):
        raise errors.InvalidInputError('max_builds must be an integer')
    if max_builds < 0:
        raise errors.InvalidInputError('max_builds must be positive')
    return min(MAX_RETURN_BUILDS, max_builds)
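A small behaviour sketch derived from the code above (MAX_RETURN_BUILDS is a constant defined elsewhere in the module):

assert fix_max_builds(None) == min(MAX_RETURN_BUILDS, 10)  # falls back to the default of 10
try:
    fix_max_builds(-1)  # negative values are rejected
except errors.InvalidInputError:
    pass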
Example 3
def validate_tags(tags, mode, builder=None):
    """Validates build tags.

  mode must be a string, one of:
    'new': tags are for a new build.
    'append': tags are to be appended to an existing build.
    'search': tags to search by.

  builder is the value of model.Build.proto.builder.builder.
  If specified, tags "builder:<v>" must have v equal to the builder.
  Relevant only in 'new' mode.
  """
    assert mode in ('new', 'append', 'search'), mode
    if tags is None:
        return
    if not isinstance(tags, list):
        raise errors.InvalidInputError('tags must be a list')
    seen_builder_tag = None
    seen_gitiles_commit = False
    for t in tags:  # pragma: no branch
        if not isinstance(t, basestring):
            raise errors.InvalidInputError(
                'Invalid tag "%s": must be a string' % (t, ))
        if ':' not in t:
            raise errors.InvalidInputError(
                'Invalid tag "%s": does not contain ":"' % t)
        if t[0] == ':':
            raise errors.InvalidInputError(
                'Invalid tag "%s": starts with ":"' % t)
        k, v = t.split(':', 1)
        if k == BUILDSET_KEY:
            try:
                validate_buildset(v)
            except errors.InvalidInputError as ex:
                raise errors.InvalidInputError('Invalid tag "%s": %s' %
                                               (t, ex))
            if RE_BUILDSET_GITILES_COMMIT.match(v):  # pragma: no branch
                if seen_gitiles_commit:
                    raise errors.InvalidInputError(
                        'More than one commits/gitiles buildset')
                seen_gitiles_commit = True
        if k == BUILDER_KEY:
            if mode == 'append':
                raise errors.InvalidInputError(
                    'Tag "builder" cannot be added to an existing build')
            if mode == 'new':  # pragma: no branch
                if builder is not None and v != builder:
                    raise errors.InvalidInputError(
                        'Tag "%s" conflicts with builder name "%s"' %
                        (t, builder))
                if seen_builder_tag is None:
                    seen_builder_tag = t
                elif t != seen_builder_tag:  # pragma: no branch
                    raise errors.InvalidInputError(
                        'Tag "%s" conflicts with tag "%s"' %
                        (t, seen_builder_tag))
        if mode != 'search' and k in RESERVED_KEYS:
            raise errors.InvalidInputError('Tag "%s" is reserved' % k)
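An illustrative sketch; it assumes BUILDER_KEY == 'builder' and that the made-up key 'k' is neither a buildset nor a reserved key (those constants live elsewhere in the module):

validate_tags(['k:v'], 'new')  # passes
try:
    validate_tags(['builder:linux'], 'append')  # "builder" cannot be appended
except errors.InvalidInputError:
    pass
try:
    validate_tags(['builder:mac'], 'new', builder='linux')  # conflicts with builder name
except errors.InvalidInputError:
    pass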
Example 4
def validate_url(url):
    if url is None:
        return
    if not isinstance(url, basestring):
        raise errors.InvalidInputError('url must be string')
    parsed = urlparse.urlparse(url)
    if not parsed.netloc:
        raise errors.InvalidInputError('url must be absolute')
    if parsed.scheme.lower() not in ('http', 'https'):
        raise errors.InvalidInputError('Unexpected url scheme: "%s"' %
                                       parsed.scheme)
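A quick sketch of accepted and rejected values (the URLs are hypothetical):

validate_url('https://example.com/builds/1')  # passes
for bad in ('/relative/path', 'ftp://example.com'):
    try:
        validate_url(bad)
    except errors.InvalidInputError as ex:
        print ex  # 'url must be absolute' / 'Unexpected url scheme: "ftp"'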
Example 5
def parse_json_object(json_data, param_name):
    if not json_data:
        return None
    try:
        rv = json.loads(json_data)
    except ValueError as ex:
        raise errors.InvalidInputError('Could not parse %s: %s' %
                                       (param_name, ex))
    if rv is not None and not isinstance(rv, dict):
        raise errors.InvalidInputError(
            'Invalid %s: not a JSON object or null' % param_name)
    return rv
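A behaviour sketch for the three cases handled above (the parameter name 'body' is made up):

assert parse_json_object(None, 'body') is None
assert parse_json_object('{"a": 1}', 'body') == {'a': 1}
try:
    parse_json_object('[1, 2]', 'body')  # a JSON array is not a JSON object
except errors.InvalidInputError:
    pass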
Example 6
def validate_tags(tags):
    if tags is None:
        return
    if not isinstance(tags, list):
        raise errors.InvalidInputError('tags must be a list')
    for t in tags:
        if not isinstance(t, basestring):
            raise errors.InvalidInputError(
                'Invalid tag "%s": must be a string' % (t, ))
        if ':' not in t:
            raise errors.InvalidInputError(
                'Invalid tag "%s": does not contain ":"' % t)
Example 7
def validate_build(build):
    """Raises errors.InvalidInputError if swarming constraints are violated."""
    if build.lease_key:
        raise errors.InvalidInputError(
            'Swarming buckets do not support creation of leased builds')

    expirations = set()
    for dim in build.proto.infra.swarming.task_dimensions:
        assert not dim.expiration.nanos
        expirations.add(dim.expiration.seconds)

    if len(expirations) > 6:
        raise errors.InvalidInputError(
            'swarming supports up to 6 unique expirations')
Example 8
def validate_lease_expiration_date(expiration_date):
    """Raises errors.InvalidInputError if |expiration_date| is invalid."""
    if expiration_date is None:
        return
    if not isinstance(expiration_date, datetime.datetime):
        raise errors.InvalidInputError(
            'Lease expiration date must be datetime.datetime')
    duration = expiration_date - utils.utcnow()
    if duration <= datetime.timedelta(0):
        raise errors.InvalidInputError(
            'Lease expiration date cannot be in the past')
    if duration > MAX_LEASE_DURATION:
        raise errors.InvalidInputError('Lease duration cannot exceed %s' %
                                       MAX_LEASE_DURATION)
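A sketch reusing the helpers the function itself relies on (utils.utcnow, datetime and MAX_LEASE_DURATION are assumed available as in the surrounding module):

validate_lease_expiration_date(None)  # explicitly allowed
validate_lease_expiration_date(utils.utcnow() + datetime.timedelta(minutes=5))  # ok if within MAX_LEASE_DURATION
try:
    validate_lease_expiration_date(utils.utcnow() - datetime.timedelta(hours=1))
except errors.InvalidInputError:
    pass  # dates in the past are rejected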
Example 9
def prepare_task_def_async(build, fake_build=False):
    """Prepares a swarming task definition.

  Validates the new build.
  If configured, generates a build number and updates the build.
  Creates a swarming task definition.

  Returns a tuple (bucket_cfg, builder_cfg, task_def).
  """
    if build.lease_key:
        raise errors.InvalidInputError(
            'Swarming buckets do not support creation of leased builds')
    if not build.parameters:
        raise errors.InvalidInputError(
            'A build for bucket %r must have parameters' % build.bucket)
    builder_name = build.parameters.get(BUILDER_PARAMETER)
    if not isinstance(builder_name, basestring):
        raise errors.InvalidInputError('Invalid builder name %r' %
                                       builder_name)
    project_id, bucket_cfg = yield config.get_bucket_async(build.bucket)

    if not bucket_cfg.HasField('swarming'):
        raise errors.InvalidInputError(
            'Bucket %s is not configured for swarming' % build.bucket)

    builder_cfg = None
    for b in bucket_cfg.swarming.builders:  # pragma: no branch
        if b.name == builder_name:  # pragma: no branch
            builder_cfg = b
            break
    if not builder_cfg:
        raise errors.InvalidInputError(
            'Builder %r is not defined in bucket %r' %
            (builder_name, build.bucket))

    build_number = None
    if builder_cfg.build_numbers:  # pragma: no branch
        seq_name = '%s/%s' % (build.bucket, builder_name)
        if fake_build:  # pragma: no cover | covered by swarmbucket_api_test
            build_number = 0
        else:
            build_number = yield sequence.generate_async(seq_name, 1)
        build.tags.append('build_address:%s/%d' % (seq_name, build_number))

    task_def = yield _create_task_def_async(project_id, bucket_cfg.swarming,
                                            builder_cfg, build, build_number,
                                            fake_build)
    raise ndb.Return(bucket_cfg, builder_cfg, task_def)
Example 10
def parse_datetime(timestamp):
    if timestamp is None:
        return None
    try:
        return utils.timestamp_to_datetime(timestamp)
    except OverflowError:
        raise errors.InvalidInputError('Could not parse timestamp: %s' %
                                       timestamp)
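An illustrative sketch; utils.timestamp_to_datetime comes from the shared components library, and the huge value below is only meant to trigger the OverflowError branch:

parse_datetime(None)  # -> None
try:
    parse_datetime(10 ** 30)  # out of range; surfaced as InvalidInputError
except errors.InvalidInputError:
    pass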
Example 11
    def heartbeat_async(self, build_id, lease_key, lease_expiration_date):
        """Extends build lease.

    Args:
      build_id: id of the build.
      lease_key: current lease key.
      lease_expiration_date (datetime.datetime): new lease expiration date.

    Returns:
      The updated Build as Future.
    """
        try:
            validate_lease_key(lease_key)
            if lease_expiration_date is None:
                raise errors.InvalidInputError(
                    'Lease expiration date not specified')
            validate_lease_expiration_date(lease_expiration_date)
            build = yield model.Build.get_by_id_async(build_id)
            if build is None:
                raise errors.BuildNotFoundError()
            if build.status == model.BuildStatus.COMPLETED:
                raise errors.BuildIsCompletedError()
            self._check_lease(build, lease_key)
            build.lease_expiration_date = lease_expiration_date
            yield build.put_async()
        except Exception as ex:
            logging.warning('Heartbeat for build %s failed: %s', build_id, ex)
            raise
        raise ndb.Return(build)
Example 12
    def MakeRow(self, table_name, row_dict):
        """Creates a row in the database using the table name and row dict
    
    Inputs:
      table_name: string of valid table name from constants
      row_dict: dictionary that corresponds to table_name

    Raises:
      InvalidInputError: Table name not valid
      TransactionError: Must run StartTransaction before inserting

    Outputs:
      int: last insert id
    """
        if (not table_name in helpers_lib.GetValidTables()):
            raise errors.InvalidInputError('Table name not valid: %s' %
                                           table_name)
        if (not self.transaction_init):
            raise errors.TransactionError('Must run StartTransaction before '
                                          'inserting.')
        if (self.data_validation_instance is None):
            self.InitDataValidation()
        self.data_validation_instance.ValidateRowDict(table_name, row_dict)

        column_names = []
        column_assignments = []
        for k in row_dict.iterkeys():
            column_names.append(k)
            column_assignments.append('%s%s%s' % ('%(', k, ')s'))

        query = 'INSERT INTO %s (%s) VALUES (%s)' % (
            table_name, ','.join(column_names), ','.join(column_assignments))
        self.cursor_execute(query, row_dict)
        return self.cursor.lastrowid
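For illustration only, with a hypothetical table and columns (not taken from the original docs), the statement assembled above looks roughly like this, with row_dict handed to cursor_execute for parameter substitution:

# row_dict = {'acl_name': 'internal', 'access_right': 'rw'} on table 'acls' yields
# (column order follows the dict's iteration order):
#   INSERT INTO acls (acl_name,access_right) VALUES (%(acl_name)s,%(access_right)s)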
Example 13
    def _fetch_page(self, query, page_size, start_cursor, predicate=None):
        assert query
        assert isinstance(page_size, int)
        assert start_cursor is None or isinstance(start_cursor, basestring)

        curs = None
        if start_cursor:
            try:
                curs = ndb.Cursor(urlsafe=start_cursor)
            except db.BadValueError as ex:
                msg = 'Bad cursor "%s": %s' % (start_cursor, ex)
                logging.warning(msg)
                raise errors.InvalidInputError(msg)

        query_iter = query.iter(start_cursor=curs,
                                produce_cursors=True,
                                batch_size=page_size)
        entities = []
        for entity in query_iter:
            if predicate is None or predicate(entity):  # pragma: no branch
                entities.append(entity)
                if len(entities) >= page_size:
                    break

        next_cursor_str = None
        if query_iter.has_next():
            next_cursor_str = query_iter.cursor_after().urlsafe()
        return entities, next_cursor_str
Example 14
    def GetRecordArgsDict(self, record_type):
        """Get args for a specific record type from the db and shove them into
    a dictionary.

    Inputs:
      record_type: string of record type
    
    Raises:
      InvalidInputError: Unknown record type

    Outputs:
      dictionary: keyed by argument name with values of data type of that arg
        example: {'mail_host': 'Hostname',
                  'priority': 'UnsignedInt'}
    """
        search_record_arguments_dict = self.GetEmptyRowDict('record_arguments')
        search_record_arguments_dict['record_arguments_type'] = record_type

        self.StartTransaction()
        try:
            record_arguments = self.ListRow('record_arguments',
                                            search_record_arguments_dict)
        finally:
            self.EndTransaction()

        record_arguments_dict = {}
        if (not record_arguments):
            raise errors.InvalidInputError('Unknown record type: %s' %
                                           record_type)
        for record_argument in record_arguments:
            record_arguments_dict[record_argument['argument_name']] = (
                record_argument['argument_data_type'])

        return record_arguments_dict
Example 15
    def RemoveRow(self, table_name, row_dict):
        """Removes a row in the database using the table name and row dict

    Inputs:
      table_name: string of valid table name from constants
      row_dict: dictionary that corresponds to table_name

    Raises:
      InvalidInputError: Table name not valid
      TransactionError: Must run StartTransaction before deleting

    Outputs:
      int: number of rows affected
    """
        if (not table_name in helpers_lib.GetValidTables()):
            raise errors.InvalidInputError('Table name not valid: %s' %
                                           table_name)
        if (not self.transaction_init):
            raise errors.TransactionError(
                'Must run StartTransaction before deleting.')
        if (self.data_validation_instance is None):
            self.InitDataValidation()
        self.data_validation_instance.ValidateRowDict(table_name, row_dict)

        where_list = []
        for k in row_dict.iterkeys():
            where_list.append('%s=%s%s%s' % (k, '%(', k, ')s'))

        query = 'DELETE FROM %s WHERE %s' % (table_name,
                                             ' AND '.join(where_list))
        self.cursor_execute(query, row_dict)
        return self.cursor.rowcount
Example 16
def _reject_swarming_bucket(bucket_id):
    config.validate_bucket_id(bucket_id)
    _, cfg = config.get_bucket(bucket_id)
    assert cfg, 'permission check should have failed'
    if config.is_swarming_config(cfg):
        raise errors.InvalidInputError(
            'Invalid operation on a Swarming bucket')
Example 17
def ReverseIP(ip_address):
    """Reverse an IP address

  Inputs:
    ip_address: either an ipv4 or ipv6 string

  Raises:
    InvalidInputError: Not a valid IP address.

  Outputs:
    string: reverse ip address
  """
    try:
        ip_object = IPy.IP(ip_address)
    except ValueError:
        raise errors.InvalidInputError('%s is not a valid IP address' %
                                       ip_address)
    reverse_ip_string = ip_object.reverseName()
    if (ip_object.version() == 4):
        ip_parts = reverse_ip_string.split('.')
        if ('-' in ip_parts[0]):
            ip_range = ip_parts.pop(0).split('-')
            num_ips = int(ip_range[1]) - int(ip_range[0]) + 1
            netmask = int(32 - (math.log(num_ips) / math.log(2)))
            last_octet = ip_parts.pop(0)
            reverse_ip_string = '.'.join(ip_parts)
            reverse_ip_string = '%s/%s.%s' % (last_octet, netmask,
                                              reverse_ip_string)
    return unicode(reverse_ip_string)
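A hedged example; the exact string is whatever IPy's reverseName() produces:

ReverseIP('10.1.2.3')  # typically u'3.2.1.10.in-addr.arpa.'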
Example 18
def parse_json(json_data, param_name):
    if not json_data:
        return None
    try:
        return json.loads(json_data)
    except ValueError as ex:
        raise errors.InvalidInputError('Could not parse %s: %s' %
                                       (param_name, ex))
Example 19
def validate_bucket_id(bucket_id):
    """Raises errors.InvalidInputError if bucket_id is invalid."""
    assert not is_legacy_bucket_id(bucket_id)

    try:
        project_id, bucket_name = parse_bucket_id(bucket_id)
        validate_project_id(project_id)
        validate_bucket_name(bucket_name)
    except errors.InvalidInputError as ex:
        raise errors.InvalidInputError('invalid bucket_id %r: %s' %
                                       (bucket_id, ex))

    parts = bucket_name.split('.', 2)
    if len(parts) == 3 and parts[0] == 'luci' and parts[1] == project_id:
        expected_bucket_id = '%s/%s' % (project_id, parts[2])
        raise errors.InvalidInputError(
            'invalid bucket_id string %r. Did you mean %r?' %
            (bucket_id, expected_bucket_id))
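A sketch with made-up bucket ids; it assumes parse_bucket_id and the project/bucket validators (defined elsewhere in this module) accept these values:

validate_bucket_id('chromium/try')  # passes
try:
    validate_bucket_id('chromium/luci.chromium.try')
except errors.InvalidInputError as ex:
    print ex  # suggests 'chromium/try' instead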
Example 20
def _get_leasable_build(build_id):
    build = model.Build.get_by_id(build_id)
    if build is None:
        raise errors.BuildNotFoundError()
    if not user.can_lease_build_async(build).get_result():
        raise user.current_identity_cannot('lease build %s', build.key.id())
    if build.is_luci:
        raise errors.InvalidInputError('cannot lease a swarmbucket build')
    return build
Example 21
def peek(bucket_ids, max_builds=None, start_cursor=None):
    """Returns builds available for leasing in the specified |bucket_ids|.

  Builds are sorted by creation time, oldest first.

  Args:
    bucket_ids (list of string): fetch only builds in any of |bucket_ids|.
    max_builds (int): maximum number of builds to return. Defaults to 10.
    start_cursor (string): a value of "next" cursor returned by previous
      peek call. If not None, return next builds in the query.

  Returns:
    A tuple:
      builds (list of Builds): available builds.
      next_cursor (str): cursor for the next page.
        None if there are no more builds.
  """
    if not bucket_ids:
        raise errors.InvalidInputError('No buckets specified')
    bucket_ids = sorted(set(bucket_ids))
    search.check_acls_async(
        bucket_ids, inc_metric=PEEK_ACCESS_DENIED_ERROR_COUNTER).get_result()
    for bid in bucket_ids:
        _reject_swarming_bucket(bid)
    max_builds = search.fix_max_builds(max_builds)

    # Prune any buckets that are paused.
    bucket_states = _get_bucket_states(bucket_ids)
    active_buckets = []
    for b in bucket_ids:
        if bucket_states[b].is_paused:
            logging.warning('Ignoring paused bucket: %s.', b)
            continue
        active_buckets.append(b)

    # Short-circuit: if there are no remaining buckets to query, then we're done.
    if not active_buckets:
        return ([], None)

    q = model.Build.query(
        model.Build.status_legacy == model.BuildStatus.SCHEDULED,
        model.Build.is_leased == False,
        model.Build.bucket_id.IN(active_buckets),
    )
    q = q.order(-model.Build.key)  # oldest first.

    # Check once again locally because an ndb query may return an entity not
    # satisfying the query.
    def local_predicate(b):
        return (b.status_legacy == model.BuildStatus.SCHEDULED
                and not b.is_leased and b.bucket_id in active_buckets)

    return search.fetch_page_async(q,
                                   max_builds,
                                   start_cursor,
                                   predicate=local_predicate).get_result()
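A hypothetical call for illustration (the bucket ids are made up and must pass the ACL check above):

builds, next_cursor = peek(['chromium/try', 'chromium/ci'], max_builds=5)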
Example 22
    def _check_search_acls(self, buckets):
        if not buckets:
            raise errors.InvalidInputError('No buckets specified')
        for bucket in buckets:
            validate_bucket_name(bucket)

        for bucket in buckets:
            if not acl.can_search_builds(bucket):
                raise current_identity_cannot('search builds in bucket %s',
                                              bucket)
Example 23
def parse_identity(identity):
    """Parses an identity string if it is a string."""
    if isinstance(identity, basestring):
        if not identity:  # pragma: no cover
            return None
        if ':' not in identity:  # pragma: no branch
            identity = 'user:%s' % identity
        try:
            identity = auth.Identity.from_bytes(identity)
        except ValueError as ex:
            raise errors.InvalidInputError('Invalid identity: %s' % ex)
    return identity
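A small sketch, assuming auth is the components.auth module used above:

parse_identity('user:bob@example.com')  # -> an auth.Identity
parse_identity('bob@example.com')       # the 'user:' prefix is added automatically
parse_identity(None)                    # non-strings are returned unchanged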
Example 24
    def validate(self):
        """Raises errors.InvalidInputError if self is invalid."""
        assert isinstance(self.status,
                          (type(None), StatusFilter, int)), self.status
        assert isinstance(self.bucket_ids, (type(None), list)), self.bucket_ids

        if self.bucket_ids and self.project:
            raise errors.InvalidInputError(
                'project and bucket_ids are mutually exclusive')
        if self.builder and not self.bucket_ids:
            raise errors.InvalidInputError(
                'builder requires non-empty bucket_ids')

        buildtags.validate_tags(self.tags, 'search')

        create_time_range = (self.create_time_low is not None
                             or self.create_time_high is not None)
        build_range = self.build_low is not None or self.build_high is not None
        if create_time_range and build_range:
            raise errors.InvalidInputError(
                'create_time_low and create_time_high are mutually exclusive with '
                'build_low and build_high')
Example 25
def ExpandIPV6(ip_address):
    """Expands a shorthand ipv6 address to a full ipv6 address

  Inputs:
    ip_address: string of short ipv6 address

  Raises:
    InvalidInputError: Not a valid IP address.
    InvalidInputError: Not a valid IPV6 address.

  Outputs:
    string: string of long ipv6 address
  """
    try:
        ipv6_address = IPy.IP(ip_address)
    except ValueError:
        raise errors.InvalidInputError('%s is not a valid IP address' %
                                       ip_address)
    if (ipv6_address.version() != 6):
        raise errors.InvalidInputError('"%s" is not a valid IPV6 address.' %
                                       (ipv6_address))

    return ipv6_address.strFullsize()
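A usage sketch; the expanded form is whatever IPy's strFullsize() returns:

ExpandIPV6('4321::1:2:3:4:567:89ab')  # -> '4321:0000:0001:0002:0003:0004:0567:89ab'
try:
    ExpandIPV6('10.0.0.1')  # IPv4 input is rejected
except errors.InvalidInputError:
    pass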
Example 26
    def UpdateRow(self, table_name, search_row_dict, update_row_dict):
        """Updates a row in the database using search and update dictionaries.

    Inputs:
      table_name: string of valid table name from constants
      search_row_dict: dictionary that corresponds to table_name containing
                       search args
      update_row_dict: dictionary that corresponds to table_name containing
                       update args

    Raises:
      InvalidInputError: Table name not valid
      TransactionError: Must run StartTransaction before updating

    Outputs:
      int: number of rows affected
    """
        if (not table_name in helpers_lib.GetValidTables()):
            raise errors.InvalidInputError('Table name not valid: %s' %
                                           table_name)
        if (not self.transaction_init):
            raise errors.TransactionError(
                'Must run StartTransaction before updating.')
        if (self.data_validation_instance is None):
            self.InitDataValidation()
        self.data_validation_instance.ValidateRowDict(table_name,
                                                      search_row_dict,
                                                      none_ok=True)
        self.data_validation_instance.ValidateRowDict(table_name,
                                                      update_row_dict,
                                                      none_ok=True)

        query_updates = []
        query_searches = []
        combined_dict = {}
        for k, v in update_row_dict.iteritems():
            if (v is not None):
                query_updates.append('%s%s%s%s' % (k, '=%(update_', k, ')s'))
                combined_dict['update_%s' % k] = v

        for k, v in search_row_dict.iteritems():
            if (v is not None):
                query_searches.append('%s=%s%s%s' % (k, '%(search_', k, ')s'))
                combined_dict['search_%s' % k] = v

        query = 'UPDATE %s SET %s WHERE %s' % (
            table_name, ','.join(query_updates), ' AND '.join(query_searches))
        self.cursor_execute(query, combined_dict)
        return self.cursor.rowcount
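Again with a hypothetical table and columns, the update and search pieces built above combine into roughly:

# search_row_dict = {'acl_name': 'internal'}, update_row_dict = {'access_right': 'r'} gives
#   UPDATE acls SET access_right=%(update_access_right)s WHERE acl_name=%(search_acl_name)s
# executed with combined_dict = {'update_access_right': 'r', 'search_acl_name': 'internal'}.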
Example 27
def validate_buildset(bs):
    """Raises errors.InvalidInputError if the buildset is invalid."""
    if len(BUILDSET_KEY) + len(DELIMITER) + len(bs) > BUILDSET_MAX_LENGTH:
        raise errors.InvalidInputError('too long')

    # Verify that a buildset with a known prefix is well formed.
    if bs.startswith('commit/gitiles/'):
        m = RE_BUILDSET_GITILES_COMMIT.match(bs)
        if not m:
            raise errors.InvalidInputError(
                'does not match regex "%s"' %
                (RE_BUILDSET_GITILES_COMMIT.pattern))
        project = m.group(2)
        if project.startswith('a/'):
            raise errors.InvalidInputError(
                'gitiles project must not start with "a/"')
        if project.endswith('.git'):
            raise errors.InvalidInputError(
                'gitiles project must not end with ".git"')

    elif bs.startswith('patch/gerrit/'):
        if not RE_BUILDSET_GERRIT_CL.match(bs):
            raise errors.InvalidInputError('does not match regex "%s"' %
                                           RE_BUILDSET_GERRIT_CL.pattern)
Example 28
    def test_schedule_build_requests(self, add_many_async):
        add_many_async.return_value = future([
            (test_util.build(id=42), None),
            (test_util.build(id=43), None),
            (None, errors.InvalidInputError('bad')),
            (None, Exception('unexpected')),
            (None, auth.AuthorizationError('bad')),
        ])

        user.can_async.side_effect = (
            lambda bucket_id, _: future('forbidden' not in bucket_id))

        linux_builder = dict(project='chromium', bucket='try', builder='linux')
        win_builder = dict(project='chromium', bucket='try', builder='windows')
        req = rpc_pb2.BatchRequest(
            requests=[
                dict(schedule_build=dict(builder=linux_builder)),
                dict(schedule_build=dict(builder=linux_builder,
                                         fields=dict(paths=['tags']))),
                dict(schedule_build=dict(builder=linux_builder,
                                         fields=dict(paths=['wrong-field']))),
                dict(schedule_build=dict(builder=win_builder)),
                dict(schedule_build=dict(builder=win_builder)),
                dict(schedule_build=dict(builder=win_builder)),
                dict(schedule_build=dict(builder=dict(project='chromium',
                                                      bucket='forbidden',
                                                      builder='nope'), )),
                dict(
                    schedule_build=dict(),  # invalid request
                ),
            ], )

        res = self.call(self.api.Batch, req)

        codes = [r.error.code for r in res.responses]
        self.assertEqual(codes, [
            prpc.StatusCode.OK.value,
            prpc.StatusCode.OK.value,
            prpc.StatusCode.INVALID_ARGUMENT.value,
            prpc.StatusCode.INVALID_ARGUMENT.value,
            prpc.StatusCode.INTERNAL.value,
            prpc.StatusCode.PERMISSION_DENIED.value,
            prpc.StatusCode.PERMISSION_DENIED.value,
            prpc.StatusCode.INVALID_ARGUMENT.value,
        ])
        self.assertEqual(res.responses[0].schedule_build.id, 42)
        self.assertFalse(len(res.responses[0].schedule_build.tags))
        self.assertTrue(len(res.responses[1].schedule_build.tags))
Example 29
def fetch_page_async(query, page_size, start_cursor, predicate=None):
    """Fetches a page of Build entities."""
    assert query
    assert isinstance(page_size, int)
    assert start_cursor is None or isinstance(start_cursor, basestring)

    curs = None
    if start_cursor:
        try:
            curs = ndb.Cursor(urlsafe=start_cursor)
        except db.BadValueError as ex:
            msg = 'Bad cursor "%s": %s' % (start_cursor, ex)
            logging.warning(msg)
            raise errors.InvalidInputError(msg)

    entities = []
    skipped = 0
    pages = 0
    started = utils.utcnow()
    while len(entities) < page_size:
        # It is important not to request more than needed in query.fetch_page,
        # otherwise the cursor we return to the user would skip entities that
        # were fetched but not returned, and the user would never see them.
        to_fetch = page_size - len(entities)

        logging.debug('fetch_page: ds query: %s', query)
        page, curs, more = yield query.fetch_page_async(to_fetch,
                                                        start_cursor=curs)
        pages += 1
        for entity in page:
            if predicate and not predicate(entity):  # pragma: no cover
                skipped += 1
                continue
            entities.append(entity)
            if len(entities) >= page_size:
                break
        if not more:
            break
    logging.debug(
        'fetch_page: %dms elapsed',
        (utils.utcnow() - started).total_seconds() * 1000,
    )

    curs_str = None
    if more:
        curs_str = curs.urlsafe()
    raise ndb.Return(entities, curs_str)
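In its original module this function is an ndb tasklet (the @ndb.tasklet decorator is not shown in this excerpt); assuming that, a caller gets the page synchronously like:

# q is any model.Build query; the names here are illustrative.
entities, next_cursor = fetch_page_async(q, 100, None).get_result()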
Example 30
    def test_create_sync_task(self):
        expected_ex1 = errors.InvalidInputError()

        def create_sync_task(build, *_args, **_kwargs):
            if 'buildset:a' in build.tags:
                raise expected_ex1

        self.create_sync_task.side_effect = create_sync_task

        ((b1, ex1), (b2, ex2)) = creation.add_many_async([
            self.build_request(dict(tags=[dict(key='buildset', value='a')])),
            self.build_request(dict(tags=[dict(key='buildset', value='b')])),
        ]).get_result()

        self.assertEqual(ex1, expected_ex1)
        self.assertIsNone(b1)
        self.assertIsNone(ex2)
        self.assertIsNotNone(b2)