Example #1
def parse_luci_bucket(bucket):
  """Converts V1 LUCI bucket to a bucket ID string.

  Returns '' if bucket is not a LUCI bucket.
  """
  parts = bucket.split('.', 2)
  if len(parts) == 3 and parts[0] == 'luci':
    return config.format_bucket_id(parts[1], parts[2])
  return ''
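
A quick illustration of the conversion (the "<project_id>/<bucket_name>" output format is documented on Build.bucket_id in Example #9; the concrete bucket names below are made up):

# Illustrative only; assumes config.format_bucket_id returns
# "<project_id>/<bucket_name>" as documented in Example #9.
parse_luci_bucket('luci.chromium.try')     # -> 'chromium/try'
parse_luci_bucket('master.tryserver.foo')  # not a 'luci.' bucket -> ''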
Example #2
File: api.py  Project: xinghun61/infra
def schedule_build_async(req, res, _ctx, mask):
    """Schedules one build."""
    validation.validate_schedule_build_request(req)

    bucket_id = config.format_bucket_id(req.builder.project,
                                        req.builder.bucket)
    if not (yield user.can_add_build_async(bucket_id)):
        raise current_identity_cannot('schedule builds to bucket %s',
                                      bucket_id)

    build_req = creation.BuildRequest(schedule_build_request=req)
    build = yield creation.add_async(build_req)
    yield build_to_proto_async(build, res, mask)
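
For context, the handler above only needs the builder triple from the request to derive the bucket ID it checks ACLs against. A minimal request could look roughly like the sketch below; the proto module names (rpc_pb2, builder_pb2) and anything beyond req.builder.{project, bucket} are assumptions, not taken from this listing:

# Hypothetical sketch; proto module names are assumptions.
req = rpc_pb2.ScheduleBuildRequest(
    builder=builder_pb2.BuilderID(
        project='chromium', bucket='try', builder='linux-rel'))
# config.format_bucket_id(req.builder.project, req.builder.bucket)
# would then yield 'chromium/try', the bucket_id used in the ACL check.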
Example #3
def update_global_metrics():
    """Updates the metrics in GLOBAL_METRICS."""
    start = utils.utcnow()

    builder_ids = set()  # {(bucket_id, builder)}
    for key in model.Builder.query().iter(keys_only=True):
        project_id, bucket, builder = key.id().split(':', 2)
        bucket_id = (
            # TODO(crbug.com/851036): remove parse_luci_bucket call
            # once we don't have Builder entities with legacy bucket names.
            api_common.parse_luci_bucket(bucket)
            or config.format_bucket_id(project_id, bucket))
        builder_ids.add((bucket_id, builder))

    all_luci_bucket_ids = {
        bid
        for bid, cfg in config.get_buckets_async().get_result().iteritems()
        if cfg and cfg.swarming.builders
    }

    # Collect a list of counting/latency queries.
    count_query_queue = []
    latency_query_queue = []
    # TODO(crbug.com/851036): join with the loop above and remove builder_ids set.
    for bucket_id, builder in builder_ids:
        legacy_bucket_name = api_common.legacy_bucket_name(
            bucket_id, bucket_id in all_luci_bucket_ids)
        latency_query_queue.extend([
            (bucket_id, legacy_bucket_name, builder, True),
            (bucket_id, legacy_bucket_name, builder, False),
        ])
        for status in (model.BuildStatus.SCHEDULED, model.BuildStatus.STARTED):
            for experimental in (False, True):
                count_query_queue.append((bucket_id, legacy_bucket_name,
                                          builder, status, experimental))

    # Process counting/latency queries with _CONCURRENT_QUERY_LIMIT workers.

    @ndb.tasklet
    def worker():
        while count_query_queue:
            item = count_query_queue.pop()
            yield set_build_count_metric_async(*item)
        while latency_query_queue:
            item = latency_query_queue.pop()
            yield set_build_latency(*item)

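    # Calling worker() starts each tasklet; check_success() blocks until that
    # worker has drained its share of the queues and re-raises any exception.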
    for w in [worker() for _ in xrange(_CONCURRENT_QUERY_LIMIT)]:
        w.check_success()

    logging.info('global metric computation took %s', utils.utcnow() - start)
Example #4
    def get_task_def(self, request):
        """Returns a swarming task definition for a build request."""
        try:
            # Checks access too.
            request.build_request.bucket = api.convert_bucket(
                request.build_request.bucket)

            build_request = api.put_request_message_to_build_request(
                request.build_request)

            # Find builder config.
            builder_id = build_request.schedule_build_request.builder
            builder_cfg = None
            bucket_id = config.format_bucket_id(builder_id.project,
                                                builder_id.bucket)
            _, bucket_cfg = config.get_bucket_async(bucket_id).get_result()
            assert bucket_cfg, 'if there is no bucket, access check would fail'
            for cfg in bucket_cfg.swarming.builders:  # pragma: no branch
                if cfg.name == builder_id.builder:
                    builder_cfg = cfg
                    break
            if not builder_cfg:
                raise endpoints.NotFoundException(
                    'Builder %s/%s/%s not found' %
                    (builder_id.project, builder_id.bucket,
                     builder_id.builder))

            settings = config.get_settings_async().get_result()

            # Create a fake build and prepare a task definition.
            identity = auth.get_current_identity()
            build = build_request.create_build_async(
                1, settings, builder_cfg, identity,
                utils.utcnow()).get_result()
            assert build.proto.HasField('infra')
            build.proto.number = 1
            settings = config.get_settings_async().get_result()
            task_def = swarming.compute_task_def(build,
                                                 settings,
                                                 fake_build=True)
            task_def_json = json.dumps(task_def)

            return GetTaskDefinitionResponseMessage(
                task_definition=task_def_json,
                swarming_host=build.proto.infra.swarming.hostname,
            )
        except errors.InvalidInputError as ex:
            raise endpoints.BadRequestException('invalid build request: %s' %
                                                ex.message)
Example #5
    def setUp(self):
        super(UserTest, self).setUp()
        self.current_identity = auth.Identity.from_bytes('user:[email protected]')
        self.patch('components.auth.get_current_identity',
                   autospec=True,
                   side_effect=lambda: self.current_identity)
        user.clear_request_cache()

        self.patch('components.auth.is_admin',
                   autospec=True,
                   return_value=False)

        bucket_a = Bucket(name='a',
                          acls=[
                              Acl(role=Acl.WRITER, group='a-writers'),
                              Acl(role=Acl.READER, group='a-readers'),
                          ])
        bucket_b = Bucket(name='b',
                          acls=[
                              Acl(role=Acl.WRITER, group='b-writers'),
                              Acl(role=Acl.READER, group='b-readers'),
                          ])
        bucket_c = Bucket(name='c',
                          acls=[
                              Acl(role=Acl.READER, group='c-readers'),
                              Acl(role=Acl.READER,
                                  identity='user:[email protected]'),
                              Acl(role=Acl.WRITER, group='c-writers'),
                              Acl(role=Acl.READER, identity='project:p1'),
                          ])
        all_buckets = [('p1', bucket_a), ('p2', bucket_b), ('p3', bucket_c)]
        self.patch('config.get_buckets_async',
                   autospec=True,
                   return_value=future(all_buckets))

        bucket_map = {
            config.format_bucket_id(pid, b.name): b
            for pid, b in all_buckets
        }
        self.patch('config.get_bucket_async',
                   autospec=True,
                   side_effect=lambda bid: future(
                       ('deadbeef', bucket_map.get(bid))))
Example #6
File: api.py  Project: xinghun61/infra
def get_bucket_id(req):
    return config.format_bucket_id(req.builder.project, req.builder.bucket)
Example #7
File: api.py  Project: xinghun61/infra
def bucket_id_string(builder_id):
    return config.format_bucket_id(builder_id.project, builder_id.bucket)
Example #8
def bucket_id(self):
    builder_id = self.schedule_build_request.builder
    return config.format_bucket_id(builder_id.project, builder_id.bucket)
Example #9
File: model.py  Project: xinghun61/infra
class Build(ndb.Model):
    """Describes a build.

  Build key:
    Build keys are autogenerated, monotonically decreasing integers.
    That is, when sorted by key, new builds are first.
    Build has no parent.

    Build id is a 64-bit integer represented as a string to the user.
    - The highest-order bit is set to 0 to keep the value positive.
    - 43 bits are the 43 lower bits of the bitwise-inverted time since
      BEGINING_OF_THE_WORLD at 1ms resolution.
      It is good for 2**43 / 365.3 / 24 / 60 / 60 / 1000 = 278 years
      or 2010 + 278 = year 2288.
    - 16 bits are set to a random value. Assuming an instance is internally
      consistent with itself, it can ensure it does not reuse the same 16 bits
      in two consecutive requests and/or throttles itself to one request per
      millisecond. Using a random value reduces the probability of collision
      on the exact same timestamp at 1ms resolution to 2**-15, so a maximum
      theoretical rate of 65536000 requests/sec, but an effective rate in the
      range of ~64k qps without many transaction conflicts. We should be fine.
    - 4 bits are 0. This is to represent the 'version' of the entity
      schema.

    The idea is taken from Swarming TaskRequest entity:
    https://code.google.com/p/swarming/source/browse/appengine/swarming/server/task_request.py#329
  """

    # ndb library sometimes silently ignores memcache errors
    # => memcache is not synchronized with datastore
    # => a build never finishes from the app code perspective
    # => builder is stuck for days.
    # We work around this problem by setting a timeout.
    _memcache_timeout = 600  # 10m

    # Stores the build proto. The primary property of this entity.
    # Majority of the other properties are either derivatives of this field or
    # legacy.
    #
    # Does not include:
    #   output.properties: see BuildOutputProperties
    #   steps: see BuildSteps.
    #   tags: stored in tags attribute, because we have to index them anyway.
    #   input.properties: see BuildInputProperties.
    #     CAVEAT: field input.properties does exist during build creation, and
    #     is moved into BuildInputProperties right before the initial
    #     datastore.put.
    #   infra: see BuildInfra.
    #     CAVEAT: field infra does exist during build creation, and is moved
    #     into BuildInfra right before the initial datastore.put.
    #
    # Transition period: proto is either None or complete, i.e. created by
    # creation.py or fix_builds.py.
    proto = datastore_utils.ProtobufProperty(build_pb2.Build)

    # A randomly generated key associated with the created swarming task.
    # Embedded in a build token provided to a swarming task in secret bytes.
    # Needed in case Buildbucket unintentionally creates multiple swarming tasks
    # associated with the build.
    # Populated by swarming.py on swarming task creation.
    # A part of the message in build token (tokens.py) required for UpdateBuild
    # api.
    swarming_task_key = ndb.StringProperty(indexed=False)

    # == proto-derived properties ================================================
    #
    # These properties are derived from "proto" properties.
    # They are used to index builds.

    status = ndb.ComputedProperty(lambda self: self.proto.status,
                                  name='status_v2')

    @property
    def is_ended(self):  # pragma: no cover
        return is_terminal_status(self.proto.status)

    incomplete = ndb.ComputedProperty(lambda self: not self.is_ended)

    # ID of the LUCI project to which this build belongs.
    project = ndb.ComputedProperty(lambda self: self.proto.builder.project)

    # Indexed string "<project_id>/<bucket_name>".
    # Example: "chromium/try".
    # Prefix "luci.<project_id>." is stripped from bucket name.
    bucket_id = ndb.ComputedProperty(lambda self: config.format_bucket_id(
        self.proto.builder.project, self.proto.builder.bucket))

    # Indexed string "<project_id>/<bucket_name>/<builder_name>".
    # Example: "chromium/try/linux-rel".
    # Prefix "luci.<project_id>." is stripped from bucket name.
    builder_id = ndb.ComputedProperty(
        lambda self: config.builder_id_string(self.proto.builder))

    canary = ndb.ComputedProperty(lambda self: self.proto.canary)

    # Value of proto.create_time.
    # Making this property computed is not entirely trivial because
    # ComputedProperty saves it as int, as opposed to datetime.datetime.
    # TODO(nodir): remove usages of create_time indices, rely on build id ordering
    # instead.
    create_time = ndb.DateTimeProperty()

    # A list of colon-separated key-value pairs. Indexed.
    # Used to populate tags in builds_to_protos_async, if requested.
    tags = ndb.StringProperty(repeated=True)

    # If True, the build won't affect monitoring and won't be surfaced in
    # search results unless explicitly requested.
    experimental = ndb.ComputedProperty(
        lambda self: self.proto.input.experimental)

    # Value of proto.created_by.
    # Making this property computed is not entirely trivial because
    # ComputedProperty saves it as string, but IdentityProperty stores it
    # as a blob property.
    created_by = auth.IdentityProperty()

    is_luci = ndb.BooleanProperty()

    # == Legacy properties =======================================================

    status_legacy = msgprop.EnumProperty(BuildStatus,
                                         default=BuildStatus.SCHEDULED,
                                         name='status')

    status_changed_time = ndb.DateTimeProperty(auto_now_add=True)

    # immutable arbitrary build parameters.
    parameters = datastore_utils.DeterministicJsonProperty(json_type=dict)

    # PubSub message parameters for build status change notifications.
    # TODO(nodir): replace with notification_pb2.NotificationConfig.
    pubsub_callback = ndb.StructuredProperty(PubSubCallback, indexed=False)

    # id of the original build that this build was derived from.
    retry_of = ndb.IntegerProperty()

    # a URL to a build-system-specific build, viewable by a human.
    url = ndb.StringProperty(indexed=False)

    # V1 status properties. Computed by _pre_put_hook.
    result = msgprop.EnumProperty(BuildResult)
    result_details = datastore_utils.DeterministicJsonProperty(json_type=dict)
    cancelation_reason = msgprop.EnumProperty(CancelationReason)
    failure_reason = msgprop.EnumProperty(FailureReason)

    # Lease-time properties.

    # TODO(nodir): move lease to a separate entity under Build.
    # It would be more efficient.
    # current lease expiration date.
    # The moment the build is leased, |lease_expiration_date| is set to
    # (utcnow + lease_duration).
    lease_expiration_date = ndb.DateTimeProperty()
    # None if build is not leased, otherwise a random value.
    # Changes every time a build is leased. Can be used to verify that a client
    # is the leaseholder.
    lease_key = ndb.IntegerProperty(indexed=False)
    # True if the build is currently leased, otherwise False.
    is_leased = ndb.ComputedProperty(lambda self: self.lease_key is not None)
    leasee = auth.IdentityProperty()
    never_leased = ndb.BooleanProperty()

    # ============================================================================

    def _pre_put_hook(self):
        """Checks Build invariants before putting."""
        super(Build, self)._pre_put_hook()

        config.validate_project_id(self.proto.builder.project)
        config.validate_bucket_name(self.proto.builder.bucket)

        self.update_v1_status_fields()
        self.proto.update_time.FromDatetime(utils.utcnow())

        is_started = self.proto.status == common_pb2.STARTED
        is_ended = self.is_ended
        is_leased = self.lease_key is not None
        assert not (is_ended and is_leased)
        assert (self.lease_expiration_date is not None) == is_leased
        assert (self.leasee is not None) == is_leased

        tag_delm = buildtags.DELIMITER
        assert not self.tags or all(tag_delm in t for t in self.tags)

        assert self.proto.HasField('create_time')
        assert self.proto.HasField('end_time') == is_ended
        assert not is_started or self.proto.HasField('start_time')

        def _ts_less(ts1, ts2):
            return (ts1.seconds and ts2.seconds and
                    ts1.ToDatetime() < ts2.ToDatetime())

        assert not _ts_less(self.proto.start_time, self.proto.create_time)
        assert not _ts_less(self.proto.end_time, self.proto.create_time)
        assert not _ts_less(self.proto.end_time, self.proto.start_time)
        self.tags = sorted(set(self.tags))

    def update_v1_status_fields(self):
        """Updates V1 status fields."""
        self.status_legacy = None
        self.result = None
        self.failure_reason = None
        self.cancelation_reason = None

        status = self.proto.status
        if status == common_pb2.SCHEDULED:
            self.status_legacy = BuildStatus.SCHEDULED
        elif status == common_pb2.STARTED:
            self.status_legacy = BuildStatus.STARTED
        elif status == common_pb2.SUCCESS:
            self.status_legacy = BuildStatus.COMPLETED
            self.result = BuildResult.SUCCESS
        elif status == common_pb2.FAILURE:
            self.status_legacy = BuildStatus.COMPLETED
            self.result = BuildResult.FAILURE
            self.failure_reason = FailureReason.BUILD_FAILURE
        elif status == common_pb2.INFRA_FAILURE:
            self.status_legacy = BuildStatus.COMPLETED
            if self.proto.status_details.HasField('timeout'):
                self.result = BuildResult.CANCELED
                self.cancelation_reason = CancelationReason.TIMEOUT
            else:
                self.result = BuildResult.FAILURE
                self.failure_reason = FailureReason.INFRA_FAILURE
        elif status == common_pb2.CANCELED:
            self.status_legacy = BuildStatus.COMPLETED
            self.result = BuildResult.CANCELED
            self.cancelation_reason = CancelationReason.CANCELED_EXPLICITLY
        else:  # pragma: no cover
            assert False, status

    def regenerate_lease_key(self):
        """Changes lease key to a different random int."""
        while True:
            new_key = random.randint(0, 1 << 31)
            if new_key != self.lease_key:  # pragma: no branch
                self.lease_key = new_key
                break

    def clear_lease(self):  # pragma: no cover
        """Clears build's lease attributes."""
        self.lease_key = None
        self.lease_expiration_date = None
        self.leasee = None

    def tags_to_protos(self, dest):
        """Adds non-hidden self.tags to a repeated StringPair container."""
        for t in self.tags:
            k, v = buildtags.parse(t)
            if k not in buildtags.HIDDEN_TAG_KEYS:
                dest.add(key=k, value=v)
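
The id layout described in the Build docstring above can be sketched as follows. This is an illustrative reconstruction from the docstring only; the helper name and the exact shift arithmetic are assumptions, and the project's real id generator (elsewhere in model.py) may differ:

import random

def _sketch_build_id(now_ms, beginning_of_the_world_ms):
    # 43 low bits of the bitwise-inverted time delta at 1ms resolution:
    # newer builds get smaller values, so ids sort newest-first.
    dtime = now_ms - beginning_of_the_world_ms
    inverted_time = (~dtime) & ((1 << 43) - 1)
    rand16 = random.getrandbits(16)  # 16 random bits
    version = 0                      # 4 low 'version' bits are zero
    # Layout: [1 zero bit][43 bits inverted time][16 random bits][4 version bits]
    return (inverted_time << 20) | (rand16 << 4) | version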