def impl():
  ctx = ndb.get_context()
  cache_key = 'role/%s/%s' % (identity_str, bucket_id)
  cache = yield ctx.memcache_get(cache_key)
  if cache is not None:
    raise ndb.Return(cache[0])

  _, bucket_cfg = yield config.get_bucket_async(bucket_id)
  if not bucket_cfg:
    raise ndb.Return(None)
  if auth.is_admin(identity):
    raise ndb.Return(project_config_pb2.Acl.WRITER)

  # A LUCI service calling us in the context of some project is allowed to
  # do anything it wants in that project. We trust all LUCI services to do
  # authorization on their own for this case. A cross-project request must be
  # explicitly authorized in Buildbucket ACLs though (so we proceed to the
  # bucket_cfg check below).
  if identity.is_project:
    project_id, _ = config.parse_bucket_id(bucket_id)
    if project_id == identity.name:
      raise ndb.Return(project_config_pb2.Acl.WRITER)

  # Roles are just numbers. The higher the number, the more permissions
  # the identity has. We exploit this here to get the single maximally
  # permissive role for the current identity.
  role = None
  for rule in bucket_cfg.acls:
    if rule.role <= role:
      continue
    if (rule.identity == identity_str or
        (rule.group and auth.is_group_member(rule.group, identity))):
      role = rule.role

  yield ctx.memcache_set(cache_key, (role,), time=60)
  raise ndb.Return(role)

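# Hypothetical caller sketch (not part of the original module): assuming the
# tasklet above is exposed through a wrapper named get_role_async(identity,
# bucket_id) (that name is an assumption), a consumer yields the future and
# compares the numeric role against a threshold, relying on "higher number
# means more permissions".
@ndb.tasklet
def _example_can_write_async(identity, bucket_id):  # pragma: no cover
  role = yield get_role_async(identity, bucket_id)  # assumed wrapper name
  raise ndb.Return(
      role is not None and role >= project_config_pb2.Acl.WRITER)
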
def legacy_bucket_name(bucket_id, is_luci):
  if is_luci:
    # In V1, LUCI builds use a "long" bucket name, e.g. "luci.chromium.try"
    # as opposed to just "try". This is because in the past bucket names
    # were globally unique, as opposed to unique per project.
    return format_luci_bucket(bucket_id)

  _, bucket_name = config.parse_bucket_id(bucket_id)
  return bucket_name

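# Illustrative sketch (the bucket id is an assumed example value): with a v2
# bucket id of the "project/bucket" form, the legacy name keeps the "luci."
# prefix only for LUCI builds.
def _example_legacy_bucket_name():  # pragma: no cover
  assert legacy_bucket_name('chromium/try', is_luci=True) == (
      'luci.chromium.try')
  assert legacy_bucket_name('chromium/try', is_luci=False) == 'try'
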
def get_bucket(self, request):
  """Returns bucket information."""
  bucket_id = convert_bucket(request.bucket)  # checks access
  project_id, _ = config.parse_bucket_id(bucket_id)
  rev, bucket_cfg = config.get_bucket(bucket_id)
  assert bucket_cfg  # access check would have failed otherwise.
  return BucketMessage(
      name=request.bucket,
      project_id=project_id,
      config_file_content=protobuf.text_format.MessageToString(bucket_cfg),
      config_file_rev=rev,
      config_file_url=config.get_buildbucket_cfg_url(project_id),
  )

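# Hypothetical call sketch (the bucket name is an assumed example value): for
# request.bucket == 'luci.chromium.try', convert_bucket is expected to resolve
# the v1 name to a bucket id such as 'chromium/try', so the response carries
# project_id='chromium', the bucket config serialized as a text proto, and the
# config revision and URL reported by the config module.
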
def set_next_build_number(self, request):
  """Sets the build number that will be used for the next build."""
  bucket_id = api.convert_bucket(request.bucket)
  if not user.can_set_next_number_async(bucket_id).get_result():
    raise endpoints.ForbiddenException('access denied')
  _, bucket = config.get_bucket(bucket_id)

  if not any(b.name == request.builder for b in bucket.swarming.builders):
    raise endpoints.BadRequestException(
        'builder "%s" not found in bucket "%s"' %
        (request.builder, bucket_id))

  project, bucket = config.parse_bucket_id(bucket_id)
  builder_id = build_pb2.BuilderID(
      project=project, bucket=bucket, builder=request.builder)
  seq_name = sequence.builder_seq_name(builder_id)
  try:
    sequence.set_next(seq_name, request.next_number)
  except ValueError as ex:
    raise endpoints.BadRequestException(str(ex))
  return message_types.VoidMessage()

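# Hypothetical request sketch (the message name and field values below are
# assumptions for illustration): a caller supplies the v1 bucket name, a
# builder defined in that bucket's swarming config, and the desired next
# number; a ValueError from sequence.set_next is surfaced to the caller as a
# BadRequestException.
#
#   request = SetNextBuildNumberRequest(  # assumed message name
#       bucket='luci.chromium.try',
#       builder='linux-rel',
#       next_number=100,
#   )
#   service.set_next_build_number(request)
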
def put_request_message_to_build_request(put_request):
  """Converts PutRequest to BuildRequest.

  Raises errors.InvalidInputError if the put_request is invalid.
  """
  lease_expiration_date = parse_datetime(put_request.lease_expiration_ts)
  errors.validate_lease_expiration_date(lease_expiration_date)

  # Read parameters.
  parameters = parse_json_object(
      put_request.parameters_json, 'parameters_json')
  parameters = parameters or {}
  validate_known_build_parameters(parameters)

  builder = parameters.get(model.BUILDER_PARAMETER) or ''

  # Validate tags.
  buildtags.validate_tags(put_request.tags, 'new', builder=builder)

  # Read properties. Remove them from parameters.
  props = parameters.pop(model.PROPERTIES_PARAMETER, None)
  if props is not None and not isinstance(props, dict):
    raise errors.InvalidInputError(
        '"properties" parameter must be a JSON object or null')
  props = props or {}

  changes = parameters.get(_PARAM_CHANGES)
  if changes:  # pragma: no branch
    # Buildbucket-Buildbot integration passes repo_url of the first change in
    # build parameter "changes" as the "repository" attribute of SourceStamp.
    # https://chromium.googlesource.com/chromium/tools/build/+/2c6023d
    # /scripts/master/buildbucket/changestore.py#140
    # Buildbot passes the repository of the build source stamp as the
    # "repository" build property. Recipes, in particular the bot_update
    # recipe module, rely on the "repository" property, and it is an almost
    # sane property to support in swarmbucket.
    repo_url = changes[0].get('repo_url')
    if repo_url:  # pragma: no branch
      props['repository'] = repo_url

    # Buildbot-Buildbucket integration converts emails in changes to the
    # "blamelist" property.
    emails = [c.get('author', {}).get('email') for c in changes]
    props['blamelist'] = filter(None, emails)

  # Create a v2 request.
  sbr = rpc_pb2.ScheduleBuildRequest(
      builder=build_pb2.BuilderID(builder=builder),
      properties=bbutil.dict_to_struct(props),
      request_id=put_request.client_operation_id,
      experimental=bbutil.BOOLISH_TO_TRINARY[put_request.experimental],
      canary=api_common.CANARY_PREFERENCE_TO_TRINARY.get(
          put_request.canary_preference, common_pb2.UNSET),
  )
  sbr.builder.project, sbr.builder.bucket = config.parse_bucket_id(
      put_request.bucket)

  # Parse tags. Extract gitiles commit and gerrit changes.
  tags, gitiles_commit, gerrit_changes = parse_v1_tags(put_request.tags)
  sbr.tags.extend(tags)
  if gitiles_commit:
    sbr.gitiles_commit.CopyFrom(gitiles_commit)

  # Gerrit changes explicitly passed via the "gerrit_changes" parameter win.
  gerrit_change_list = parameters.pop('gerrit_changes', None)
  if gerrit_change_list is not None:
    if not isinstance(gerrit_change_list, list):  # pragma: no cover
      raise errors.InvalidInputError('gerrit_changes must be a list')
    try:
      gerrit_changes = [
          json_format.ParseDict(c, common_pb2.GerritChange())
          for c in gerrit_change_list
      ]
    except json_format.ParseError as ex:  # pragma: no cover
      raise errors.InvalidInputError('Invalid gerrit_changes: %s' % ex)

  sbr.gerrit_changes.extend(gerrit_changes)

  if (not gerrit_changes and
      not sbr.builder.bucket.startswith('master.')):  # pragma: no cover
    changes = parameters.get('changes')
    if isinstance(changes, list) and changes and not gitiles_commit:
      legacy_revision = changes[0].get('revision')
      if legacy_revision:
        raise errors.InvalidInputError(
            'legacy revision without gitiles buildset tag')

  # Populate Gerrit project from the patch_project property.
  # V2 API users will have to provide this.
  patch_project = props.get('patch_project')
  if len(sbr.gerrit_changes) == 1 and isinstance(patch_project, basestring):
    sbr.gerrit_changes[0].project = patch_project

  # Read PubSub callback.
  pubsub_callback_auth_token = None
  if put_request.pubsub_callback:
    pubsub_callback_auth_token = put_request.pubsub_callback.auth_token
    pubsub_callback_to_notification_config(
        put_request.pubsub_callback, sbr.notify)

  # Validate the resulting v2 request before continuing.
  with _wrap_validation_error():
    validation.validate_schedule_build_request(sbr, legacy=True)

  return creation.BuildRequest(
      schedule_build_request=sbr,
      parameters=parameters,
      lease_expiration_date=lease_expiration_date,
      pubsub_callback_auth_token=pubsub_callback_auth_token,
      override_builder_cfg=_override_builder_cfg_func(parameters),
  )

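# Worked mapping sketch (hedged: all field values are assumptions,
# put_request.bucket is assumed to already be in the "project/bucket" form
# that config.parse_bucket_id expects, and model.BUILDER_PARAMETER /
# model.PROPERTIES_PARAMETER are assumed to be "builder_name" / "properties").
# A v1 request roughly like
#
#   PutRequest(
#       bucket='chromium/try',
#       client_operation_id='op-123',
#       parameters_json='{"builder_name": "linux-rel",'
#                       ' "properties": {"foo": "bar"}}',
#   )
#
# becomes a ScheduleBuildRequest with
#   builder = {project: "chromium", bucket: "try", builder: "linux-rel"},
#   properties = {"foo": "bar"},
#   request_id = "op-123",
# plus any tags, gitiles commit, and gerrit changes extracted from
# put_request.tags by parse_v1_tags.
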
def get_project_id(bucket_id):
  project_id, _ = config.parse_bucket_id(bucket_id)
  return project_id

def format_luci_bucket(bucket_id):
  """Returns V1 luci bucket name, e.g. "luci.chromium.try"."""
  return 'luci.%s.%s' % config.parse_bucket_id(bucket_id)

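# Illustrative note (the example id is an assumption): config.parse_bucket_id
# returns a (project, bucket) tuple, which is why it can be interpolated
# directly into the format string above; e.g. ('chromium', 'try') gives
# 'luci.chromium.try'.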