Example #1
def test_create_build_id_generates_monotonically_decreasing_ids(self):
    now = datetime.datetime(2015, 2, 24)
    ids = []
    for i in xrange(1000):
        now += datetime.timedelta(seconds=i)
        ids.extend(model.create_build_ids(now, 5))
    self.assertEqual(ids, sorted(ids, reverse=True))
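The test above relies on an ordering contract for model.create_build_ids; below is a minimal sketch of that assumed contract (IDs produced for later timestamps are smaller, so newer builds sort first under ascending key order), reusing the same model module as the snippets in this section:

import datetime

# Assumption (not stated explicitly in the source): create_build_ids(dt, n)
# returns n IDs for timestamp dt, and a later timestamp yields strictly
# smaller IDs than an earlier one. `model` is the buildbucket model module
# used throughout these examples.
earlier = model.create_build_ids(datetime.datetime(2015, 2, 24), 2)
later = model.create_build_ids(datetime.datetime(2015, 2, 24, 0, 0, 1), 2)
assert max(later) < min(earlier)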
Example #2
def put_many_builds(self, count=100, **build_proto_fields):
    builds = []
    build_ids = model.create_build_ids(utils.utcnow(), count)
    for build_id in build_ids:
        builds.append(self.classic_build(id=build_id,
                                         **build_proto_fields))
        self.now += datetime.timedelta(seconds=1)
    ndb.put_multi(builds)
    return builds
Example #3
    def put_build(self, **build_proto_fields):
        """Puts a build and updates tag index."""
        build_proto_fields.setdefault('id',
                                      model.create_build_ids(self.now, 1)[0])
        self.now += datetime.timedelta(seconds=1)

        build = test_util.build(**build_proto_fields)
        build.put()

        index_entry = search.TagIndexEntry(
            build_id=build.key.id(),
            bucket_id=build.bucket_id,
        )
        for t in search.indexed_tags(build.tags):
            search.add_to_tag_index_async(t, [index_entry]).get_result()
        return build
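A hedged usage sketch (not part of the source) of the helper above inside a test method, assuming self.INDEXED_TAG is a 'key:value' string as in Example #5 and that the tag proto fields accept dict initializers:

    # Hypothetical test usage of put_build; the tag field name and format
    # are assumptions.
    key, value = self.INDEXED_TAG.split(':', 1)
    build = self.put_build(tags=[dict(key=key, value=value)])
    # put_build assigned an ID via model.create_build_ids and indexed the tag.
    self.assertTrue(build.key.id())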
Example #4
def add_many_async(build_requests):
    """Adds many builds in a batch.

    Does not check permissions.
    Assumes build_requests is valid.

    Returns:
      A list of (new_build, exception) tuples in the same order.
      Exactly one item of a tuple will be non-None.
      The exception can be errors.InvalidInputError.

    Raises:
      Any exception that datastore operations can raise.
    """
    # When changing this code, make corresponding changes to
    # swarmbucket_api.SwarmbucketApi.get_task_def.

    now = utils.utcnow()
    identity = auth.get_current_identity()

    logging.info('%s is creating %d builds', identity.to_bytes(),
                 len(build_requests))

    settings = yield config.get_settings_async()

    # Fetch and index configs.
    bucket_ids = {br.bucket_id for br in build_requests}
    bucket_cfgs = yield config.get_buckets_async(bucket_ids)
    builder_cfgs = {}  # {bucket_id: {builder_name: cfg}}
    for bucket_id, bucket_cfg in bucket_cfgs.iteritems():
        builder_cfgs[bucket_id] = {
            b.name: b
            for b in bucket_cfg.swarming.builders
        }

    # Prepare NewBuild objects.
    new_builds = []
    for r in build_requests:
        builder = r.schedule_build_request.builder.builder
        bucket_builder_cfgs = builder_cfgs[r.bucket_id]
        builder_cfg = bucket_builder_cfgs.get(builder)

        # Apply builder config overrides, if any.
        # Exists for backward compatibility, runs only in V1 code path.
        if builder_cfg and r.override_builder_cfg:  # pragma: no cover
            builder_cfg = copy.deepcopy(builder_cfg)
            r.override_builder_cfg(builder_cfg)

        nb = NewBuild(r, builder_cfg)
        if bucket_builder_cfgs and not builder_cfg:
            nb.exception = errors.BuilderNotFoundError(
                'builder "%s" not found in bucket "%s"' %
                (builder, r.bucket_id))
        new_builds.append(nb)

    # Check memcache.
    yield [nb.check_cache_async() for nb in new_builds if not nb.final]

    # Create and put builds.
    to_create = [nb for nb in new_builds if not nb.final]
    if to_create:
        build_ids = model.create_build_ids(now, len(to_create))
        builds = yield [
            nb.request.create_build_async(build_id, settings, nb.builder_cfg,
                                          identity, now)
            for nb, build_id in zip(to_create, build_ids)
        ]
        for nb, build in zip(to_create, builds):
            nb.build = build

        yield _update_builders_async(to_create, now)
        yield _generate_build_numbers_async(to_create)
        yield search.update_tag_indexes_async([nb.build for nb in to_create])
        yield [nb.put_and_cache_async() for nb in to_create]

    raise ndb.Return([nb.result() for nb in new_builds])
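The yield expressions and the final raise ndb.Return(...) indicate an ndb tasklet; the following is a hedged sketch (caller code assumed, not shown in the source) of how the returned future might be consumed:

# Hypothetical caller; build_requests is assumed to be a list of the request
# objects this function expects (each carrying bucket_id,
# schedule_build_request and create_build_async, as used above).
results = add_many_async(build_requests).get_result()
for build, exc in results:
    # Per the docstring, exactly one of (build, exc) is non-None.
    if exc:
        logging.warning('build creation failed: %s', exc)
    else:
        logging.info('created build %d', build.key.id())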
Example #5
    def test_filter_by_creation_time_range(self):
        too_old = model.BEGINING_OF_THE_WORLD - datetime.timedelta(
            milliseconds=1)
        old_time = datetime.datetime(2010, 2, 4)
        new_time = datetime.datetime(2012, 2, 4)

        old_build = self.put_build(
            id=model.create_build_ids(old_time, 1)[0],
            create_time=test_util.dt2ts(old_time),
        )
        new_build = self.put_build(
            id=model.create_build_ids(new_time, 1)[0],
            create_time=test_util.dt2ts(new_time),
        )

        # Test lower bound

        builds, _ = self.search(create_time_low=too_old)
        self.assertEqual(builds, [new_build, old_build])

        builds, _ = self.search(create_time_low=old_time)
        self.assertEqual(builds, [new_build, old_build])

        builds, _ = self.search(
            create_time_low=old_time,
            tags=[self.INDEXED_TAG],
        )
        self.assertEqual(builds, [new_build, old_build])

        builds, _ = self.search(create_time_low=new_time)
        self.assertEqual(builds, [new_build])

        builds, _ = self.search(
            create_time_low=new_time,
            tags=[self.INDEXED_TAG],
        )
        self.assertEqual(builds, [new_build])

        # Test upper bound

        builds, _ = self.search(create_time_high=too_old)
        self.assertEqual(builds, [])

        builds, _ = self.search(create_time_high=old_time)
        self.assertEqual(builds, [])

        builds, _ = self.search(
            create_time_high=old_time,
            tags=[self.INDEXED_TAG],
        )
        self.assertEqual(builds, [])

        builds, _ = self.search(create_time_high=new_time)
        self.assertEqual(builds, [old_build])
        builds, _ = self.search(
            create_time_high=new_time,
            tags=[self.INDEXED_TAG],
        )
        self.assertEqual(builds, [old_build])

        builds, _ = self.search(
            create_time_high=(new_time + datetime.timedelta(milliseconds=1)),
            tags=[self.INDEXED_TAG],
        )
        self.assertEqual(builds, [new_build, old_build])

        # Test both sides bounded

        builds, _ = self.search(
            create_time_low=new_time,
            create_time_high=old_time,
        )
        self.assertEqual(builds, [])

        builds, _ = self.search(
            create_time_low=old_time,
            create_time_high=new_time,
        )
        self.assertEqual(builds, [old_build])

        builds, _ = self.search(
            create_time_low=old_time,
            create_time_high=new_time,
            tags=[self.INDEXED_TAG],
        )
        self.assertEqual(builds, [old_build])

        # Test reversed bounds

        builds, _ = self.search(
            create_time_low=new_time,
            create_time_high=old_time,
            tags=[self.INDEXED_TAG],
        )
        self.assertEqual(builds, [])
Example #6
def build_bundle(for_creation=False, **build_proto_fields):  # pragma: no cover
    """Creates a model.BuildBundle from proto fields, with reasonable defaults.

    If for_creation is True, returned Build.proto.{infra, input.properties} will
    be set.
    """
    now = utils.utcnow()

    # Compute defaults.
    proto = copy.deepcopy(BUILD_DEFAULTS)
    if not proto.HasField('create_time'):
        proto.create_time.FromDatetime(now)
    proto.MergeFrom(build_pb2.Build(**build_proto_fields))
    proto.id = proto.id or model.create_build_ids(
        proto.create_time.ToDatetime(), 1, randomness=False)[0]

    with_start_time = (common_pb2.STARTED, common_pb2.SUCCESS)
    if not proto.HasField('start_time') and proto.status in with_start_time:
        proto.start_time.FromDatetime(now)
    completed = proto.status not in (common_pb2.SCHEDULED, common_pb2.STARTED)
    if not proto.HasField('end_time') and completed:
        proto.end_time.FromDatetime(now)
    proto.update_time.FromDatetime(now)

    if (proto.input.properties
            and not proto.infra.buildbucket.HasField('requested_properties')):
        proto.infra.buildbucket.requested_properties.CopyFrom(
            proto.input.properties)

    tags = {buildtags.unparse(t.key, t.value) for t in proto.tags}
    tags.add('builder:%s' % proto.builder.builder)
    if proto.number:
        tags.add(buildtags.build_address_tag(proto.builder, proto.number))
    proto.ClearField('tags')

    b = model.Build(
        id=proto.id,
        proto=proto,
        created_by=auth.Identity.from_bytes(proto.created_by),
        create_time=proto.create_time.ToDatetime(),
        status_changed_time=now,
        tags=sorted(tags),
        parameters={},
        url='https://ci.example.com/%d' % proto.id,
        is_luci=True,
        swarming_task_key='swarming_task_key',
    )
    b.update_v1_status_fields()
    if proto.input.HasField('gitiles_commit'):
        b.parameters['changes'] = [{
            'author': {
                'email': '*****@*****.**'
            },
            'repo_url':
            'https://chromium.googlesource.com/chromium/src',
        }]

    ret = model.BuildBundle(
        b,
        infra=model.BuildInfra(key=model.BuildInfra.key_for(b.key),
                               infra=proto.infra.SerializeToString()),
        input_properties=model.BuildInputProperties(
            key=model.BuildInputProperties.key_for(b.key),
            properties=proto.input.properties.SerializeToString(),
        ),
        output_properties=model.BuildOutputProperties(
            key=model.BuildOutputProperties.key_for(b.key),
            properties=proto.output.properties.SerializeToString(),
        ),
        steps=model.BuildSteps(
            key=model.BuildSteps.key_for(b.key),
            step_container_bytes=(build_pb2.Build(
                steps=proto.steps).SerializeToString()),
        ),
    )

    if not for_creation:
        proto.ClearField('infra')
        proto.input.ClearField('properties')
    proto.output.ClearField('properties')
    proto.ClearField('steps')
    return ret
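A hedged usage sketch (not from the source) of the factory above; field values are illustrative, and common_pb2 is assumed to be the buildbucket common proto module already imported in this file:

# Hypothetical usage. With for_creation=True the returned proto keeps infra
# and input.properties; output.properties and steps are always cleared from
# the proto and live only in the bundle's side entities.
bundle = build_bundle(
    for_creation=True,
    status=common_pb2.SUCCESS,
    tags=[dict(key='buildset', value='patch/gerrit/1')],
)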