def test_failed(self, on_build_completed, on_build_completing_async):
  """Updating a build to FAILURE ends it and cancels incomplete steps."""
  steps = model.BuildSteps.make(
      build_pb2.Build(
          id=123,
          steps=[dict(name='step', status=common_pb2.SCHEDULED)],
      ))
  steps.put()
  on_build_completing_async.return_value = future(None)

  build = test_util.build(id=123)
  build.put()

  req, ctx = self._mk_update_req(
      build_pb2.Build(
          id=123,
          status=common_pb2.FAILURE,
          summary_markdown='bad',
      ),
      paths=['build.status', 'build.summary_markdown'],
  )
  self.call(self.api.UpdateBuild, req, ctx=ctx)

  # The build must now be failed, with the end time stamped.
  build = build.key.get()
  self.assertEqual(build.proto.status, common_pb2.FAILURE)
  self.assertEqual(build.proto.summary_markdown, 'bad')
  self.assertEqual(build.proto.end_time.ToDatetime(), self.now)
  on_build_completing_async.assert_called_once_with(build)
  on_build_completed.assert_called_once_with(build)

  # The still-SCHEDULED step must have been canceled along with the build.
  steps = steps.key.get()
  step_container = build_pb2.Build()
  steps.read_steps(step_container)
  self.assertEqual(step_container.steps[0].status, common_pb2.CANCELED)
def test_cancel_incomplete(self):
  """Only non-terminal steps are canceled; their summaries are annotated."""
  steps = model.BuildSteps.make(
      build_pb2.Build(
          id=123,
          steps=[
              dict(
                  name='a',
                  status=common_pb2.SUCCESS,
              ),
              dict(
                  name='b',
                  status=common_pb2.STARTED,
                  summary_markdown='running',
                  start_time=dict(seconds=123),
              ),
          ],
      ))
  steps.put()

  end_ts = timestamp_pb2.Timestamp(seconds=12345)
  self.cancel_incomplete_steps(123, end_ts)

  steps = steps.key.get()
  step_container = build_pb2.Build()
  steps.read_steps(step_container)
  # Step "a" already ended and must be untouched; step "b" was running.
  self.assertEqual(step_container.steps[0].status, common_pb2.SUCCESS)
  self.assertEqual(step_container.steps[1].status, common_pb2.CANCELED)
  self.assertEqual(step_container.steps[1].end_time, end_ts)
  self.assertEqual(
      step_container.steps[1].summary_markdown,
      'running\nstep was canceled because it did not end before build ended'
  )
def test_large(self):
  """Oversized step containers are zlib-compressed and still round-trip."""
  container = build_pb2.Build(steps=[dict(name='x' * 1000)])
  entity = model.BuildSteps()
  entity.write_steps(container)
  # The 1000-char step name exceeds the compression threshold.
  self.assertTrue(entity.step_container_bytes_zipped)

  entity.put()
  entity = entity.key.get()
  actual = build_pb2.Build()
  entity.read_steps(actual)
  self.assertEqual(actual, container)
def test_canceled(self):
  """CANCELED proto status maps to the legacy canceled-explicitly triple."""
  self.compare(
      model.Build(
          proto=build_pb2.Build(status=common_pb2.CANCELED),
          status_legacy=model.BuildStatus.COMPLETED,
          result=model.BuildResult.CANCELED,
          cancelation_reason=model.CancelationReason.CANCELED_EXPLICITLY,
      ),
  )
def cancel_incomplete_steps_async(cls, build_id, end_ts):
  """Marks incomplete steps as canceled in the Datastore, if any.

  Must be called inside an ndb transaction. Steps that are not in a
  terminal status get status CANCELED, end_time set to |end_ts|, and an
  explanatory line appended to their summary_markdown.

  Args:
    build_id: id of the build whose steps to update.
    end_ts: a timestamp_pb2.Timestamp with a non-zero seconds field.
  """
  assert end_ts.seconds
  assert ndb.in_transaction()

  entity = yield cls.key_for(ndb.Key(Build, build_id)).get_async()
  if not entity:
    # No steps were ever reported for this build; nothing to cancel.
    return

  container = build_pb2.Build()
  entity.read_steps(container)

  changed = False
  for s in container.steps:
    if not is_terminal_status(s.status):
      s.status = common_pb2.CANCELED
      s.end_time.CopyFrom(end_ts)
      if s.summary_markdown:  # pragma: no branch
        s.summary_markdown += '\n'
      s.summary_markdown += (
          'step was canceled because it did not end before build ended'
      )
      changed = True

  if changed:  # pragma: no branch
    entity.write_steps(container)
    yield entity.put_async()
def test_update_steps(self):
  """Steps sent via UpdateBuild are persisted in BuildSteps."""
  build = test_util.build(id=123, status=common_pb2.STARTED)
  build.put()

  # Load the reference steps from the checked-in text proto.
  build_proto = build_pb2.Build(id=123)
  with open(os.path.join(THIS_DIR, 'steps.pb.txt')) as f:
    text = protoutil.parse_multiline(f.read())
    text_format.Merge(text, build_proto)

  req, ctx = self._mk_update_req(build_proto, paths=['build.steps'])
  self.call(self.api.UpdateBuild, req, ctx=ctx)

  persisted = model.BuildSteps.key_for(build.key).get()
  persisted_container = build_pb2.Build()
  persisted.read_steps(persisted_container)
  self.assertEqual(persisted_container.steps, build_proto.steps)
def test_timeout(self):
  """INFRA_FAILURE with timeout details maps to legacy CANCELED/TIMEOUT."""
  self.compare(
      model.Build(
          proto=build_pb2.Build(
              status=common_pb2.INFRA_FAILURE,
              status_details=dict(timeout=dict()),
          ),
          status_legacy=model.BuildStatus.COMPLETED,
          result=model.BuildResult.CANCELED,
          cancelation_reason=model.CancelationReason.TIMEOUT,
      ),
  )
def _export_builds(dataset, table_name, builds, deadline):
  """Saves builds to BigQuery.

  Logs insert errors and returns a list of ids of builds that could not be
  inserted.

  Args:
    dataset: BigQuery dataset name.
    table_name: BigQuery table name within the dataset.
    builds: model.Build entities to export.
    deadline: datetime by which the insert request must complete.
  """
  # BigQuery API doc:
  # https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
  logging.info('sending %d rows', len(builds))

  # Convert each build entity to a fully-loaded proto.
  pairs = [(b, build_pb2.Build()) for b in builds]
  model.builds_to_protos_async(
      pairs,
      load_tags=True,
      load_input_properties=True,
      load_output_properties=True,
      load_steps=True,
      load_infra=True,
  ).get_result()

  # Clear fields that we don't want in BigQuery.
  for _, proto in pairs:
    proto.infra.buildbucket.hostname = ''
    for s in proto.steps:
      s.summary_markdown = ''
      s.ClearField('logs')

  res = net.json_request(
      url=(('https://www.googleapis.com/bigquery/v2/'
            'projects/%s/datasets/%s/tables/%s/insertAll') %
           (app_identity.get_application_id(), dataset, table_name)),
      method='POST',
      payload={
          'kind': 'bigquery#tableDataInsertAllRequest',
          # Do not fail entire request because of one bad build.
          # We handle invalid rows below.
          'skipInvalidRows': True,
          'ignoreUnknownValues': False,
          'rows': [{
              'insertId': str(p.id),
              'json': bqh.message_to_dict(p),
          } for _, p in pairs],
      },
      scopes=bqh.INSERT_ROWS_SCOPE,
      # deadline parameter here is duration in seconds.
      deadline=(deadline - utils.utcnow()).total_seconds(),
  )

  # Collect ids of rows BigQuery rejected so the caller can retry them.
  failed_ids = []
  for err in res.get('insertErrors', []):
    _, bp = pairs[err['index']]
    failed_ids.append(bp.id)
    logging.error(
        'failed to insert row for build %d: %r', bp.id, err['errors'])
  return failed_ids
def create_build_proto_async(self, build_id, settings, builder_cfg, created_by,
                             now):
  """Converts the request to a build_pb2.Build.

  Assumes self is valid.

  Args:
    build_id: id to assign to the new build.
    settings: global service settings applied first.
    builder_cfg: optional builder config whose values layer on top.
    created_by: Identity of the requester.
    now: creation datetime.

  Returns (via ndb.Return): the populated build_pb2.Build.
  """
  sbr = self.schedule_build_request

  bp = build_pb2.Build()
  _apply_global_settings(settings, bp)
  if builder_cfg:  # pragma: no branch
    yield _apply_builder_config_async(builder_cfg, bp)

  bp.id = build_id
  bp.builder.CopyFrom(sbr.builder)
  bp.status = common_pb2.SCHEDULED
  bp.created_by = created_by.to_bytes()
  bp.create_time.FromDatetime(now)
  bp.critical = sbr.critical
  bp.exe.cipd_version = sbr.exe.cipd_version or bp.exe.cipd_version
  # If the SBR expressed canary preference, override what the config said.
  if sbr.canary != common_pb2.UNSET:
    bp.canary = sbr.canary == common_pb2.YES

  # Populate input.
  # Override properties from the config with values in the request.
  bbutil.update_struct(bp.input.properties, sbr.properties)
  if sbr.HasField('gitiles_commit'):
    bp.input.gitiles_commit.CopyFrom(sbr.gitiles_commit)
  bp.input.gerrit_changes.extend(sbr.gerrit_changes)

  # Populate infra fields.
  bp.infra.buildbucket.requested_properties.CopyFrom(sbr.properties)
  bp.infra.buildbucket.requested_dimensions.extend(sbr.dimensions)
  if sbr.experimental != common_pb2.UNSET:
    bp.input.experimental = sbr.experimental == common_pb2.YES
  bp.infra.logdog.project = bp.builder.project
  bp.infra.logdog.prefix = 'buildbucket/%s/%s' % (
      app_identity.get_default_version_hostname(), bp.id)

  # Populate swarming-specific fields.
  sw = bp.infra.swarming
  # Merge requested dimensions over the configured task dimensions.
  configured_task_dims = list(sw.task_dimensions)
  sw.ClearField('task_dimensions')
  sw.task_dimensions.extend(
      _apply_dimension_overrides(configured_task_dims, sbr.dimensions))

  if sbr.priority:
    sw.priority = sbr.priority
  elif bp.input.experimental:
    # Experimental builds yield to production ones by doubling priority.
    sw.priority = min(255, sw.priority * 2)

  self._ensure_builder_cache(bp)

  raise ndb.Return(bp)
def test_update_steps_of_scheduled_build(self):
  """Updating steps of a build that has not started is rejected."""
  test_util.build(id=123, status=common_pb2.SCHEDULED).put()
  build_proto = build_pb2.Build(id=123)
  req, ctx = self._mk_update_req(build_proto, paths=['build.steps'])
  self.call(
      self.api.UpdateBuild,
      req,
      ctx=ctx,
      expected_code=prpc.StatusCode.INVALID_ARGUMENT,
  )
def test_steps_too_big(self):
  """A steps payload over the size limit is rejected as invalid."""
  msg = rpc_pb2.UpdateBuildRequest(
      build=build_pb2.Build(
          id=1,
          steps=[
              step_pb2.Step(name='x' * 1000, status=common_pb2.SCHEDULED),
          ],
      ),
      update_mask=field_mask_pb2.FieldMask(paths=['build.steps']),
  )
  self.assert_invalid(msg, r'build\.steps: too big to accept')
def test_steps(self):
  """load_steps=True pulls persisted BuildSteps into the proto."""
  build = test_util.build(id=1)
  steps = [
      step_pb2.Step(name='a', status=common_pb2.SUCCESS),
      step_pb2.Step(name='b', status=common_pb2.STARTED),
  ]
  build_steps = model.BuildSteps.make(build_pb2.Build(id=1, steps=steps))
  build_steps.put()

  actual = self.to_proto(build, load_steps=True)
  self.assertEqual(list(actual.steps), steps)
def test_ended_build(self):
  """An already-ended build cannot be updated."""
  test_util.build(id=123, status=common_pb2.SUCCESS).put()
  req, ctx = self._mk_update_req(build_pb2.Build(id=123))
  self.call(
      self.api.UpdateBuild,
      req,
      ctx=ctx,
      expected_code=prpc.StatusCode.FAILED_PRECONDITION,
      expected_details='Cannot update an ended build',
  )
def test_empty_property_value(self):
  """An output property with an unset Value kind is invalid."""
  msg = rpc_pb2.UpdateBuildRequest(
      build=build_pb2.Build(
          id=1,
          output=dict(properties=dict(fields=dict(a=struct_pb2.Value()))),
      ),
      update_mask=field_mask_pb2.FieldMask(paths=['build.output.properties']),
  )
  self.assert_invalid(
      msg,
      r'build\.output\.properties\.a: value is not set; '
      'for null, initialize null_value'
  )
def write_steps(self, build_proto):
  """Serializes build_proto.steps into self.

  Stores the steps as a serialized build_pb2.Build containing only steps,
  zlib-compressed when the raw serialization exceeds MAX_STEPS_LEN.
  """
  container = build_pb2.Build(steps=build_proto.steps)
  container_bytes = container.SerializeToString()

  # Compress only if necessary.
  zipped = len(container_bytes) > self.MAX_STEPS_LEN
  if zipped:
    container_bytes = zlib.compress(container_bytes)

  self.step_container_bytes = container_bytes
  self.step_container_bytes_zipped = zipped
def error_handling_test(self, ex, expected_code, expected_details):
  """Asserts that raising |ex| in an RPC impl yields the expected status."""

  @api.rpc_impl_async('GetBuild')
  @ndb.tasklet
  def get_build_async(_req, _res, _ctx, _mask):
    raise ex

  ctx = prpc_context.ServicerContext()
  req = rpc_pb2.GetBuildRequest(id=1)
  res = build_pb2.Build()
  # pylint: disable=no-value-for-parameter
  get_build_async(req, res, ctx).get_result()
  self.assertEqual(ctx.code, expected_code)
  self.assertEqual(ctx.details, expected_details)
def test_invalid_id(self):
  """Updating a build that does not exist returns NOT_FOUND."""
  req, ctx = self._mk_update_req(
      build_pb2.Build(
          id=123,
          status=common_pb2.STARTED,
      ))
  self.call(
      self.api.UpdateBuild,
      req,
      ctx=ctx,
      expected_code=prpc.StatusCode.NOT_FOUND,
      expected_details='Cannot update nonexisting build with id 123',
  )
def test_invalid_build_proto(self, mock_validation):
  """Validation errors surface as INVALID_ARGUMENT with their message."""
  mock_validation.side_effect = validation.Error('invalid build proto')
  build = build_pb2.Build(id=123)
  req, ctx = self._mk_update_req(build)
  self.call(
      self.api.UpdateBuild,
      req,
      ctx=ctx,
      expected_code=prpc.StatusCode.INVALID_ARGUMENT,
      expected_details='invalid build proto',
  )
def test_missing_token(self):
  """An UpdateBuild request without a build token is unauthenticated."""
  test_util.build(id=123).put()
  build = build_pb2.Build(
      id=123,
      status=common_pb2.STARTED,
  )
  req, ctx = self._mk_update_req(build, token=None)
  self.call(
      self.api.UpdateBuild,
      req,
      ctx=ctx,
      expected_code=prpc.StatusCode.UNAUTHENTICATED,
      expected_details='missing token in build update request',
  )
def test_empty_summary(self):
  """An update masking summary_markdown with no value clears it."""
  build = test_util.build(
      id=123, status=common_pb2.STARTED, summary_markdown='ok')
  build.put()

  req, ctx = self._mk_update_req(
      # No summary in the build.
      build_pb2.Build(id=123),
      paths=['build.summary_markdown'],
  )
  self.call(self.api.UpdateBuild, req, ctx=ctx)

  build = build.key.get()
  self.assertEqual(build.proto.summary_markdown, '')
def test_update_properties(self):
  """Output properties from the request are persisted for the build."""
  build = test_util.build(id=123, status=common_pb2.STARTED)
  build.put()

  expected_props = {'a': 1}
  build_proto = build_pb2.Build(id=123)
  build_proto.output.properties.update(expected_props)
  req, ctx = self._mk_update_req(
      build_proto, paths=['build.output.properties'])
  self.call(self.api.UpdateBuild, req, ctx=ctx)

  out_props = model.BuildOutputProperties.key_for(build.key).get()
  self.assertEqual(test_util.msg_to_dict(out_props.parse()), expected_props)
def test_started(self, on_build_started, on_build_starting_async):
  """Updating status to STARTED stamps start_time and fires callbacks."""
  on_build_starting_async.return_value = future(None)
  build = test_util.build(id=123)
  build.put()

  req, ctx = self._mk_update_req(
      build_pb2.Build(id=123, status=common_pb2.STARTED),
      paths=['build.status'],
  )
  self.call(self.api.UpdateBuild, req, ctx=ctx)

  build = build.key.get()
  self.assertEqual(build.proto.status, common_pb2.STARTED)
  self.assertEqual(build.proto.start_time.ToDatetime(), self.now)
  on_build_starting_async.assert_called_once_with(build)
  on_build_started.assert_called_once_with(build)
def test_invalid_token(self):
  """A build token that fails validation yields UNAUTHENTICATED."""
  test_util.build(id=123).put()
  self.validate_build_token.side_effect = auth.InvalidTokenError
  build = build_pb2.Build(
      id=123,
      status=common_pb2.STARTED,
  )
  req, ctx = self._mk_update_req(build)
  self.call(
      self.api.UpdateBuild,
      req,
      ctx=ctx,
      expected_code=prpc.StatusCode.UNAUTHENTICATED,
  )
def to_proto(self, build, load_tags=False, load_input_properties=False,
             load_output_properties=False, load_steps=False, load_infra=False):
  """Converts a single build entity to a build_pb2.Build.

  The load_* flags control which heavyweight fields are populated.
  """
  proto = build_pb2.Build()
  model.builds_to_protos_async(
      [(build, proto)],
      load_tags=load_tags,
      load_input_properties=load_input_properties,
      load_output_properties=load_output_properties,
      load_steps=load_steps,
      load_infra=load_infra,
  ).get_result()
  return proto
def test_invalid_user(self):
  """A caller without update permission gets PERMISSION_DENIED."""
  test_util.build(id=123).put()
  self.can_update_build_async.return_value = future(False)
  build = build_pb2.Build(
      id=123,
      status=common_pb2.STARTED,
  )
  req, ctx = self._mk_update_req(build)
  self.call(
      self.api.UpdateBuild,
      req,
      ctx=ctx,
      expected_code=prpc.StatusCode.PERMISSION_DENIED,
      expected_details='anonymous:anonymous not permitted to update build',
  )
def test_valid(self):
  # Comprehensive validity test. Some specific cases are covered later.
  build = build_pb2.Build()
  with open(os.path.join(THIS_DIR, 'steps.pb.txt')) as f:
    text_format.Merge(f.read(), build)

  msg = self._mk_req(
      ['build.status', 'build.steps', 'build.output.gitiles_commit'],
      status=common_pb2.SUCCESS,
      steps=build.steps,
      output=dict(
          gitiles_commit=dict(
              host='gerrit.example.com',
              project='project',
              ref='refs/heads/master',
              id='a' * 40,
              position=1,
          ),
      ),
  )
  self.assert_valid(msg)
def CancelBuild(self, req, ctx):
  """Handles the CancelBuild RPC by delegating to cancel_build_async."""
  res = build_pb2.Build()
  cancel_build_async(req, res, ctx).get_result()
  return self._res_if_ok(res, ctx)
def ScheduleBuild(self, req, ctx):
  """Handles the ScheduleBuild RPC by delegating to schedule_build_async."""
  res = build_pb2.Build()
  schedule_build_async(req, res, ctx).get_result()
  return self._res_if_ok(res, ctx)
def UpdateBuild(self, req, ctx):
  """Handles the UpdateBuild RPC by delegating to update_build_async."""
  res = build_pb2.Build()
  update_build_async(req, res, ctx).get_result()
  return self._res_if_ok(res, ctx)
def GetBuild(self, req, ctx):
  """Handles the GetBuild RPC by delegating to get_build_async."""
  res = build_pb2.Build()
  get_build_async(req, res, ctx).get_result()
  return self._res_if_ok(res, ctx)