def test_delete_many_scheduled_builds(self):
  """Bulk deletion by status removes SCHEDULED builds only."""
  pending = test_util.build_bundle(id=1, status=common_pb2.SCHEDULED)
  finished = test_util.build_bundle(id=2, status=common_pb2.SUCCESS)
  pending.put()
  finished.put()

  # Both builds exist before the deletion task runs.
  self.assertIsNotNone(pending.build.key.get())
  self.assertIsNotNone(finished.build.key.get())

  service._task_delete_many_builds(
      pending.build.bucket_id, model.BuildStatus.SCHEDULED
  )

  # The scheduled build is gone; the completed one survives.
  self.assertIsNone(pending.build.key.get())
  self.assertIsNotNone(finished.build.key.get())
def test_delete_many_builds_created_by(self):
  """Deletion filtered by created_by removes only that user's builds."""
  kept = test_util.build_bundle(id=1, created_by='user:[email protected]')
  doomed = test_util.build_bundle(id=2, created_by='user:[email protected]')
  kept.put()
  doomed.put()

  service._task_delete_many_builds(
      kept.build.bucket_id,
      model.BuildStatus.SCHEDULED,
      created_by=doomed.build.created_by,
  )

  # Only the build created by the filtered identity is deleted.
  self.assertIsNone(doomed.build.key.get())
  self.assertIsNotNone(kept.build.key.get())
def test_out_props(self):
  """Output properties are loaded into the proto when requested."""
  expected = bbutil.dict_to_struct({'a': 'b'})
  bundle = test_util.build_bundle(output=dict(properties=expected))
  bundle.put()

  converted = self.to_proto(bundle.build, load_output_properties=True)
  self.assertEqual(converted.output.properties, expected)
def test_cron_export_builds_to_bq_insert_errors(self, delete_tasks):
  """A per-row insert error keeps that row's pull task in the queue.

  BigQuery reports an insertError for index 1, so only tasks 0 and 2 may be
  deleted; task 1 must stay for a later retry.
  """
  bundles = [
      test_util.build_bundle(id=i + 1, status=common_pb2.SUCCESS)
      for i in xrange(3)
  ]
  for b in bundles:
    b.put()
  builds = [b.build for b in bundles]
  tasks = [
      taskqueue.Task(method='PULL', payload=json.dumps({'id': b.key.id()}))
      for b in builds
  ]
  self.queue.add(tasks)
  # Simulate BigQuery rejecting the second row (index 1).
  net.json_request.return_value = {
      'insertErrors': [{
          'index': 1,
          'errors': [{'reason': 'bad', 'message': ':('}],
      }]
  }
  bq._process_pull_task_batch(self.queue.name, 'raw', 'completed_builds')
  self.assertTrue(net.json_request.called)
  # assert second task is not deleted
  deleted = delete_tasks.call_args[0][1]
  self.assertEqual(
      [t.payload for t in deleted],
      [tasks[0].payload, tasks[2].payload],
  )
def setUp(self):
  """Patch network, auth, token and swarming dependencies for sync tests."""
  super(SyncBuildTest, self).setUp()
  self.patch('components.net.json_request_async', autospec=True)
  self.patch('components.auth.delegate_async', return_value=future('blah'))
  self.build_token = 'beeff00d'
  self.patch(
      'tokens.generate_build_token', autospec=True, return_value='deadbeef'
  )
  # Minimal task definition returned by the patched compute_task_def.
  self.task_def = {'is_task_def': True, 'task_slices': [{
      'properties': {},
  }]}
  self.patch(
      'swarming.compute_task_def', autospec=True, return_value=self.task_def
  )
  self.patch(
      'google.appengine.api.app_identity.get_default_version_hostname',
      return_value='cr-buildbucket.appspot.com'
  )
  self.build_bundle = test_util.build_bundle(
      id=1, created_by='user:[email protected]'
  )
  # Start each test from a build that has no swarming task yet.
  self.build_bundle.build.swarming_task_key = None
  with self.build_bundle.infra.mutate() as infra:
    infra.swarming.task_id = ''
  self.build_bundle.put()
def test_infra(self):
  """Infra details appear in the proto when load_infra is set."""
  bundle = test_util.build_bundle(
      infra=dict(swarming=dict(hostname='swarming.example.com'))
  )
  bundle.put()

  proto = self.to_proto(bundle.build, load_infra=True)
  self.assertEqual(proto.infra.swarming.hostname, 'swarming.example.com')
def setUp(self):
  """Freeze time, configure one bucket and store build infra for V1 tests."""
  super(V1ApiTest, self).setUp()
  gae_ts_mon.reset_for_unittest(disable=True)
  auth.disable_process_cache()
  user.clear_request_cache()
  # Pin "now" so timestamp assertions are deterministic.
  self.patch(
      'components.utils.utcnow', return_value=datetime.datetime(2017, 1, 1)
  )
  self.future_date = utils.utcnow() + datetime.timedelta(days=1)
  # future_ts is str because INT64 values are formatted as strings.
  self.future_ts = str(utils.datetime_to_timestamp(self.future_date))
  config.put_bucket(
      'chromium',
      'a' * 40,
      test_util.parse_bucket_cfg(
          '''
          name: "luci.chromium.try"
          acls {
            role: SCHEDULER
            identity: "anonymous:anonymous"
          }
          '''
      ),
  )
  self.build_infra = test_util.build_bundle(id=1).infra
  self.build_infra.put()
def test_expired_build_to_message(self):
  """lease_expiration_ts reflects a lease that expired in the past."""
  expired_at = utils.utcnow() - datetime.timedelta(days=1)
  bundle = test_util.build_bundle()
  bundle.build.lease_key = 1
  bundle.build.lease_expiration_date = expired_at

  msg = api_common.build_to_message(bundle)
  self.assertEqual(
      msg.lease_expiration_ts, utils.datetime_to_timestamp(expired_at)
  )
def test_trimming_include(self, get_async):
  """Fields listed in the field mask are kept in the response."""
  bundle = test_util.build_bundle(
      input=dict(properties=bbutil.dict_to_struct({'a': 'b'}))
  )
  bundle.put()
  get_async.return_value = future(bundle.build)

  request = rpc_pb2.GetBuildRequest(
      id=1, fields=dict(paths=['input.properties'])
  )
  response = self.call(self.api.GetBuild, request)
  self.assertEqual(response.input.properties.items(), [('a', 'b')])
def test_cancel(self, cancel_task_async):
  """cancel_async marks the build CANCELED and cancels its swarming task."""
  test_util.build_bundle(id=1).put()

  build = service.cancel_async(1, summary_markdown='nope').get_result()

  now = utils.utcnow()
  self.assertEqual(build.proto.status, common_pb2.CANCELED)
  self.assertEqual(build.proto.end_time.ToDatetime(), now)
  self.assertEqual(build.proto.summary_markdown, 'nope')
  self.assertEqual(build.proto.canceled_by, self.current_identity.to_bytes())
  self.assertEqual(build.status_changed_time, now)
  cancel_task_async.assert_called_with('swarming.example.com', 'deadbeef')
def test_delete_many_builds_with_tags(self):
  """Tag filters must match exactly for a build to be deleted."""
  bundle = test_util.build_bundle(tags=[dict(key='tag', value='1')])
  bundle.put()
  key = bundle.build.key
  bucket_id = bundle.build.bucket_id

  # A non-matching tag value leaves the build alone.
  service._task_delete_many_builds(
      bucket_id, model.BuildStatus.SCHEDULED, tags=['tag:0']
  )
  self.assertIsNotNone(key.get())

  # The matching tag deletes it.
  service._task_delete_many_builds(
      bucket_id, model.BuildStatus.SCHEDULED, tags=['tag:1']
  )
  self.assertIsNone(key.get())
def test_sync_with_task_result(self, case):
  """Syncing a build against a swarming task result updates its proto.

  `case` supplies the mocked swarming task result plus the expected
  status, status details, timestamps and bot dimensions after
  _sync_build_and_swarming runs.
  """
  logging.info('test case: %s', case)
  bundle = test_util.build_bundle(id=1)
  bundle.put()
  self.patch(
      'swarming._load_task_result',
      autospec=True,
      return_value=case['task_result'],
  )
  swarming._sync_build_and_swarming(1, 1)
  build = bundle.build.key.get()
  build_infra = bundle.infra.key.get()
  bp = build.proto
  self.assertEqual(bp.status, case['status'])
  self.assertEqual(
      bp.status_details.HasField('timeout'),
      case.get('is_timeout', False),
  )
  self.assertEqual(
      bp.status_details.HasField('resource_exhaustion'),
      case.get('is_resource_exhaustion', False)
  )
  self.assertEqual(bp.start_time, case.get('start_time', tspb(0)))
  self.assertEqual(bp.end_time, case.get('end_time', tspb(0)))
  self.assertEqual(
      list(build_infra.parse().swarming.bot_dimensions),
      case.get('bot_dimensions', [])
  )
  # The sync must re-enqueue itself with an incremented generation (2).
  expected_continuation_payload = {
      'id': 1,
      'generation': 2,
  }
  expected_continuation = {
      'name': 'sync-task-1-2',
      'url': '/internal/task/swarming/sync-build/1',
      'payload': json.dumps(expected_continuation_payload, sort_keys=True),
      'retry_options': {
          'task_age_limit': model.BUILD_TIMEOUT.total_seconds()
      },
      'countdown': 60,
  }
  tq.enqueue_async.assert_called_with(
      swarming.SYNC_QUEUE_NAME, [expected_continuation], transactional=False
  )
def test_no_pubsub_callback(self):
  """Without a per-build callback only the global topic is notified."""
  bundle = test_util.build_bundle(id=1)

  # enqueue_notifications_async must run inside a transaction.
  @ndb.transactional
  def txn():
    bundle.put()
    notifications.enqueue_notifications_async(bundle.build).get_result()

  txn()
  global_task_payload = {
      'id': 1,
      'mode': 'global',
  }
  # Exactly one (global) notification task is enqueued.
  tq.enqueue_async.assert_called_with('backend-default', [
      {
          'url': '/internal/task/buildbucket/notify/1',
          'payload': global_task_payload,
          'retry_options': {
              'task_age_limit': model.BUILD_TIMEOUT.total_seconds(),
          },
      },
  ])
  # Executing that task publishes the build to the global builds topic.
  self.app.post_json(
      '/internal/task/buildbucket/notify/1',
      params=global_task_payload,
      headers={'X-AppEngine-QueueName': 'backend-default'})
  pubsub.publish.assert_called_with(
      'projects/testbed-test/topics/builds',
      json.dumps(
          {
              'build': api_common.build_to_dict(bundle),
              'hostname': 'buildbucket.example.com',
          },
          sort_keys=True),
      {'build_id': '1'},
  )
def test_succeed_with_result_details(self, succeed):
  """result_details pass through the API and echo back in the response."""
  properties = {'p': '0'}
  bundle = test_util.build_bundle(
      id=1,
      tags=[dict(key='t', value='0')],
      output=dict(properties=bbutil.dict_to_struct(properties)),
  )
  succeed.return_value = bundle.build
  bundle.output_properties.put()

  details = {'properties': properties}
  request = {
      'id': '1',
      'lease_key': 42,
      'result_details_json': json.dumps(details),
  }
  body = self.call_api('succeed', request).json_body

  # The service layer received the parsed result details ...
  _, kwargs = service.succeed.call_args
  self.assertEqual(kwargs['result_details'], details)
  # ... and the response echoes the JSON and carries the build tag.
  self.assertEqual(
      body['build']['result_details_json'], request['result_details_json']
  )
  self.assertIn('t:0', body['build']['tags'])
def test_pubsub_callback(self):
  """A per-build callback enqueues both global and callback notifications."""
  bundle = test_util.build_bundle(
      id=1,
      output=dict(properties=bbutil.dict_to_struct({'a': 'b'}), ),
  )
  build = bundle.build
  # Per-build callback: custom topic, user data and auth token.
  build.pubsub_callback = model.PubSubCallback(
      topic='projects/example/topics/buildbucket',
      user_data='hello',
      auth_token='secret',
  )

  # enqueue_notifications_async must run inside a transaction.
  @ndb.transactional
  def txn():
    bundle.put()
    notifications.enqueue_notifications_async(build).get_result()

  txn()
  build = build.key.get()
  global_task_payload = {
      'id': 1,
      'mode': 'global',
  }
  callback_task_payload = {
      'id': 1,
      'mode': 'callback',
  }
  # Two tasks are enqueued: the global one and the callback one.
  tq.enqueue_async.assert_called_with('backend-default', [
      {
          'url': '/internal/task/buildbucket/notify/1',
          'payload': global_task_payload,
          'retry_options': {
              'task_age_limit': model.BUILD_TIMEOUT.total_seconds(),
          },
      },
      {
          'url': '/internal/task/buildbucket/notify/1',
          'payload': callback_task_payload,
          'retry_options': {
              'task_age_limit': model.BUILD_TIMEOUT.total_seconds(),
          },
      },
  ])
  # Global task publishes to the shared builds topic without user data.
  self.app.post_json(
      '/internal/task/buildbucket/notify/1',
      params=global_task_payload,
      headers={'X-AppEngine-QueueName': 'backend-default'})
  pubsub.publish.assert_called_with(
      'projects/testbed-test/topics/builds',
      json.dumps(
          {
              'build': api_common.build_to_dict(bundle),
              'hostname': 'buildbucket.example.com',
          },
          sort_keys=True),
      {'build_id': '1'},
  )
  # Callback task publishes to the custom topic with user data and token.
  self.app.post_json(
      '/internal/task/buildbucket/notify/1',
      params=callback_task_payload,
      headers={'X-AppEngine-QueueName': 'backend-default'})
  pubsub.publish.assert_called_with(
      'projects/example/topics/buildbucket',
      json.dumps(
          {
              'build': api_common.build_to_dict(bundle),
              'hostname': 'buildbucket.example.com',
              'user_data': 'hello',
          },
          sort_keys=True),
      {
          'build_id': '1',
          'auth_token': 'secret',
      },
  )
def test_put_batch(self, add_many_async):
  """put_batch validates requests locally and maps results per operation.

  Four client operations are submitted: '0' and '1' succeed, '2' fails
  local tag validation (never reaches creation), '3' fails at creation.
  """
  bundle1 = test_util.build_bundle(id=1, tags=[dict(key='a', value='b')])
  bundle2 = test_util.build_bundle(id=2)
  bundle1.infra.put()
  bundle2.infra.put()
  config.put_bucket(
      'chromium',
      'a' * 40,
      test_util.parse_bucket_cfg(
          '''
          name: "luci.chromium.try"
          acls {
            role: SCHEDULER
            identity: "anonymous:anonymous"
          }
          '''
      ),
  )
  # Third tuple simulates a creation-time failure for operation '3'.
  add_many_async.return_value = future([
      (bundle1.build, None),
      (bundle2.build, None),
      (None, errors.InvalidInputError('bad')),
  ])
  req = {
      'builds': [
          {
              'bucket': 'luci.chromium.try',
              'tags': ['a:b'],
              'client_operation_id': '0',
          },
          {
              'bucket': 'luci.chromium.try',
              'client_operation_id': '1',
          },
          {
              'bucket': 'luci.chromium.try',
              'tags': ['bad tag'],
              'client_operation_id': '2',
          },
          {
              'bucket': 'luci.chromium.try',
              'client_operation_id': '3',
          },
      ],
  }
  resp = self.call_api('put_batch', req).json_body
  # Operation '2' is rejected before creation, so only three requests
  # reach add_many_async.
  add_many_async.assert_called_once_with([
      creation.BuildRequest(
          schedule_build_request=rpc_pb2.ScheduleBuildRequest(
              builder=dict(project='chromium', bucket='try'),
              tags=[dict(key='a', value='b')],
              request_id='0',
              properties=dict(),
          ),
          parameters={},
      ),
      creation.BuildRequest(
          schedule_build_request=rpc_pb2.ScheduleBuildRequest(
              builder=dict(project='chromium', bucket='try'),
              request_id='1',
              properties=dict(),
          ),
          parameters={},
      ),
      creation.BuildRequest(
          schedule_build_request=rpc_pb2.ScheduleBuildRequest(
              builder=dict(project='chromium', bucket='try'),
              request_id='3',
              properties=dict(),
          ),
          parameters={},
      ),
  ])
  res0 = resp['results'][0]
  self.assertEqual(res0['client_operation_id'], '0')
  self.assertEqual(res0['build']['id'], '1')
  self.assertEqual(res0['build']['bucket'], 'luci.chromium.try')
  res1 = resp['results'][1]
  self.assertEqual(res1['client_operation_id'], '1')
  self.assertEqual(res1['build']['id'], '2')
  self.assertEqual(res1['build']['bucket'], 'luci.chromium.try')
  res2 = resp['results'][2]
  self.assertEqual(
      res2, {
          'client_operation_id': '2',
          'error': {
              'reason': 'INVALID_INPUT',
              'message': u'Invalid tag "bad tag": does not contain ":"',
          },
      }
  )
  res3 = resp['results'][3]
  self.assertEqual(
      res3, {
          'client_operation_id': '3',
          'error': {
              'reason': 'INVALID_INPUT',
              'message': 'bad',
          },
      }
  )
def test_retry(self, add_async):
  """Retrying a build re-schedules it with the original input and tags."""
  props = bbutil.dict_to_struct({
      'foo': 'bar',
      'recipe': 'recipe',
  })
  orig_bundle = test_util.build_bundle(
      id=1,
      input=dict(
          properties=props,
          gitiles_commit=dict(
              host='gitiles.example.com',
              project='chromium/src',
              id='a' * 40,
          ),
      ),
  )
  orig_build = orig_bundle.build
  # Drop 'changes' from parameters; it must not affect the retry request.
  orig_build.parameters.pop('changes')
  orig_build.tags = ['a:b']
  ndb.put_multi([orig_build, orig_bundle.input_properties])
  # The build that creation.add_async will pretend to create.
  retried_build_bundle = test_util.build_bundle(
      id=2,
      input=dict(
          properties=orig_build.proto.input.properties,
          gitiles_commit=orig_build.proto.input.gitiles_commit,
      ),
  )
  retried_build_bundle.infra.put()
  retried_build = retried_build_bundle.build
  retried_build.retry_of = 1
  add_async.return_value = future(retried_build)
  req = {
      'id': '1',
      'client_operation_id': '42',
      'pubsub_callback': {
          'topic': 'projects/foo/topic/bar',
          'user_data': 'hello',
          'auth_token': 'secret',
      },
  }
  resp = self.call_api('retry', req).json_body
  # The retry request carries over builder, properties, tags and commit.
  add_async.assert_called_once_with(
      creation.BuildRequest(
          schedule_build_request=rpc_pb2.ScheduleBuildRequest(
              builder=orig_build.proto.builder,
              request_id='42',
              notify=dict(
                  pubsub_topic='projects/foo/topic/bar',
                  user_data='hello',
              ),
              properties=props,
              tags=[dict(key='a', value='b')],
              canary=common_pb2.NO,
              gitiles_commit=orig_build.proto.input.gitiles_commit,
          ),
          parameters={},
          lease_expiration_date=None,
          retry_of=1,
          pubsub_callback_auth_token='secret',
      )
  )
  self.assertEqual(resp['build']['id'], '2')
  self.assertEqual(resp['build']['bucket'], 'luci.chromium.try')
  self.assertEqual(resp['build']['retry_of'], '1')
def test_load_bundle_with_build_id(self):
  """BuildBundle.get loads the build and the requested entities by id."""
  stored = test_util.build_bundle(id=1)
  stored.put()

  loaded = model.BuildBundle.get(1, infra=True)
  self.assertEqual(loaded.build.key.id(), 1)
  self.assertEqual(loaded.infra, stored.infra)
def setUp(self):
  """Create a SubNotify handler and an (unstored) build bundle."""
  super(SubNotifyTest, self).setUp()
  self.build_bundle = test_util.build_bundle(id=1)
  self.handler = swarming.SubNotify(response=webapp2.Response())
def test_cron_export_builds_to_bq(self):
  """Only completed builds are exported to BigQuery, with trimmed steps.

  Builds 1 (SUCCESS) and 2 (FAILURE) are exported; 3 (SCHEDULED) and
  4 (STARTED) are not. Step summaries and logs are stripped from the
  exported rows.
  """
  bundles = [
      test_util.build_bundle(
          id=1,
          status=common_pb2.SUCCESS,
          infra=dict(swarming=dict(
              task_dimensions=[
                  dict(key='a', value='1', expiration=dict(seconds=1)),
              ],
              caches=[
                  dict(
                      path='a',
                      name='1',
                      wait_for_warm_cache=dict(seconds=1),
                  ),
              ],
          ), ),
      ),
      test_util.build_bundle(id=2, status=common_pb2.FAILURE),
      test_util.build_bundle(id=3, status=common_pb2.SCHEDULED),
      test_util.build_bundle(id=4, status=common_pb2.STARTED),
  ]
  for b in bundles:
    b.put()
  builds = [b.build for b in bundles]
  # Attach steps to the first build to verify step trimming on export.
  build_steps = model.BuildSteps(key=model.BuildSteps.key_for(builds[0].key))
  build_steps.write_steps(
      build_pb2.Build(steps=[
          dict(
              name='bot_update',
              status=common_pb2.SUCCESS,
              summary_markdown='summary_markdown',
              logs=[dict(name='stdout')],
          ),
      ], ))
  build_steps.put()
  self.queue.add([
      taskqueue.Task(method='PULL', payload=json.dumps({'id': b.key.id()}))
      for b in builds
  ])
  bq._process_pull_task_batch(self.queue.name, 'raw', 'completed_builds')
  # One insertAll request with exactly the two completed builds.
  net.json_request.assert_called_once_with(
      url=('https://www.googleapis.com/bigquery/v2/'
           'projects/testbed-test/datasets/raw/tables/'
           'completed_builds/insertAll'),
      method='POST',
      payload={
          'kind': 'bigquery#tableDataInsertAllRequest',
          'skipInvalidRows': True,
          'ignoreUnknownValues': False,
          'rows': [
              {'insertId': '1', 'json': mock.ANY},
              {'insertId': '2', 'json': mock.ANY},
          ],
      },
      scopes=bqh.INSERT_ROWS_SCOPE,
      deadline=5 * 60,
  )
  actual_payload = net.json_request.call_args[1]['payload']
  self.assertEqual(
      [r['json']['id'] for r in actual_payload['rows']],
      [1, 2],
  )
  # Steps are exported with the summary and logs stripped.
  step = actual_payload['rows'][0]['json']['steps'][0]
  self.assertEqual(step['name'], 'bot_update')
  self.assertEqual(step['summary_markdown'], '')
  self.assertNotIn('logs', step)
def test_build_to_dict_non_luci(self):
  """Non-LUCI builds expose their master-style bucket name directly."""
  bundle = test_util.build_bundle(builder=dict(bucket='master.chromium'))
  bundle.build.is_luci = False

  as_dict = api_common.build_to_dict(bundle)
  self.assertEqual(as_dict['project'], 'chromium')
  self.assertEqual(as_dict['bucket'], 'master.chromium')
def setUp(self):
  """Common fixture: frozen clock, identity, two buckets, patched deps."""
  super(BuildBucketServiceTest, self).setUp()
  user.clear_request_cache()
  self.current_identity = auth.Identity('service', 'unittest')
  # side_effect lambda so tests can swap self.current_identity mid-test.
  self.patch(
      'components.auth.get_current_identity',
      side_effect=lambda: self.current_identity)
  self.patch('user.can_async', return_value=future(True))
  self.now = datetime.datetime(2015, 1, 1)
  # side_effect lambda so tests can advance self.now.
  self.patch('components.utils.utcnow', side_effect=lambda: self.now)
  config.put_bucket(
      'chromium', 'a' * 40,
      test_util.parse_bucket_cfg('''
          name: "try"
          acls {
            role: READER
            identity: "anonymous:anonymous"
          }
          '''),
  )
  config.put_bucket(
      'chromium', 'a' * 40,
      test_util.parse_bucket_cfg('''
          name: "luci"
          acls {
            role: READER
            identity: "anonymous:anonymous"
          }
          swarming {
            builders {
              name: "linux"
              swarming_host: "chromium-swarm.appspot.com"
              build_numbers: YES
              recipe {
                cipd_package: "infra/recipe_bundle"
                cipd_version: "refs/heads/master"
                name: "recipe"
              }
            }
          }
          '''),
  )
  self.patch('swarming.cancel_task_async', return_value=future(None))
  self.patch(
      'google.appengine.api.app_identity.get_default_version_hostname',
      autospec=True,
      return_value='buildbucket.example.com')
  self.patch('tq.enqueue_async', autospec=True, return_value=future(None))
  self.patch(
      'config.get_settings_async',
      autospec=True,
      return_value=future(service_config_pb2.SettingsCfg()))
  self.patch(
      'swarming.cancel_task_transactionally_async',
      autospec=True,
      return_value=future(None))
  self.patch('search.TagIndex.random_shard_index', return_value=0)
  test_util.build_bundle(id=1).infra.put()
def test_build_to_dict(self):
  """build_to_dict renders a full V1-style dict for a failed LUCI build.

  Covers parameters/result_details JSON rendering, tag flattening and
  the timestamp/status/canary fields.
  """
  params_json = json.dumps(
      {
          api_common.BUILDER_PARAMETER: 'linux',
          api_common.PROPERTIES_PARAMETER: {
              'build-defined-property': 1.0,
          },
      },
      sort_keys=True,
  )
  tags = [
      'build_address:luci.chromium.try/linux/1',
      'builder:linux',
      'buildset:1',
      'swarming_hostname:swarming.example.com',
      ('swarming_tag:log_location:'
       'logdog://logdog.example.com/chromium/bb/+/annotations'),
      'swarming_tag:luci_project:chromium',
      'swarming_tag:os:Ubuntu',
      'swarming_tag:recipe_name:recipe',
      'swarming_tag:recipe_package:infra/recipe_bundle',
      'swarming_task_id:deadbeef',
  ]
  result_details = {
      'properties': {'a': 'b'},
      'swarming': {
          'bot_dimensions': {
              'dim1': ['v1', 'v2'],
              'os': ['Ubuntu'],
          },
      },
      'error': {'message': 'bad'},
      'ui': {'info': 'bad'},
  }
  expected = {
      'project': 'chromium',
      'bucket': 'luci.chromium.try',
      'created_by': 'anonymous:anonymous',
      'created_ts': '1483228800000000',
      'experimental': False,
      'completed_ts': '1483228800000000',
      'id': '8991715593768927232',
      'parameters_json': params_json,
      'result_details_json': json.dumps(result_details, sort_keys=True),
      'status': 'COMPLETED',
      'result': 'FAILURE',
      'failure_reason': 'INFRA_FAILURE',
      'status_changed_ts': '1483228800000000',
      'tags': tags,
      'utcnow_ts': '1483228800000000',
      'updated_ts': '1483228800000000',
      'canary_preference': 'PROD',
      'canary': False,
      'service_account': '*****@*****.**',
      'url': 'https://ci.example.com/8991715593768927232',
  }
  bundle = test_util.build_bundle(
      status=common_pb2.INFRA_FAILURE,
      summary_markdown='bad',
      input=dict(properties=bbutil.dict_to_struct({
          'recipe': 'recipe',
          'build-defined-property': 1,
          'builder-defined-property': 2,
      }), ),
      output=dict(properties=bbutil.dict_to_struct({'a': 'b'}), ),
      infra=dict(
          swarming=dict(bot_dimensions=[
              dict(key='dim1', value='v1'),
              dict(key='dim1', value='v2'),
              dict(key='os', value='Ubuntu'),
          ], ),
          buildbucket=dict(requested_properties=bbutil.dict_to_struct({
              'build-defined-property': 1,
          }), ),
      ))
  self.assertEqual(
      expected, test_util.ununicode(api_common.build_to_dict(bundle)))