Example 1
 def test_invalid_tags(self):
     msg = rpc_pb2.BuildPredicate(
         builder=build_pb2.BuilderID(project='chromium',
                                     bucket='try',
                                     builder='linux-rel'),
         tags=[common_pb2.StringPair(key='', value='')],
     )
     self.assert_invalid(msg, r'tags: Invalid tag')
Example 2
    def test_tags(self):
        build = test_util.build()
        self.assertFalse(build.proto.tags)
        build.tags = [
            'a:b',
            'builder:hidden',
        ]

        actual = self.to_proto(build, load_tags=True)
        self.assertEqual(list(actual.tags),
                         [common_pb2.StringPair(key='a', value='b')])
Example 3
def parse_v1_tags(v1_tags):
    """Parses V1 tags.

  Returns a tuple of:
    v2_tags: list of StringPair
    gitiles_commit: common_pb2.GitilesCommit or None
    gerrit_changes: list of common_pb2.GerritChange.
  """
    v2_tags = []
    gitiles_commit = None
    gitiles_ref = None
    gerrit_changes = []

    for t in v1_tags:
        key, value = buildtags.parse(t)

        if key == buildtags.GITILES_REF_KEY:
            gitiles_ref = value
            continue

        if key == buildtags.BUILDSET_KEY:
            commit = buildtags.parse_gitiles_commit_buildset(value)
            if commit:
                if gitiles_commit:  # pragma: no cover
                    raise errors.InvalidInputError('multiple gitiles commit')
                gitiles_commit = commit
                continue

            cl = buildtags.parse_gerrit_change_buildset(value)
            if cl:
                gerrit_changes.append(cl)
                continue

        v2_tags.append(common_pb2.StringPair(key=key, value=value))

    if gitiles_commit and not gitiles_commit.ref:
        gitiles_commit.ref = gitiles_ref or 'refs/heads/master'

    return v2_tags, gitiles_commit, gerrit_changes
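
For illustration, a rough usage sketch of parse_v1_tags. It assumes that the value '1' matches neither the gitiles-commit nor the gerrit-change buildset format (those parsers are not shown in this excerpt), so the tag falls through to v2_tags:

v1 = ['builder:linux-rel', 'buildset:1']
v2_tags, gitiles_commit, gerrit_changes = parse_v1_tags(v1)
# v2_tags == [common_pb2.StringPair(key='builder', value='linux-rel'),
#             common_pb2.StringPair(key='buildset', value='1')]
# gitiles_commit is None and gerrit_changes == [], since no gitiles commit
# or gerrit change buildset was present in the input.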
Example 4
class SyncBuildTest(BaseTest):
    def test_validate(self):
        build = test_util.build()
        swarming.validate_build(build)

    def test_validate_lease_key(self):
        build = test_util.build()
        build.lease_key = 123
        with self.assertRaises(errors.InvalidInputError):
            swarming.validate_build(build)

    @parameterized.expand([
        (dict(
            infra=dict(
                swarming=dict(
                    task_dimensions=[
                        dict(key='a', value='b', expiration=dict(seconds=60 * i))
                        for i in xrange(7)
                    ],
                ),
            ),
        ), ),
    ])
    def test_validate_fails(self, build_params):
        build = test_util.build(for_creation=True, **build_params)
        with self.assertRaises(errors.InvalidInputError):
            swarming.validate_build(build)

    @parameterized.expand([
        ({
            'task_result': None,
            'status': common_pb2.INFRA_FAILURE,
            'end_time': test_util.dt2ts(NOW),
        }, ),
        ({
            'task_result': {
                'state': 'PENDING'
            },
            'status': common_pb2.SCHEDULED,
        }, ),
        ({
            'task_result': {
                'state': 'RUNNING',
                'started_ts': '2018-01-29T21:15:02.649750',
            },
            'status': common_pb2.STARTED,
            'start_time': tspb(seconds=1517260502, nanos=649750000),
        }, ),
        ({
            'task_result': {
                'state': 'COMPLETED',
                'started_ts': '2018-01-29T21:15:02.649750',
                'completed_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.SUCCESS,
            'start_time': tspb(seconds=1517260502, nanos=649750000),
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'COMPLETED',
                'started_ts': '2018-01-29T21:15:02.649750',
                'completed_ts': '2018-01-30T00:15:18.162860',
                'bot_dimensions': [
                    {'key': 'os', 'value': ['Ubuntu', 'Trusty']},
                    {'key': 'pool', 'value': ['luci.chromium.try']},
                    {'key': 'id', 'value': ['bot1']},
                ],
            },
            'status': common_pb2.SUCCESS,
            'bot_dimensions': [
                common_pb2.StringPair(key='id', value='bot1'),
                common_pb2.StringPair(key='os', value='Trusty'),
                common_pb2.StringPair(key='os', value='Ubuntu'),
                common_pb2.StringPair(key='pool', value='luci.chromium.try'),
            ],
            'start_time': tspb(seconds=1517260502, nanos=649750000),
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'COMPLETED',
                'failure': True,
                'started_ts': '2018-01-29T21:15:02.649750',
                'completed_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.FAILURE,
            'start_time': tspb(seconds=1517260502, nanos=649750000),
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'COMPLETED',
                'failure': True,
                'internal_failure': True,
                'started_ts': '2018-01-29T21:15:02.649750',
                'completed_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.INFRA_FAILURE,
            'start_time': tspb(seconds=1517260502, nanos=649750000),
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'BOT_DIED',
                'started_ts': '2018-01-29T21:15:02.649750',
                'abandoned_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.INFRA_FAILURE,
            'start_time': tspb(seconds=1517260502, nanos=649750000),
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'TIMED_OUT',
                'started_ts': '2018-01-29T21:15:02.649750',
                'completed_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.INFRA_FAILURE,
            'is_timeout': True,
            'start_time': tspb(seconds=1517260502, nanos=649750000),
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'EXPIRED',
                'abandoned_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.INFRA_FAILURE,
            'is_resource_exhaustion': True,
            'is_timeout': True,
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'KILLED',
                'abandoned_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.CANCELED,
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'CANCELED',
                'abandoned_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.CANCELED,
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        ({
            'task_result': {
                'state': 'NO_RESOURCE',
                'abandoned_ts': '2018-01-30T00:15:18.162860',
            },
            'status': common_pb2.INFRA_FAILURE,
            'is_resource_exhaustion': True,
            'end_time': tspb(seconds=1517271318, nanos=162860000),
        }, ),
        # NO_RESOURCE with abandoned_ts before creation time.
        (
            {
                'task_result': {
                    'state': 'NO_RESOURCE',
                    'abandoned_ts': '2015-11-29T00:15:18.162860',
                },
                'status': common_pb2.INFRA_FAILURE,
                'is_resource_exhaustion': True,
                'end_time': test_util.dt2ts(NOW),
            }, ),
    ])
    def test_sync(self, case):
        logging.info('test case: %s', case)
        build = test_util.build(id=1)
        build.put()

        swarming._sync_build_async(1, case['task_result']).get_result()

        build = build.key.get()
        bp = build.proto
        self.assertEqual(bp.status, case['status'])
        self.assertEqual(
            bp.status_details.HasField('timeout'),
            case.get('is_timeout', False),
        )
        self.assertEqual(bp.status_details.HasField('resource_exhaustion'),
                         case.get('is_resource_exhaustion', False))

        self.assertEqual(bp.start_time, case.get('start_time', tspb(0)))
        self.assertEqual(bp.end_time, case.get('end_time', tspb(0)))
        self.assertEqual(list(build.parse_infra().swarming.bot_dimensions),
                         case.get('bot_dimensions', []))
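
For reference, the Swarming task state to Buildbucket status mapping exercised by the cases above can be summarized as follows. This is only a digest of the test data (with a hypothetical constant name), not the production mapping code:

EXPECTED_STATUS_BY_SWARMING_STATE = {
    'PENDING': common_pb2.SCHEDULED,
    'RUNNING': common_pb2.STARTED,
    'COMPLETED': common_pb2.SUCCESS,          # FAILURE when failure=True,
                                              # INFRA_FAILURE when internal_failure=True
    'BOT_DIED': common_pb2.INFRA_FAILURE,
    'TIMED_OUT': common_pb2.INFRA_FAILURE,    # also sets status_details.timeout
    'EXPIRED': common_pb2.INFRA_FAILURE,      # timeout + resource_exhaustion
    'KILLED': common_pb2.CANCELED,
    'CANCELED': common_pb2.CANCELED,
    'NO_RESOURCE': common_pb2.INFRA_FAILURE,  # resource_exhaustion
}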
Example 5
 def test_no_key(self):
     pairs = [common_pb2.StringPair(key='', value='a')]
     self.assert_invalid(pairs, r'Invalid tag ":a": starts with ":"')
Example 6
 def test_key_has_colon(self):
     pairs = [common_pb2.StringPair(key='a:b', value='c')]
     self.assert_invalid(pairs, r'tag key "a:b" cannot have a colon')
Example 7
 def test_valid(self):
     pairs = [common_pb2.StringPair(key='a', value='b')]
     self.assert_valid(pairs)
Example 8
    """Converts a protobuf message to dict.

  Very inefficient. Use only in tests.
  Useful to compare protobuf messages, because unittest.assertEqual has special
  support for dicts, but not protobuf messages.
  """
    return json.loads(json_format.MessageToJson(message))
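
# A quick standalone sketch of the comparison technique described in the
# docstring above, using json_format directly since this excerpt does not
# show the helper's own name:
pair_a = json.loads(json_format.MessageToJson(common_pb2.StringPair(key='a', value='b')))
pair_b = json.loads(json_format.MessageToJson(common_pb2.StringPair(key='a', value='b')))
assert pair_a == pair_b  # in a TestCase, self.assertEqual(pair_a, pair_b) gives a dict diff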


def parse_bucket_cfg(text):
    cfg = project_config_pb2.Bucket()
    text_format.Merge(text, cfg)
    return cfg


INDEXED_TAG = common_pb2.StringPair(key='buildset', value='1')
INDEXED_TAG_STRING = 'buildset:1'
BUILD_DEFAULTS = build_pb2.Build(
    builder=dict(project='chromium', bucket='try', builder='linux'),
    number=1,
    status=common_pb2.SCHEDULED,
    created_by='anonymous:anonymous',
    tags=[INDEXED_TAG],
    canary=False,
    exe=dict(
        cipd_package='infra/recipe_bundle',
        cipd_version='refs/heads/master',
    ),
    infra=dict(
        swarming=dict(
            hostname='swarming.example.com',
Example 9
class SyncBuildTest(BaseTest):

  def setUp(self):
    super(SyncBuildTest, self).setUp()
    self.patch('components.net.json_request_async', autospec=True)
    self.patch('components.auth.delegate_async', return_value=future('blah'))

    self.build_token = 'beeff00d'
    self.patch(
        'tokens.generate_build_token', autospec=True, return_value='deadbeef'
    )

    self.task_def = {'is_task_def': True, 'task_slices': [{
        'properties': {},
    }]}
    self.patch(
        'swarming.compute_task_def', autospec=True, return_value=self.task_def
    )
    self.patch(
        'google.appengine.api.app_identity.get_default_version_hostname',
        return_value='cr-buildbucket.appspot.com'
    )

    self.build_bundle = test_util.build_bundle(
        id=1, created_by='user:[email protected]'
    )
    self.build_bundle.build.swarming_task_key = None
    with self.build_bundle.infra.mutate() as infra:
      infra.swarming.task_id = ''
    self.build_bundle.put()

  @property
  def build(self):
    return self.build_bundle.build

  def _create_task(self):
    self.build_bundle.build.proto.infra.ParseFromString(
        self.build_bundle.infra.infra
    )
    self.build_bundle.build.proto.input.properties.ParseFromString(
        self.build_bundle.input_properties.properties
    )
    swarming._create_swarming_task(self.build_bundle.build)

  def test_create_task(self):
    expected_task_def = self.task_def.copy()
    expected_secrets = launcher_pb2.BuildSecrets(build_token=self.build_token)
    expected_task_def[u'task_slices'][0][u'properties'][u'secret_bytes'] = (
        base64.b64encode(expected_secrets.SerializeToString())
    )

    net.json_request_async.return_value = future({'task_id': 'x'})
    swarming._sync_build_and_swarming(1, 0)

    actual_task_def = net.json_request_async.call_args[1]['payload']
    self.assertEqual(actual_task_def, expected_task_def)

    self.assertEqual(
        net.json_request_async.call_args[0][0],
        'https://swarming.example.com/_ah/api/swarming/v1/tasks/new'
    )

    # Test delegation token params.
    self.assertEqual(
        auth.delegate_async.mock_calls, [
            mock.call(
                services=[u'https://swarming.example.com'],
                audience=[auth.Identity('user', 'test@localhost')],
                impersonate=auth.Identity('user', '*****@*****.**'),
                tags=['buildbucket:bucket:chromium/try'],
            )
        ]
    )

    # Assert that we've persisted information about the new task.
    bundle = model.BuildBundle.get(1, infra=True)
    self.assertIsNotNone(bundle)
    self.assertTrue(bundle.build.swarming_task_key)
    self.assertTrue(bundle.infra.parse().swarming.task_id)

    expected_continuation_payload = {
        'id': 1,
        'generation': 1,
    }
    expected_continuation = {
        'name': 'sync-task-1-1',
        'url': '/internal/task/swarming/sync-build/1',
        'payload': json.dumps(expected_continuation_payload, sort_keys=True),
        'retry_options': {
            'task_age_limit': model.BUILD_TIMEOUT.total_seconds()
        },
        'countdown': 60,
    }
    tq.enqueue_async.assert_called_with(
        swarming.SYNC_QUEUE_NAME, [expected_continuation], transactional=False
    )

  @mock.patch('swarming.cancel_task', autospec=True)
  def test_already_exists_after_creation(self, cancel_task):

    @ndb.tasklet
    def json_request_async(*_args, **_kwargs):
      with self.build_bundle.infra.mutate() as infra:
        infra.swarming.task_id = 'deadbeef'
      yield self.build_bundle.infra.put_async()

      raise ndb.Return({'task_id': 'new task'})

    net.json_request_async.side_effect = json_request_async

    self._create_task()
    cancel_task.assert_called_with('swarming.example.com', 'new task')

  def test_http_400(self):
    net.json_request_async.return_value = future_exception(
        net.Error('HTTP 401', 400, 'invalid request')
    )

    self._create_task()

    build = self.build.key.get()
    self.assertEqual(build.status, common_pb2.INFRA_FAILURE)
    self.assertEqual(
        build.proto.summary_markdown,
        r'Swarming task creation API responded with HTTP 400: `invalid request`'
    )

  def test_http_500(self):
    net.json_request_async.return_value = future_exception(
        net.Error('internal', 500, 'Internal server error')
    )

    with self.assertRaises(net.Error):
      self._create_task()

  def test_validate(self):
    build = test_util.build()
    swarming.validate_build(build)

  def test_validate_lease_key(self):
    build = test_util.build()
    build.lease_key = 123
    with self.assertRaises(errors.InvalidInputError):
      swarming.validate_build(build)

  @parameterized.expand([
      (
          dict(
              infra=dict(
                  swarming=dict(
                      task_dimensions=[
                          dict(
                              key='a',
                              value='b',
                              expiration=dict(seconds=60 * i)
                          ) for i in xrange(7)
                      ],
                  ),
              ),
          ),
      ),
  ])
  def test_validate_fails(self, build_params):
    build = test_util.build(for_creation=True, **build_params)
    with self.assertRaises(errors.InvalidInputError):
      swarming.validate_build(build)

  @parameterized.expand([
      ({
          'task_result': None,
          'status': common_pb2.INFRA_FAILURE,
          'end_time': test_util.dt2ts(NOW),
      },),
      ({
          'task_result': {'state': 'PENDING'},
          'status': common_pb2.SCHEDULED,
      },),
      ({
          'task_result': {
              'state': 'RUNNING',
              'started_ts': '2018-01-29T21:15:02.649750',
          },
          'status': common_pb2.STARTED,
          'start_time': tspb(seconds=1517260502, nanos=649750000),
      },),
      ({
          'task_result': {
              'state': 'COMPLETED',
              'started_ts': '2018-01-29T21:15:02.649750',
              'completed_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.SUCCESS,
          'start_time': tspb(seconds=1517260502, nanos=649750000),
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'COMPLETED',
              'started_ts': '2018-01-29T21:15:02.649750',
              'completed_ts': '2018-01-30T00:15:18.162860',
              'bot_dimensions': [
                  {'key': 'os', 'value': ['Ubuntu', 'Trusty']},
                  {'key': 'pool', 'value': ['luci.chromium.try']},
                  {'key': 'id', 'value': ['bot1']},
              ],
          },
          'status': common_pb2.SUCCESS,
          'bot_dimensions': [
              common_pb2.StringPair(key='id', value='bot1'),
              common_pb2.StringPair(key='os', value='Trusty'),
              common_pb2.StringPair(key='os', value='Ubuntu'),
              common_pb2.StringPair(key='pool', value='luci.chromium.try'),
          ],
          'start_time': tspb(seconds=1517260502, nanos=649750000),
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'COMPLETED',
              'failure': True,
              'started_ts': '2018-01-29T21:15:02.649750',
              'completed_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.INFRA_FAILURE,
          'start_time': tspb(seconds=1517260502, nanos=649750000),
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'BOT_DIED',
              'started_ts': '2018-01-29T21:15:02.649750',
              'abandoned_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.INFRA_FAILURE,
          'start_time': tspb(seconds=1517260502, nanos=649750000),
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'TIMED_OUT',
              'started_ts': '2018-01-29T21:15:02.649750',
              'completed_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.INFRA_FAILURE,
          'is_timeout': True,
          'start_time': tspb(seconds=1517260502, nanos=649750000),
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'EXPIRED',
              'abandoned_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.INFRA_FAILURE,
          'is_resource_exhaustion': True,
          'is_timeout': True,
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'KILLED',
              'abandoned_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.CANCELED,
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'CANCELED',
              'abandoned_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.CANCELED,
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      ({
          'task_result': {
              'state': 'NO_RESOURCE',
              'abandoned_ts': '2018-01-30T00:15:18.162860',
          },
          'status': common_pb2.INFRA_FAILURE,
          'is_resource_exhaustion': True,
          'end_time': tspb(seconds=1517271318, nanos=162860000),
      },),
      # NO_RESOURCE with abandoned_ts before creation time.
      (
          {
              'task_result': {
                  'state': 'NO_RESOURCE',
                  'abandoned_ts': '2015-11-29T00:15:18.162860',
              },
              'status': common_pb2.INFRA_FAILURE,
              'is_resource_exhaustion': True,
              'end_time': test_util.dt2ts(NOW),
          },
      ),
  ])
  def test_sync_with_task_result(self, case):
    logging.info('test case: %s', case)
    bundle = test_util.build_bundle(id=1)
    bundle.put()

    self.patch(
        'swarming._load_task_result',
        autospec=True,
        return_value=case['task_result'],
    )

    swarming._sync_build_and_swarming(1, 1)

    build = bundle.build.key.get()
    build_infra = bundle.infra.key.get()
    bp = build.proto
    self.assertEqual(bp.status, case['status'])
    self.assertEqual(
        bp.status_details.HasField('timeout'),
        case.get('is_timeout', False),
    )
    self.assertEqual(
        bp.status_details.HasField('resource_exhaustion'),
        case.get('is_resource_exhaustion', False)
    )

    self.assertEqual(bp.start_time, case.get('start_time', tspb(0)))
    self.assertEqual(bp.end_time, case.get('end_time', tspb(0)))

    self.assertEqual(
        list(build_infra.parse().swarming.bot_dimensions),
        case.get('bot_dimensions', [])
    )

    expected_continuation_payload = {
        'id': 1,
        'generation': 2,
    }
    expected_continuation = {
        'name': 'sync-task-1-2',
        'url': '/internal/task/swarming/sync-build/1',
        'payload': json.dumps(expected_continuation_payload, sort_keys=True),
        'retry_options': {
            'task_age_limit': model.BUILD_TIMEOUT.total_seconds()
        },
        'countdown': 60,
    }
    tq.enqueue_async.assert_called_with(
        swarming.SYNC_QUEUE_NAME, [expected_continuation], transactional=False
    )

  def test_termination(self):
    self.build.proto.status = common_pb2.SUCCESS
    self.build.proto.end_time.FromDatetime(utils.utcnow())
    self.build.put()

    swarming._sync_build_and_swarming(1, 1)
    self.assertFalse(tq.enqueue_async.called)