Example #1
def auth_db_snapshot_to_proto(snapshot, auth_db_proto=None):
    """Writes AuthDBSnapshot into replication_pb2.AuthDB message.

  Args:
    snapshot: instance of AuthDBSnapshot with entities to convert to protobuf.
    auth_db_proto: optional instance of replication_pb2.AuthDB to update.

  Returns:
    Instance of replication_pb2.AuthDB (same as |auth_db_proto| if passed).
  """
    auth_db_proto = auth_db_proto or replication_pb2.AuthDB()

    auth_db_proto.oauth_client_id = snapshot.global_config.oauth_client_id or ''
    auth_db_proto.oauth_client_secret = (
        snapshot.global_config.oauth_client_secret or '')
    if snapshot.global_config.oauth_additional_client_ids:
        auth_db_proto.oauth_additional_client_ids.extend(
            snapshot.global_config.oauth_additional_client_ids)

    auth_db_proto.token_server_url = snapshot.global_config.token_server_url or ''

    for ent in snapshot.groups:
        msg = auth_db_proto.groups.add()
        msg.name = ent.key.id()
        msg.members.extend(ident.to_bytes() for ident in ent.members)
        msg.globs.extend(glob.to_bytes() for glob in ent.globs)
        msg.nested.extend(ent.nested)
        msg.description = ent.description
        msg.created_ts = utils.datetime_to_timestamp(ent.created_ts)
        msg.created_by = ent.created_by.to_bytes()
        msg.modified_ts = utils.datetime_to_timestamp(ent.modified_ts)
        msg.modified_by = ent.modified_by.to_bytes()
        msg.owners = ent.owners

    for ent in snapshot.ip_whitelists:
        msg = auth_db_proto.ip_whitelists.add()
        msg.name = ent.key.id()
        msg.subnets.extend(ent.subnets)
        msg.description = ent.description
        msg.created_ts = utils.datetime_to_timestamp(ent.created_ts)
        msg.created_by = ent.created_by.to_bytes()
        msg.modified_ts = utils.datetime_to_timestamp(ent.modified_ts)
        msg.modified_by = ent.modified_by.to_bytes()

    for ent in snapshot.ip_whitelist_assignments.assignments:
        msg = auth_db_proto.ip_whitelist_assignments.add()
        msg.identity = ent.identity.to_bytes()
        msg.ip_whitelist = ent.ip_whitelist
        msg.comment = ent.comment or ''
        msg.created_ts = utils.datetime_to_timestamp(ent.created_ts)
        msg.created_by = ent.created_by.to_bytes()

    return auth_db_proto
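A minimal usage sketch, assuming the `replication.new_auth_db_snapshot()` helper that Example #19 below also uses; `SerializeToString` is the standard protobuf serializer:

_, snapshot = replication.new_auth_db_snapshot()
proto = auth_db_snapshot_to_proto(snapshot)
blob = proto.SerializeToString()  # binary replication_pb2.AuthDB message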
Example #2
    def test_build_steps_keys_only(self, enqueue_tasks):
        build_steps = [
            model.BuildSteps(parent=ndb.Key(model.Build, i),
                             step_container_bytes='') for i in xrange(50, 60)
        ]
        ndb.put_multi(build_steps)

        def processor(results, payload):
            # Take 5
            page = list(itertools.islice(results, 5))
            self.assertEqual(page, [b.key for b in build_steps[:5]])
            self.assertEqual(payload, 'bar')

        self.proc = {
            'entity_kind': 'BuildSteps',
            'func': processor,
            'keys_only': True,
        }

        self.post({
            'job_id': 'jobid',
            'iteration': 0,
            'seg_index': 0,
            'seg_start': 50,
            'seg_end': 59,
            'started_ts': utils.datetime_to_timestamp(self.now),
            'proc': {
                'name': 'foo',
                'payload': 'bar'
            },
        })

        expected_next_payload = {
            'job_id': 'jobid',
            'iteration': 1,
            'seg_index': 0,
            'seg_start': 50,
            'seg_end': 59,
            'start_from': 55,
            'started_ts': utils.datetime_to_timestamp(self.now),
            'proc': {
                'name': 'foo',
                'payload': 'bar'
            },
        }
        enqueue_tasks.assert_called_with(
            'bulkproc',
            [(
                'jobid-0-1',
                bulkproc.PATH_PREFIX + 'segment/seg:0-percent:50',
                utils.encode_to_json(expected_next_payload),
            )],
        )
Example #3
    def test_start(self, enqueue_tasks):
        # create a build a day for 3 days
        proc = {'name': 'foo', 'payload': 'bar'}
        self.post({
            'proc': proc,
        })

        # Expect a segment for each day.
        seg_path_prefix = bulkproc.PATH_PREFIX + 'segment/'
        self.assertEqual(enqueue_tasks.call_count, 24)
        all_tasks = []
        for (queue_name, tasks), _ in enqueue_tasks.call_args_list:
            self.assertEqual(queue_name, 'bulkproc')
            all_tasks.extend(tasks)
        self.assertEqual(len(all_tasks), 2165)
        self.assertEqual(
            all_tasks[0],
            (
                None,
                seg_path_prefix + 'seg:0-percent:0',
                utils.encode_to_json(
                    {
                        'job_id': 'taskname',
                        'iteration': 0,
                        'seg_index': 0,
                        'seg_start': 8991624996803575808,
                        'seg_end': 8991647646045175807,
                        'started_ts': utils.datetime_to_timestamp(self.now),
                        'proc': proc,
                    }),
            ),
        )
        self.assertEqual(
            all_tasks[1],
            (
                None,
                seg_path_prefix + 'seg:1-percent:0',
                utils.encode_to_json(
                    {
                        'job_id': 'taskname',
                        'iteration': 0,
                        'seg_index': 1,
                        'seg_start': 8991647646045175808,
                        'seg_end': 8991670295286775807,
                        'started_ts': utils.datetime_to_timestamp(self.now),
                        'proc': proc,
                    }),
            ),
        )
Example #4
  def test_released(self):
    request = rpc_messages.LeaseRequest(
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
        duration=1,
        request_id='fake-id',
    )
    lease_key = models.LeaseRequest(
        deduplication_checksum=
            models.LeaseRequest.compute_deduplication_checksum(request),
        machine_id='id',
        owner=auth_testing.DEFAULT_MOCKED_IDENTITY,
        released=True,
        request=request,
        response=rpc_messages.LeaseResponse(
            client_request_id='fake-id',
        ),
    ).put()
    machine_key = models.CatalogMachineEntry(
        id='id',
        dimensions=rpc_messages.Dimensions(
            os_family=rpc_messages.OSFamily.LINUX,
        ),
    ).put()

    handlers_cron.release_lease(lease_key)
    self.assertFalse(lease_key.get().released)
    self.assertEqual(
        lease_key.get().response.lease_expiration_ts,
        utils.datetime_to_timestamp(
            machine_key.get().lease_expiration_ts) / 1000 / 1000,
    )
Example #5
def build_to_message(build, include_lease_key=False):
  """Converts model.Build to BuildMessage."""
  assert build
  assert build.key
  assert build.key.id()

  msg = BuildMessage(
    id=build.key.id(),
    bucket=build.bucket,
    tags=build.tags,
    parameters_json=json.dumps(build.parameters or {}, sort_keys=True),
    status=build.status,
    result=build.result,
    result_details_json=json.dumps(build.result_details),
    cancelation_reason=build.cancelation_reason,
    failure_reason=build.failure_reason,
    lease_key=build.lease_key if include_lease_key else None,
    url=build.url,
    created_ts=datetime_to_timestamp_safe(build.create_time),
    updated_ts=datetime_to_timestamp_safe(build.update_time),
    completed_ts=datetime_to_timestamp_safe(build.complete_time),
    created_by=build.created_by.to_bytes() if build.created_by else None,
    status_changed_ts=datetime_to_timestamp_safe(build.status_changed_time),
    utcnow_ts=datetime_to_timestamp_safe(utils.utcnow()),
  )
  if build.lease_expiration_date is not None:
    msg.lease_expiration_ts = utils.datetime_to_timestamp(
      build.lease_expiration_date)
  return msg
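`datetime_to_timestamp_safe` is not defined on this page. A plausible minimal sketch, inferred from its use on optional fields such as `build.complete_time` (an assumption, not the actual helper):

def datetime_to_timestamp_safe(value):
    # Hypothetical stand-in: like utils.datetime_to_timestamp, but tolerates
    # None (e.g. complete_time of a build that has not finished yet).
    if value is None:
        return None
    return utils.datetime_to_timestamp(value)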
Example #6
def _buildbucket_property(build):
    """Returns value for 'buildbucket' build property.

  The format of the returned value corresponds the one used in
  buildbot-buildbucket integration [1], with two exceptions:
  - it is not encoded in JSON
  - the list of tags are initial tags only.
    Does not include auto-generated tags.

  [1]:
  https://chromium.googlesource.com/chromium/tools/build/+/82373bb503dca5f91cd0988d49df38394fdf8b0b/scripts/master/buildbucket/integration.py#329
  """
    return {
        'build': {
            'bucket': build.bucket,
            'created_by': build.created_by.to_bytes(),
            'created_ts': utils.datetime_to_timestamp(build.create_time),
            'id': str(build.key.id()),
            # Note: this includes only user-specified tags.
            # It does not include auto-generated tags, such as "swarming_tag".
            # This is a bit different from Buildbot-Buildbucket integration.
            # In practice, however, only "buildset" tag is read from this list.
            'tags': build.initial_tags,
        },
    }
Example #7
def package_to_proto(entity):
  """Package entity -> Package proto message."""
  return Package(
      package_name=entity.package_name,
      registered_by=entity.registered_by.to_bytes(),
      registered_ts=utils.datetime_to_timestamp(entity.registered_ts),
      hidden=bool(entity.hidden)) # None and False are not the same in protorpc
Example #8
def package_ref_to_proto(entity):
  """PackageRef entity -> PackageRef proto message."""
  return PackageRef(
      ref=entity.ref,
      instance_id=entity.instance_id,
      modified_by=entity.modified_by.to_bytes(),
      modified_ts=utils.datetime_to_timestamp(entity.modified_ts))
Example #9
 def test_expired_build_to_message(self):
   yesterday = utils.utcnow() - datetime.timedelta(days=1)
   yesterday_timestamp = utils.datetime_to_timestamp(yesterday)
   self.test_build.lease_key = 1
   self.test_build.lease_expiration_date = yesterday
   msg = api.build_to_message(self.test_build)
   self.assertEqual(msg.lease_expiration_ts, yesterday_timestamp)
Example #10
def build_to_message(build, include_lease_key=False):
    """Converts model.Build to BuildMessage."""
    assert build
    assert build.key
    assert build.key.id()

    msg = BuildMessage(
        id=build.key.id(),
        bucket=build.bucket,
        tags=build.tags,
        parameters_json=json.dumps(build.parameters or {}, sort_keys=True),
        status=build.status,
        result=build.result,
        result_details_json=json.dumps(build.result_details),
        cancelation_reason=build.cancelation_reason,
        failure_reason=build.failure_reason,
        lease_key=build.lease_key if include_lease_key else None,
        url=build.url,
        created_ts=datetime_to_timestamp_safe(build.create_time),
        started_ts=datetime_to_timestamp_safe(build.start_time),
        updated_ts=datetime_to_timestamp_safe(build.update_time),
        completed_ts=datetime_to_timestamp_safe(build.complete_time),
        created_by=build.created_by.to_bytes() if build.created_by else None,
        status_changed_ts=datetime_to_timestamp_safe(
            build.status_changed_time),
        utcnow_ts=datetime_to_timestamp_safe(utils.utcnow()),
        retry_of=build.retry_of,
    )
    if build.lease_expiration_date is not None:
        msg.lease_expiration_ts = utils.datetime_to_timestamp(
            build.lease_expiration_date)
    return msg
Example #11
  def start_direct_upload(self, hash_algo):
    """Can be used to upload data to CAS directly from an Appengine handler.

    Opens a temp file for writing (and returns wrapper around it). Hashes the
    data while it is being written, and moves the temp file to an appropriate
    location in CAS once it is closed.

    Args:
      hash_algo: algorithm to use to calculate data hash.

    Returns:
      DirectUpload object to write data to.
    """
    assert is_supported_hash_algo(hash_algo)
    ts_sec = utils.datetime_to_timestamp(utils.utcnow()) / 1000000.
    temp_path = self._temp_direct_upload_gs_path(ts_sec)
    temp_file = cloudstorage.open(
        filename=temp_path,
        mode='w',
        retry_params=self._retry_params)
    def commit_callback(hash_digest, commit):
      if commit:
        self._gs_copy(temp_path, self._verified_gs_path(hash_algo, hash_digest))
      self._gs_delete(temp_path)
    return DirectUpload(
        file_obj=temp_file,
        hasher=SUPPORTED_HASH_ALGOS[hash_algo][0](),
        callback=commit_callback)
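A hedged usage sketch: `storage` stands for an instance of this class and `'SHA-1'` for a key of SUPPORTED_HASH_ALGOS (both assumptions), and the file-like `write`/`close` semantics on DirectUpload are implied by the docstring rather than shown here:

upload = storage.start_direct_upload('SHA-1')  # hypothetical instance and key
try:
    upload.write('chunk of data')  # hashed incrementally as it is written
finally:
    # Per the docstring, closing is what moves the temp file into its
    # final CAS location (via commit_callback above).
    upload.close()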
Example #12
 def test_expired_build_to_message(self):
     yesterday = utils.utcnow() - datetime.timedelta(days=1)
     yesterday_timestamp = utils.datetime_to_timestamp(yesterday)
     self.test_build.lease_key = 1
     self.test_build.lease_expiration_date = yesterday
     msg = api.build_to_message(self.test_build)
     self.assertEqual(msg.lease_expiration_ts, yesterday_timestamp)
Example #13
def instance_to_proto(entity):
  """PackageInstance entity -> PackageInstance proto message."""
  return PackageInstance(
      package_name=entity.package_name,
      instance_id=entity.instance_id,
      registered_by=entity.registered_by.to_bytes(),
      registered_ts=utils.datetime_to_timestamp(entity.registered_ts))
Example #14
def release_lease(lease_key):
  """Releases a lease on a machine.

  Args:
    lease_key: ndb.Key for a models.LeaseRequest entity.
  """
  lease = lease_key.get()
  if not lease:
    logging.warning('LeaseRequest not found: %s', lease_key)
    return
  if not lease.released:
    logging.warning('LeaseRequest not released:\n%s', lease)
    return

  lease.released = False
  if not lease.machine_id:
    logging.warning('LeaseRequest has no associated machine:\n%s', lease)
    lease.put()
    return

  machine = ndb.Key(models.CatalogMachineEntry, lease.machine_id).get()
  if not machine:
    logging.error('LeaseRequest has non-existent machine leased:\n%s', lease)
    lease.put()
    return

  # Just expire the lease now and let MachineReclamationProcessor handle it.
  logging.info('Expiring LeaseRequest:\n%s', lease)
  now = utils.utcnow()
  lease.response.lease_expiration_ts = utils.datetime_to_timestamp(
      now) / 1000 / 1000
  machine.lease_expiration_ts = now
  ndb.put_multi([lease, machine])
Example #15
    def start_direct_upload(self, hash_algo):
        """Can be used to upload data to CAS directly from an Appengine handler.

    Opens a temp file for writing (and returns wrapper around it). Hashes the
    data while it is being written, and moves the temp file to an appropriate
    location in CAS once it is closed.

    Args:
      hash_algo: algorithm to use to calculate data hash.

    Returns:
      DirectUpload object to write data to.
    """
        assert is_supported_hash_algo(hash_algo)
        ts_sec = utils.datetime_to_timestamp(utils.utcnow()) / 1000000.
        temp_path = self._temp_direct_upload_gs_path(ts_sec)
        temp_file = cloudstorage.open(filename=temp_path,
                                      mode='w',
                                      retry_params=self._retry_params)

        def commit_callback(hash_digest, commit):
            if commit:
                self._gs_copy(temp_path,
                              self._verified_gs_path(hash_algo, hash_digest))
            self._gs_delete(temp_path)

        return DirectUpload(file_obj=temp_file,
                            hasher=SUPPORTED_HASH_ALGOS[hash_algo][0](),
                            callback=commit_callback)
Example #16
    def get(self, request):
        """Handles an incoming CatalogMachineRetrievalRequest."""
        user = auth.get_current_identity().to_bytes()
        logging.info(
            'Received CatalogMachineRetrievalRequest:\nUser: %s\n%s',
            user,
            request,
        )
        if acl.is_catalog_admin():
            if not request.backend:
                raise endpoints.BadRequestException(
                    'Backend unspecified by administrator')
        elif acl.is_backend_service():
            current_backend = acl.get_current_backend()
            if request.backend is None:
                request.backend = current_backend
            if request.backend != current_backend:
                raise endpoints.ForbiddenException('Mismatched backend')

        entry = models.CatalogMachineEntry.get(request.backend,
                                               request.hostname)
        if not entry:
            raise endpoints.NotFoundException('CatalogMachineEntry not found')

        response = rpc_messages.CatalogMachineRetrievalResponse(
            dimensions=entry.dimensions,
            policies=entry.policies,
            state=entry.state,
        )
        if entry.lease_expiration_ts:
            # datetime_to_timestamp returns microseconds, convert to seconds.
            response.lease_expiration_ts = utils.datetime_to_timestamp(
                entry.lease_expiration_ts) / 1000 / 1000
        return response
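The `/ 1000 / 1000` and `/ 1000000.` divisions that recur on this page exist because `utils.datetime_to_timestamp` returns microseconds since the Unix epoch. A self-contained stand-in illustrating the convention (not the real `components.utils` implementation):

import datetime

EPOCH = datetime.datetime(1970, 1, 1)

def datetime_to_timestamp(value):
    # Stand-in mirroring the documented behavior: integer microseconds.
    return int((value - EPOCH).total_seconds() * 1000000)

micros = datetime_to_timestamp(datetime.datetime(2014, 10, 2, 15, 1, 23))
seconds = micros / 1000 / 1000  # 1412262083, the granularity stored above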
Example #17
  def get_config_sets(self, request):
    """Returns config sets."""
    if request.config_set and not self.can_read_config_set(request.config_set):
      raise endpoints.ForbiddenException()

    config_sets = storage.get_config_sets_async(
        config_set=request.config_set).get_result()

    if request.include_last_import_attempt:
      attempts = ndb.get_multi([
        storage.last_import_attempt_key(cs.key.id()) for cs in config_sets
      ])
    else:
      attempts = [None] * len(config_sets)

    res = self.GetConfigSetsResponseMessage()
    for cs, attempt in zip(config_sets, attempts):
      if self.can_read_config_set(cs.key.id()):
        timestamp = None
        if cs.latest_revision_time:
          timestamp = utils.datetime_to_timestamp(cs.latest_revision_time)
        res.config_sets.append(ConfigSet(
            config_set=cs.key.id(),
            location=cs.location,
            revision=Revision(
                id=cs.latest_revision,
                url=cs.latest_revision_url,
                timestamp=timestamp,
                committer_email=cs.latest_revision_committer_email,
            ),
            last_import_attempt=attempt_to_msg(attempt),
        ))
    return res
Example #18
    def test_segment_full(self, enqueue_tasks):
        ndb.put_multi([
            model.Build(id=i,
                        bucket='chromium',
                        tags=['buildset:%d' % (i % 3)])
            for i in xrange(50, 52)
        ])
        self.post({
            'action': 'segment',
            'tag': 'buildset',
            'seg_index': 0,
            'seg_start': 50,
            'seg_end': 60,
            'started_ts': utils.datetime_to_timestamp(self.now),
        })

        self.assertEqual(enqueue_tasks.call_count, 1)
        enqueue_tasks.assert_called_with('backfill-tag-index', [(
            None,
            self.task_url + 'tag:buildset-flush',
            utils.encode_to_json({
                'action': 'flush',
                'tag': 'buildset',
                'new_entries': {
                    '0': [['chromium', 51]],
                    '2': [['chromium', 50]],
                },
            }),
        )])
Example #19
def pack_auth_db():
    """Packs an entire AuthDB into a blob, signing it using app's private key.

  Returns:
    Tuple (blob, name of a key used to sign it, base64 encoded signature).
  """
    # Grab the snapshot.
    state, snapshot = replication.new_auth_db_snapshot()

    # Serialize to binary proto message.
    req = replication_pb2.ReplicationPushRequest()
    req.revision.primary_id = app_identity.get_application_id()
    req.revision.auth_db_rev = state.auth_db_rev
    req.revision.modified_ts = utils.datetime_to_timestamp(state.modified_ts)
    replication.auth_db_snapshot_to_proto(snapshot, req.auth_db)
    req.auth_code_version = version.__version__
    auth_db_blob = req.SerializeToString()

    # Sign it using primary's private keys. sign_blob is limited to 8KB only, so
    # hash the body first and sign the digest.
    key_name, sig = signature.sign_blob(hashlib.sha512(auth_db_blob).digest())
    sig = base64.b64encode(sig)

    logging.debug('AuthDB blob size is %d bytes', len(auth_db_blob))
    return auth_db_blob, key_name, sig
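The consumer can deserialize the packed blob with the standard protobuf `FromString`, as Example #39 below does for pushed payloads; a minimal sketch:

auth_db_blob, key_name, sig = pack_auth_db()
req = replication_pb2.ReplicationPushRequest.FromString(auth_db_blob)
logging.info('packed rev %d from %s',
             req.revision.auth_db_rev, req.revision.primary_id)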
Example #20
def instance_to_proto(entity):
  """PackageInstance entity -> PackageInstance proto message."""
  return PackageInstance(
      package_name=entity.package_name,
      instance_id=entity.instance_id,
      registered_by=entity.registered_by.to_bytes(),
      registered_ts=utils.datetime_to_timestamp(entity.registered_ts))
Example #21
def pack_auth_db():
  """Packs an entire AuthDB into a blob, signing it using app's private key.

  Returns:
    Tuple (blob, name of a key used to sign it, base64 encoded signature).
  """
  # Grab the snapshot.
  state, snapshot = replication.new_auth_db_snapshot()

  # Serialize to binary proto message.
  req = replication_pb2.ReplicationPushRequest()
  req.revision.primary_id = app_identity.get_application_id()
  req.revision.auth_db_rev = state.auth_db_rev
  req.revision.modified_ts = utils.datetime_to_timestamp(state.modified_ts)
  replication.auth_db_snapshot_to_proto(snapshot, req.auth_db)
  req.auth_code_version = version.__version__
  auth_db_blob = req.SerializeToString()

  # Sign it using primary's private keys. sign_blob is limited to 8KB only, so
  # hash the body first and sign the digest.
  key_name, sig = signature.sign_blob(hashlib.sha512(auth_db_blob).digest())
  sig = base64.b64encode(sig)

  logging.debug('AuthDB blob size is %d bytes', len(auth_db_blob))
  return auth_db_blob, key_name, sig
Example #22
def release_lease(lease_key):
    """Releases a lease on a machine.

  Args:
    lease_key: ndb.Key for a models.LeaseRequest entity.
  """
    lease = lease_key.get()
    if not lease:
        logging.warning('LeaseRequest not found: %s', lease_key)
        return
    if not lease.released:
        logging.warning('LeaseRequest not released:\n%s', lease)
        return

    lease.released = False
    if not lease.machine_id:
        logging.warning('LeaseRequest has no associated machine:\n%s', lease)
        lease.put()
        return

    machine = ndb.Key(models.CatalogMachineEntry, lease.machine_id).get()
    if not machine:
        logging.error('LeaseRequest has non-existent machine leased:\n%s',
                      lease)
        lease.put()
        return

    # Just expire the lease now and let MachineReclamationProcessor handle it.
    logging.info('Expiring LeaseRequest:\n%s', lease)
    now = utils.utcnow()
    lease.response.lease_expiration_ts = utils.datetime_to_timestamp(
        now) / 1000 / 1000
    machine.lease_expiration_ts = now
    ndb.put_multi([lease, machine])
Example #23
    def test_default_works(self):
        resp = self.create_token({
            'audience': ['*'],
            'services': ['*'],
        }, 'user:[email protected]')
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(3600, resp.json_body['validity_duration'])
        self.assertEqual('1', resp.json_body['subtoken_id'])

        t = decode_token(resp.json_body['delegation_token'])
        self.assertEqual('user:[email protected]', t.delegated_identity)
        self.assertTrue(t.creation_time >= time.time() - 30)
        self.assertEqual(3600, t.validity_duration)
        self.assertEqual(['*'], t.audience)
        self.assertEqual(['*'], t.services)
        self.assertEqual('user:[email protected]', t.requestor_identity)
        self.assertTrue(t.subtoken_id is not None)

        # Entity is created.
        key = ndb.Key(delegation.AuthDelegationSubtoken, t.subtoken_id)
        ent = key.get()
        self.assertTrue(ent)
        self.assertTrue(ent.subtoken)
        self.assertEqual('127.1.2.3', ent.caller_ip)
        self.assertEqual('v1a', ent.auth_service_version)
        self.assertEqual('user:[email protected]', ent.delegated_identity)
        self.assertEqual(['*'], ent.services)
        self.assertEqual(t.creation_time * 1e6,
                         utils.datetime_to_timestamp(ent.creation_time))
        self.assertEqual('user:[email protected]', ent.requestor_identity)
Example #24
  def setUp(self):
    super(V1ApiTest, self).setUp()
    gae_ts_mon.reset_for_unittest(disable=True)
    auth.disable_process_cache()
    user.clear_request_cache()

    self.patch(
        'components.utils.utcnow', return_value=datetime.datetime(2017, 1, 1)
    )
    self.future_date = utils.utcnow() + datetime.timedelta(days=1)
    # future_ts is str because INT64 values are formatted as strings.
    self.future_ts = str(utils.datetime_to_timestamp(self.future_date))

    config.put_bucket(
        'chromium',
        'a' * 40,
        test_util.parse_bucket_cfg(
            '''
            name: "luci.chromium.try"
            acls {
              role: SCHEDULER
              identity: "anonymous:anonymous"
            }
            '''
        ),
    )

    self.build_infra = test_util.build_bundle(id=1).infra
    self.build_infra.put()
Example #25
 def test_expired_build_to_message(self):
     yesterday = utils.utcnow() - datetime.timedelta(days=1)
     yesterday_timestamp = utils.datetime_to_timestamp(yesterday)
     bundle = test_util.build_bundle()
     bundle.build.lease_key = 1
     bundle.build.lease_expiration_date = yesterday
     msg = api_common.build_to_message(bundle)
     self.assertEqual(msg.lease_expiration_ts, yesterday_timestamp)
Example #26
def lease_machine(machine_key, lease):
    """Attempts to lease the given machine.

  Args:
    machine_key: ndb.Key for a model.CatalogMachineEntry instance.
    lease: model.LeaseRequest instance.

  Returns:
    True if the machine was leased, otherwise False.
  """
    machine = machine_key.get()
    lease = lease.key.get()
    logging.info('Attempting to lease matching CatalogMachineEntry:\n%s',
                 machine)

    if not can_fulfill(machine, lease.request):
        logging.warning('CatalogMachineEntry no longer matches:\n%s', machine)
        return False
    if machine.state != models.CatalogMachineEntryStates.AVAILABLE:
        logging.warning('CatalogMachineEntry no longer available:\n%s',
                        machine)
        return False
    if lease.response.state != rpc_messages.LeaseRequestState.UNTRIAGED:
        logging.warning('LeaseRequest no longer untriaged:\n%s', lease)
        return False
    if not machine.pubsub_subscription:
        logging.warning('CatalogMachineEntry not subscribed to Pub/Sub yet')
        return False

    logging.info('Leasing CatalogMachineEntry:\n%s', machine)
    lease.leased_ts = utils.utcnow()
    lease_expiration_ts = lease.leased_ts + datetime.timedelta(
        seconds=lease.request.duration)
    lease.machine_id = machine.key.id()
    lease.response.hostname = machine.dimensions.hostname
    # datetime_to_timestamp returns microseconds, which are too fine grain.
    lease.response.lease_expiration_ts = utils.datetime_to_timestamp(
        lease_expiration_ts) / 1000 / 1000
    lease.response.state = rpc_messages.LeaseRequestState.FULFILLED
    machine.lease_id = lease.key.id()
    machine.lease_expiration_ts = lease_expiration_ts
    machine.state = models.CatalogMachineEntryStates.LEASED
    ndb.put_multi([lease, machine])
    params = {
        'policies': protojson.encode_message(machine.policies),
        'request_json': protojson.encode_message(lease.request),
        'response_json': protojson.encode_message(lease.response),
        'machine_project': machine.pubsub_topic_project,
        'machine_topic': machine.pubsub_topic,
    }
    if not utils.enqueue_task(
            '/internal/queues/fulfill-lease-request',
            'fulfill-lease-request',
            params=params,
            transactional=True,
    ):
        raise TaskEnqueuingError('fulfill-lease-request')
    return True
Example #27
    def test_get_access_token_async(self):
        orig_get_access_token_async = service_account.get_access_token_async

        expire_time = '2014-10-02T15:01:23.045123456Z'

        @ndb.tasklet
        def urlfetch_mock(**kwargs):
            class Response(dict):
                def __init__(self, *args, **kwargs):
                    super(Response, self).__init__(*args, **kwargs)
                    self.status_code = 200
                    self.content = json.dumps(self)

            mock_dict = {
                "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/":
                Response({
                    "accessToken": 'foobartoken',
                    "expireTime": expire_time,
                })
            }
            for url_prefix, response in mock_dict.iteritems():
                if kwargs['url'].find(url_prefix) == 0:
                    raise ndb.Return(response)
            raise Exception('url not found in mock: %s' % kwargs['url'])

        self.mock(service_account, '_urlfetch', urlfetch_mock)

        @ndb.tasklet
        def get_access_token_async_mock(scopes,
                                        service_account_key=None,
                                        act_as=None,
                                        min_lifetime_sec=5 * 60):
            if service_account_key:
                raise ndb.Return("FAKETOKENFAKETOKEN")
            result = yield orig_get_access_token_async(scopes,
                                                       service_account_key,
                                                       act_as,
                                                       min_lifetime_sec)
            raise ndb.Return(result)

        # Wrap get_access_token to mock out local signing
        self.mock(service_account, 'get_access_token_async',
                  get_access_token_async_mock)

        # Quick self check on mock of local-signer based flow
        self.assertEqual(
            "FAKETOKENFAKETOKEN",
            service_account.get_access_token_async(
                ["a", "b"], service_account_key=FAKE_SECRET_KEY).get_result())

        res = service_account.get_access_token_async(
            ["c"], service_account_key=None,
            act_as="*****@*****.**").get_result()
        self.assertEqual(
            ('foobartoken',
             int(
                 utils.datetime_to_timestamp(
                     utils.parse_rfc3339_datetime(expire_time)) / 1e6)), res)
Example #28
    def get_config_sets(self, request):
        """Returns config sets."""
        if request.config_set and not can_read_config_set(request.config_set):
            raise endpoints.ForbiddenException()
        if request.include_files and not request.config_set:
            raise endpoints.BadRequestException(
                'Must specify config_set to use include_files')

        config_sets = storage.get_config_sets_async(
            config_set=request.config_set).get_result()

        # The files property must always be a list of File objects (not None).
        files = []
        if request.include_files:
            # There must be a single config set because request.config_set is
            # specified.
            cs = config_sets[0]
            if cs.latest_revision:
                file_keys = storage.get_file_keys(request.config_set,
                                                  cs.latest_revision)
                files = [File(path=key.id()) for key in file_keys]

        if request.include_last_import_attempt:
            attempts = ndb.get_multi([
                storage.last_import_attempt_key(cs.key.id())
                for cs in config_sets
            ])
        else:
            attempts = [None] * len(config_sets)

        res = self.GetConfigSetsResponseMessage()
        can_read = can_read_config_sets([cs.key.id() for cs in config_sets])
        for cs, attempt in zip(config_sets, attempts):
            if not can_read[cs.key.id()]:
                continue

            if common.REF_CONFIG_SET_RGX.match(cs.key.id()):
                # Exclude ref configs from the listing for crbug.com/935667
                # TODO(crbug.com/924803): remove ref configs altogether.
                continue

            cs_msg = ConfigSet(
                config_set=cs.key.id(),
                location=cs.location,
                files=files,
                last_import_attempt=attempt_to_msg(attempt),
            )
            if cs.latest_revision:
                cs_msg.revision = Revision(
                    id=cs.latest_revision,
                    url=cs.latest_revision_url,
                    committer_email=cs.latest_revision_committer_email,
                )
                if cs.latest_revision_time:
                    cs_msg.revision.timestamp = utils.datetime_to_timestamp(
                        cs.latest_revision_time)
            res.config_sets.append(cs_msg)
        return res
Example #29
    def test_segment_attempt_2(self, enqueue_tasks):
        ndb.put_multi([test_util.build(id=i) for i in xrange(50, 60)])

        # process 5 builds
        self.proc['func'] = lambda builds, _: list(itertools.islice(builds, 5))

        self.post(
            {
                'job_id': 'jobid',
                'iteration': 0,
                'seg_index': 0,
                'seg_start': 50,
                'seg_end': 59,
                'started_ts': utils.datetime_to_timestamp(self.now),
                'proc': {
                    'name': 'foo',
                    'payload': 'bar'
                },
            },
            headers={
                'X-AppEngine-TaskExecutionCount': '1',
            },
        )

        enqueue_tasks.assert_called_with(
            'bulkproc',
            [(
                'jobid-0-1',
                bulkproc.PATH_PREFIX + 'segment/seg:0-percent:50',
                utils.encode_to_json(
                    {
                        'job_id': 'jobid',
                        'iteration': 1,
                        'seg_index': 0,
                        'seg_start': 50,
                        'seg_end': 59,
                        'start_from': 55,
                        'started_ts': utils.datetime_to_timestamp(self.now),
                        'proc': {
                            'name': 'foo',
                            'payload': 'bar'
                        },
                    }),
            )],
        )
Example #30
    def test_segment_partial(self, enqueue_tasks):
        ndb.put_multi([test_util.build(id=i) for i in xrange(50, 60)])

        def process(builds, payload):
            # process 5 builds
            page = list(itertools.islice(builds, 5))
            self.assertEqual([b.key.id() for b in page], range(50, 55))
            self.assertEqual(payload, 'bar')

        self.proc['func'] = process

        self.post({
            'job_id': 'jobid',
            'iteration': 0,
            'seg_index': 0,
            'seg_start': 50,
            'seg_end': 59,
            'started_ts': utils.datetime_to_timestamp(self.now),
            'proc': {
                'name': 'foo',
                'payload': 'bar'
            },
        })

        expected_next_payload = {
            'job_id': 'jobid',
            'iteration': 1,
            'seg_index': 0,
            'seg_start': 50,
            'seg_end': 59,
            'start_from': 55,
            'started_ts': utils.datetime_to_timestamp(self.now),
            'proc': {
                'name': 'foo',
                'payload': 'bar'
            },
        }
        enqueue_tasks.assert_called_with(
            'bulkproc',
            [(
                'jobid-0-1',
                bulkproc.PATH_PREFIX + 'segment/seg:0-percent:50',
                utils.encode_to_json(expected_next_payload),
            )],
        )
Example #31
 def get(self):
     services = sorted(
         replication.AuthReplicaState.query(
             ancestor=replication.replicas_root_key()),
         key=lambda x: x.key.id())
     last_auth_state = model.get_replication_state()
     self.send_response({
         'services': [
             x.to_serializable_dict(with_id_as='app_id') for x in services
         ],
         'auth_code_version': version.__version__,
         'auth_db_rev': {
             'primary_id': last_auth_state.primary_id,
             'rev': last_auth_state.auth_db_rev,
             'ts': utils.datetime_to_timestamp(last_auth_state.modified_ts),
         },
         'now': utils.datetime_to_timestamp(utils.utcnow()),
     })
Example #32
    def start(self, payload):
        """Splits build space into segments and enqueues a task for each segment.

    Assumes that build creation rate was about the same forever.

    Payload properties:
      tag: tag to reindex. Required.
      shards: number of workers to create. Must be positive. Required.
    """
        tag = payload['tag']
        shards = payload['shards']
        assert isinstance(tag, basestring), tag
        assert tag
        assert isinstance(shards, int)
        assert shards > 0

        first, = model.Build.query().fetch(1, keys_only=True) or [None]
        if not first:  # pragma: no cover
            logging.warning('no builds to backfill')
            return
        # Do not require -key index by using created_time index.
        last, = (model.Build.query().order(model.Build.create_time).fetch(
            1, keys_only=True))
        space_start, space_end = first.id(), last.id() + 1
        space_size = space_end - space_start
        seg_size = max(1, int(math.ceil(space_size / shards)))

        logging.info(
            'build space [%d..%d), size %d, %d shards, segment size %d',
            space_start, space_end, space_size, shards, seg_size)

        next_seg_start = space_start
        tasks = []
        while next_seg_start < space_end:
            seg_start = next_seg_start
            seg_end = min(space_end, seg_start + seg_size)
            next_seg_start = seg_end
            tasks.append((None, 'tag:{tag}-seg:{seg_index}-percent:0', {
                'action': 'segment',
                'tag': tag,
                'job_id': self.request.headers['X-AppEngine-TaskName'],
                'iteration': 0,
                'seg_index': len(tasks),
                'seg_start': seg_start,
                'seg_end': seg_end,
                'started_ts': utils.datetime_to_timestamp(utils.utcnow()),
            }))
        self._recurse(tasks)
        logging.info('enqueued %d segment tasks for tag %s', len(tasks), tag)
Example #33
def attempt_to_msg(entity):
  if entity is None:
    return None
  return ConfigSet.ImportAttempt(
    timestamp=utils.datetime_to_timestamp(entity.time),
    revision=Revision(
        id=entity.revision.id,
        url=entity.revision.url,
        timestamp=utils.datetime_to_timestamp(entity.revision.time),
        committer_email=entity.revision.committer_email,
    ) if entity.revision else None,
    success=entity.success,
    message=entity.message,
    validation_messages=[
      cfg_endpoint.ValidationMessage(severity=m.severity, text=m.text)
      for m in entity.validation_messages
    ],
  )
Example #34
 def get(self):
   services = sorted(
       replication.AuthReplicaState.query(
           ancestor=replication.replicas_root_key()),
       key=lambda x: x.key.id())
   last_auth_state = model.get_replication_state()
   self.send_response({
     'services': [
       x.to_serializable_dict(with_id_as='app_id') for x in services
     ],
     'auth_code_version': version.__version__,
     'auth_db_rev': {
       'primary_id': last_auth_state.primary_id,
       'rev': last_auth_state.auth_db_rev,
       'ts': utils.datetime_to_timestamp(last_auth_state.modified_ts),
     },
     'now': utils.datetime_to_timestamp(utils.utcnow()),
   })
Example #35
def attempt_to_msg(entity):
    if entity is None:
        return None
    return ConfigSet.ImportAttempt(
        timestamp=utils.datetime_to_timestamp(entity.time),
        revision=Revision(
            id=entity.revision.id,
            url=entity.revision.url,
            timestamp=utils.datetime_to_timestamp(entity.revision.time),
            committer_email=entity.revision.committer_email,
        ) if entity.revision else None,
        success=entity.success,
        message=entity.message,
        validation_messages=[
            cfg_endpoint.ValidationMessage(severity=m.severity, text=m.text)
            for m in entity.validation_messages
        ],
    )
Example #36
 def simplify(v):
   if isinstance(v, list):
     return [simplify(i) for i in v]
   elif isinstance(v, datastore_utils.BytesSerializable):
     return v.to_bytes()
   elif isinstance(v, datastore_utils.JsonSerializable):
     return v.to_jsonish()
   elif isinstance(v, datetime.datetime):
     return utils.datetime_to_timestamp(v)
   return v
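For illustration, a hedged example of what `simplify` yields for a mixed list, using the microsecond convention seen elsewhere on this page:

simplify([42, 'abc', datetime.datetime(2014, 1, 1)])
# -> [42, 'abc', 1388534400000000]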
Example #37
    def decorator(fn):
        fn = adapt_exceptions(fn)
        fn = auth.public(fn)
        fn = endpoints_decorator(fn)

        ts_mon_time = lambda: utils.datetime_to_timestamp(utils.utcnow()) / 1e6
        fn = gae_ts_mon.instrument_endpoint(time_fn=ts_mon_time)(fn)
        # ndb.toplevel must be the last one.
        # See also the comment in endpoint decorator in api.py
        return ndb.toplevel(fn)
Example #38
 def simplify(v):
     if isinstance(v, list):
         return [simplify(i) for i in v]
     elif isinstance(v, datastore_utils.BytesSerializable):
         return v.to_bytes()
     elif isinstance(v, datastore_utils.JsonSerializable):
         return v.to_jsonish()
     elif isinstance(v, datetime.datetime):
         return utils.datetime_to_timestamp(v)
     return v
Example #39
  def post(self):
    # Check that current service is a Replica.
    if not model.is_replica():
      self.send_error(replication_pb2.ReplicationPushResponse.NOT_A_REPLICA)
      return

    # Check that request came from expected Primary service.
    expected_ident = model.Identity(
        model.IDENTITY_SERVICE, model.get_replication_state().primary_id)
    if api.get_current_identity() != expected_ident:
      self.send_error(replication_pb2.ReplicationPushResponse.FORBIDDEN)
      return

    # Check the signature headers are present.
    key_name = self.request.headers.get('X-AuthDB-SigKey-v1')
    sign = self.request.headers.get('X-AuthDB-SigVal-v1')
    if not key_name or not sign:
      self.send_error(replication_pb2.ReplicationPushResponse.MISSING_SIGNATURE)
      return

    # Verify the signature.
    body = self.request.body
    sign = base64.b64decode(sign)
    if not replication.is_signed_by_primary(body, key_name, sign):
      self.send_error(replication_pb2.ReplicationPushResponse.BAD_SIGNATURE)
      return

    # Deserialize the request, check it is valid.
    request = replication_pb2.ReplicationPushRequest.FromString(body)
    if not request.HasField('revision') or not request.HasField('auth_db'):
      self.send_error(replication_pb2.ReplicationPushResponse.BAD_REQUEST)
      return

    # Handle it.
    logging.info('Received AuthDB push: rev %d', request.revision.auth_db_rev)
    if request.HasField('auth_code_version'):
      logging.info(
          'Primary\'s auth component version: %s', request.auth_code_version)
    applied, state = replication.push_auth_db(request.revision, request.auth_db)
    logging.info(
        'AuthDB push %s: rev is %d',
        'applied' if applied else 'skipped', state.auth_db_rev)

    # Send the response.
    response = replication_pb2.ReplicationPushResponse()
    if applied:
      response.status = replication_pb2.ReplicationPushResponse.APPLIED
    else:
      response.status = replication_pb2.ReplicationPushResponse.SKIPPED
    response.current_revision.primary_id = state.primary_id
    response.current_revision.auth_db_rev = state.auth_db_rev
    response.current_revision.modified_ts = utils.datetime_to_timestamp(
        state.modified_ts)
    response.auth_code_version = version.__version__
    self.send_response(response)
Example #40
def lease_machine(machine_key, lease):
  """Attempts to lease the given machine.

  Args:
    machine_key: ndb.Key for a model.CatalogMachineEntry instance.
    lease: model.LeaseRequest instance.

  Returns:
    True if the machine was leased, otherwise False.
  """
  machine = machine_key.get()
  lease = lease.key.get()
  logging.info('Attempting to lease matching CatalogMachineEntry:\n%s', machine)

  if not can_fulfill(machine, lease.request):
    logging.warning('CatalogMachineEntry no longer matches:\n%s', machine)
    return False
  if machine.state != models.CatalogMachineEntryStates.AVAILABLE:
    logging.warning('CatalogMachineEntry no longer available:\n%s', machine)
    return False
  if lease.response.state != rpc_messages.LeaseRequestState.UNTRIAGED:
    logging.warning('LeaseRequest no longer untriaged:\n%s', lease)
    return False

  logging.info('Leasing CatalogMachineEntry:\n%s', machine)
  lease.leased_ts = utils.utcnow()
  lease_expiration_ts = lease.leased_ts + datetime.timedelta(
      seconds=lease.request.duration,
  )
  lease.machine_id = machine.key.id()
  lease.response.hostname = machine.dimensions.hostname
  # datetime_to_timestamp returns microseconds, which are too fine grain.
  lease.response.lease_expiration_ts = utils.datetime_to_timestamp(
      lease_expiration_ts) / 1000 / 1000
  lease.response.state = rpc_messages.LeaseRequestState.FULFILLED
  machine.lease_id = lease.key.id()
  machine.lease_expiration_ts = lease_expiration_ts
  machine.state = models.CatalogMachineEntryStates.LEASED
  ndb.put_multi([lease, machine])
  params = {
      'policies': protojson.encode_message(machine.policies),
      'request_json': protojson.encode_message(lease.request),
      'response_json': protojson.encode_message(lease.response),
      'machine_project': machine.pubsub_topic_project,
      'machine_topic': machine.pubsub_topic,
  }
  if not utils.enqueue_task(
      '/internal/queues/fulfill-lease-request',
      'fulfill-lease-request',
      params=params,
      transactional=True,
  ):
    raise TaskEnqueuingError('fulfill-lease-request')
  return True
Example #41
 def to_message((build_id, build, ex)):
     msg = self.HeartbeatBatchResponseMessage.OneHeartbeatResult(
         build_id=build_id)
     if build:
         msg.lease_expiration_ts = utils.datetime_to_timestamp(
             build.lease_expiration_date)
     elif isinstance(ex, errors.Error):
         msg.error = exception_to_error_message(ex)
     else:
         raise ex
     return msg
Example #42
def get_own_public_certificates():
  """Returns jsonish object with public certificates of current service."""
  certs = app_identity.get_public_certificates()
  return {
    'certificates': [
      {
        'key_name': cert.key_name,
        'x509_certificate_pem': cert.x509_certificate_pem,
      }
      for cert in certs
    ],
    'timestamp': utils.datetime_to_timestamp(utils.utcnow()),
  }
Example #43
def auth_db_snapshot_to_proto(snapshot, auth_db_proto=None):
  """Writes AuthDBSnapshot into replication_pb2.AuthDB message.

  Args:
    snapshot: instance of AuthDBSnapshot with entities to convert to protobuf.
    auth_db_proto: optional instance of replication_pb2.AuthDB to update.

  Returns:
    Instance of replication_pb2.AuthDB (same as |auth_db_proto| if passed).
  """
  auth_db_proto = auth_db_proto or replication_pb2.AuthDB()

  auth_db_proto.oauth_client_id = snapshot.global_config.oauth_client_id or ''
  auth_db_proto.oauth_client_secret = (
      snapshot.global_config.oauth_client_secret or '')
  if snapshot.global_config.oauth_additional_client_ids:
    auth_db_proto.oauth_additional_client_ids.extend(
        snapshot.global_config.oauth_additional_client_ids)

  for ent in snapshot.groups:
    msg = auth_db_proto.groups.add()
    msg.name = ent.key.id()
    msg.members.extend(ident.to_bytes() for ident in ent.members)
    msg.globs.extend(glob.to_bytes() for glob in ent.globs)
    msg.nested.extend(ent.nested)
    msg.description = ent.description
    msg.created_ts = utils.datetime_to_timestamp(ent.created_ts)
    msg.created_by = ent.created_by.to_bytes()
    msg.modified_ts = utils.datetime_to_timestamp(ent.modified_ts)
    msg.modified_by = ent.modified_by.to_bytes()
    msg.owners = ent.owners

  for ent in snapshot.secrets:
    msg = auth_db_proto.secrets.add()
    msg.name = ent.key.id()
    msg.values.extend(ent.values)
    msg.modified_ts = utils.datetime_to_timestamp(ent.modified_ts)
    msg.modified_by = ent.modified_by.to_bytes()

  for ent in snapshot.ip_whitelists:
    msg = auth_db_proto.ip_whitelists.add()
    msg.name = ent.key.id()
    msg.subnets.extend(ent.subnets)
    msg.description = ent.description
    msg.created_ts = utils.datetime_to_timestamp(ent.created_ts)
    msg.created_by = ent.created_by.to_bytes()
    msg.modified_ts = utils.datetime_to_timestamp(ent.modified_ts)
    msg.modified_by = ent.modified_by.to_bytes()

  for ent in snapshot.ip_whitelist_assignments.assignments:
    msg = auth_db_proto.ip_whitelist_assignments.add()
    msg.identity = ent.identity.to_bytes()
    msg.ip_whitelist = ent.ip_whitelist
    msg.comment = ent.comment or ''
    msg.created_ts = utils.datetime_to_timestamp(ent.created_ts)
    msg.created_by = ent.created_by.to_bytes()

  return auth_db_proto
Example #44
  def create_upload_session(self, hash_algo, hash_digest, caller):
    """Starts a new upload operation.

    Starts an upload regardless of whether the object is already stored or not.
    Generates upload_url for GS resumable upload protocol.

    Args:
      hash_algo: valid supported hash algorithm to use for verification.
      hash_digest: hex hash digest of the content to be uploaded.
      caller: auth.Identity of whoever makes the request.

    Returns:
      tuple (UploadSession object, signed upload session ID).
    """
    assert is_valid_hash_digest(hash_algo, hash_digest)

    # TODO(vadimsh): Check that number of pending uploads opened by |caller|
    # is low enough. To prevent malicious client from creating tons of uploads.

    # New unique ID (long int).
    upload_id = UploadSession.allocate_ids(size=1)[0]

    # Opening a GCS file and not closing it keeps upload session active.
    ts_sec = utils.datetime_to_timestamp(utils.utcnow()) / 1000000.
    temp_gs_location = self._temp_upload_session_gs_path(upload_id, ts_sec)
    temp_file = cloudstorage.open(
        filename=temp_gs_location,
        mode='w',
        retry_params=self._retry_params)

    # See cloudstorage/storage_api.py, StreamingBuffer for _path_with_token.
    upload_url = '%s%s' % (temp_file._api.api_url, temp_file._path_with_token)

    # New session.
    upload_session = UploadSession(
        id=upload_id,
        hash_algo=hash_algo,
        hash_digest=hash_digest,
        temp_gs_location=temp_gs_location,
        final_gs_location=self._verified_gs_path(hash_algo, hash_digest),
        upload_url=upload_url,
        status=UploadSession.STATUS_UPLOADING,
        created_by=caller)
    upload_session.put()

    # Generate signed ID. It will be usable only by |caller|.
    upload_session_id = UploadIdSignature.generate(
        message=[caller.to_bytes()],
        embedded={'id': '%s' % upload_session.key.id()})
    return upload_session, upload_session_id
Example #45
  def setUp(self):
    super(BuildBucketApiTest, self).setUp()
    self.service = mock.Mock()
    self.mock(api.BuildBucketApi, 'service_factory', lambda _: self.service)

    self.future_date = utils.utcnow() + datetime.timedelta(minutes=1)
    # future_ts is str because INT64 values are formatted as strings.
    self.future_ts = str(utils.datetime_to_timestamp(self.future_date))
    self.test_build = model.Build(
        id=1,
        bucket='chromium',
        parameters={
            'buildername': 'linux_rel',
        },
    )
Example #46
def package_acls_to_proto(per_role_acls):
  """Dict {role -> list of PackageACL entities} -> PackageACL message."""
  acls = []
  for role, package_acl_entities in per_role_acls.iteritems():
    for e in package_acl_entities:
      principals = []
      principals.extend(u.to_bytes() for u in e.users)
      principals.extend('group:%s' % g for g in e.groups)
      acls.append(PackageACL.ElementaryACL(
          package_path=e.package_path,
          role=role,
          principals=principals,
          modified_by=e.modified_by.to_bytes(),
          modified_ts=utils.datetime_to_timestamp(e.modified_ts),
      ))
  return PackageACL(acls=acls)
Example #47
  def tag_existing(cls, collection):
    """Tag existing digests with new timestamp.

    Arguments:
      collection: a DigestCollection containing existing digests

    Returns:
      the enqueued task if there were existing entries; None otherwise
    """
    if collection.items:
      url = '/internal/taskqueue/tag/%s/%s' % (
          collection.namespace.namespace,
          utils.datetime_to_timestamp(utils.utcnow()))
      payload = ''.join(
          binascii.unhexlify(digest.digest) for digest in collection.items)
      return utils.enqueue_task(url, 'tag', payload=payload)
Example #48
    def to_message((build_id, build, ex)):
      msg = self.HeartbeatBatchResponseMessage.OneHeartbeatResult(
        build_id=build_id)
      if build:
        msg.lease_expiration_ts = utils.datetime_to_timestamp(
          build.lease_expiration_date)
      else:
        if not isinstance(ex, errors.Error):
          logging.error(ex.message, exc_info=ex)
          raise endpoints.InternalServerErrorException(ex.message)

        assert type(ex) in ERROR_REASON_MAP
        msg.error = ErrorMessage(
          reason=ERROR_REASON_MAP[type(ex)],
          message=ex.message,
        )

      return msg
Example #49
def pack_auth_db():
  """Packs an entire AuthDB into a blob (serialized protobuf message).

  Returns:
    Tuple (AuthReplicationState, blob).
  """
  # Grab the snapshot.
  state, snapshot = replication.new_auth_db_snapshot()

  # Serialize to binary proto message.
  req = replication_pb2.ReplicationPushRequest()
  req.revision.primary_id = app_identity.get_application_id()
  req.revision.auth_db_rev = state.auth_db_rev
  req.revision.modified_ts = utils.datetime_to_timestamp(state.modified_ts)
  replication.auth_db_snapshot_to_proto(snapshot, req.auth_db)
  req.auth_code_version = version.__version__
  auth_db_blob = req.SerializeToString()

  logging.debug('AuthDB blob size is %d bytes', len(auth_db_blob))
  return state, auth_db_blob
Example #50
 def get(self, rev):
   skip_body = self.request.get('skip_body') == '1'
   if rev == 'latest':
     snapshot = replication.get_latest_auth_db_snapshot(skip_body)
   else:
     try:
       rev = int(rev)
     except ValueError:
       self.abort_with_error(400, text='Bad revision number, not an integer')
     snapshot = replication.get_auth_db_snapshot(rev, skip_body)
   if not snapshot:
     self.abort_with_error(404, text='No such snapshot: %s' % rev)
   resp = {
     'auth_db_rev': snapshot.key.integer_id(),
     'created_ts': utils.datetime_to_timestamp(snapshot.created_ts),
     'sha256': snapshot.auth_db_sha256,
   }
   if not skip_body:
     assert snapshot.auth_db_deflated
     resp['deflated_body'] = base64.b64encode(snapshot.auth_db_deflated)
   self.send_response({'snapshot': resp})
Example #51
def publish_authdb_change(state):
  """Publishes AuthDB change notification to the topic.

  Args:
    state: AuthReplicationState with version info.
  """
  if utils.is_local_dev_server():
    return

  msg = replication_pb2.ReplicationPushRequest()
  msg.revision.primary_id = app_identity.get_application_id()
  msg.revision.auth_db_rev = state.auth_db_rev
  msg.revision.modified_ts = utils.datetime_to_timestamp(state.modified_ts)

  blob = msg.SerializeToString()
  key_name, sig = signature.sign_blob(blob)

  pubsub.publish(topic_name(), blob, {
    'X-AuthDB-SigKey-v1': key_name,
    'X-AuthDB-SigVal-v1': base64.b64encode(sig),
  })
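On the receiving side the same attributes can be checked with `replication.is_signed_by_primary`, mirroring the header check in Example #39. A hedged sketch where `blob` and `attributes` are assumed to come from the delivered Pub/Sub message:

key_name = attributes['X-AuthDB-SigKey-v1']
sig = base64.b64decode(attributes['X-AuthDB-SigVal-v1'])
if not replication.is_signed_by_primary(blob, key_name, sig):
    raise ValueError('AuthDB change notification has a bad signature')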
Example #52
def get_own_public_certificates():
  """Returns jsonish object with public certificates of current service."""
  attempt = 0
  while True:
    attempt += 1
    try:
      certs = app_identity.get_public_certificates(deadline=1.5)
      break
    except apiproxy_errors.DeadlineExceededError as e:
      logging.warning('%s', e)
      if attempt == 3:
        raise
  return {
    'certificates': [
      {
        'key_name': cert.key_name,
        'x509_certificate_pem': cert.x509_certificate_pem,
      }
      for cert in certs
    ],
    'timestamp': utils.datetime_to_timestamp(utils.utcnow()),
  }
Example #53
  def test_events(self):
    # Run one task, push an event manually.
    self.mock(random, 'getrandbits', lambda _: 0x88)
    now = datetime.datetime(2010, 1, 2, 3, 4, 5)
    self.mock_now(now)
    str_now = unicode(now.strftime(self.DATETIME_NO_MICRO))

    self.set_as_bot()
    self.client_create_task_raw()
    token, params = self.get_bot_token()
    res = self.bot_poll()
    now_60 = self.mock_now(now, 60)
    str_now_60 = unicode(now_60.strftime(self.DATETIME_NO_MICRO))
    self.bot_complete_task(token, task_id=res['manifest']['task_id'])

    params['event'] = 'bot_rebooting'
    params['message'] = 'for the best'
    response = self.post_with_token('/swarming/api/v1/bot/event', params, token)
    self.assertEqual({}, response)

    start = utils.datetime_to_timestamp(now) / 1000000.
    end = utils.datetime_to_timestamp(now_60) / 1000000.
    self.set_as_privileged_user()
    body = message_to_dict(
        swarming_rpcs.BotEventsRequest(start=start, end=end+1))
    body['bot_id'] = 'bot1'
    response = self.call_api('events', body=body)
    dimensions = [
      {u'key': u'id', u'value': [u'bot1']},
      {u'key': u'os', u'value': [u'Amiga']},
    ]
    state = unicode(json.dumps(
        {'running_time': 1234., 'sleep_streak': 0,
          'started_ts': 1410990411.111},
        sort_keys=True,
        separators=(',',':')))
    expected = {
      u'items': [
        {
          u'dimensions': dimensions,
          u'event_type': u'bot_rebooting',
          u'external_ip': unicode(self.source_ip),
          u'message': u'for the best',
          u'quarantined': False,
          u'state': state,
          u'ts': str_now_60,
          u'version': unicode(self.bot_version),
        },
        {
          u'dimensions': dimensions,
          u'event_type': u'task_completed',
          u'external_ip': unicode(self.source_ip),
          u'quarantined': False,
          u'state': state,
          u'task_id': u'5cee488008811',
          u'ts': str_now_60,
          u'version': unicode(self.bot_version),
        },
        {
          u'dimensions': dimensions,
          u'event_type': u'request_task',
          u'external_ip': unicode(self.source_ip),
          u'quarantined': False,
          u'state': state,
          u'task_id': u'5cee488008811',
          u'ts': str_now,
          u'version': unicode(self.bot_version),
        },
        {
          u'dimensions': dimensions,
          u'event_type': u'bot_connected',
          u'external_ip': unicode(self.source_ip),
          u'quarantined': False,
          u'state': state,
          u'ts': str_now,
          u'version': u'123',
        },
        {
          u'dimensions': dimensions,
          u'event_type': u'bot_connected',
          u'external_ip': unicode(self.source_ip),
          u'quarantined': False,
          u'state': state,
          u'ts': str_now,
          u'version': u'123',
        },
      ],
      u'now': str_now_60,
    }
    self.assertEqual(expected, response.json)

    # Now test with a subset.
    body = message_to_dict(swarming_rpcs.BotEventsRequest(start=end, end=end+1))
    body['bot_id'] = 'bot1'
    response = self.call_api('events', body=body)
    expected['items'] = expected['items'][:-3]
    self.assertEqual(expected, response.json)
Example #54
0
  def tag_entries(entries, namespace):
    """Enqueues a task to update the timestamp for given entries."""
    url = '/internal/taskqueue/tag/%s/%s' % (
        namespace, utils.datetime_to_timestamp(utils.utcnow()))
    payload = ''.join(binascii.unhexlify(e.digest) for e in entries)
    return utils.enqueue_task(url, 'tag', payload=payload)
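The worker on the other end splits the payload back into digests. A sketch, assuming fixed-size digests (20 bytes for SHA-1 here; the real size depends on the namespace's hash algorithm):

import binascii

def split_payload(payload, digest_size=20):
  """Splits the concatenated binary digests produced by tag_entries()."""
  assert len(payload) % digest_size == 0
  return [
    binascii.hexlify(payload[i:i + digest_size])
    for i in xrange(0, len(payload), digest_size)
  ]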
Example #55
0
  def test_tasks_ok(self):
    """Asserts that tasks produces bot information."""
    self.mock(random, 'getrandbits', lambda _: 0x88)
    now = datetime.datetime(2010, 1, 2, 3, 4, 5, 6)
    self.mock_now(now)

    self.set_as_bot()
    self.client_create_task_raw()
    token, _ = self.get_bot_token()
    res = self.bot_poll()
    self.bot_complete_task(token, task_id=res['manifest']['task_id'])

    now_1 = self.mock_now(now, 1)
    now_1_str = unicode(now_1.strftime(self.DATETIME_FORMAT))
    self.mock(random, 'getrandbits', lambda _: 0x55)
    self.client_create_task_raw(name='philbert')
    token, _ = self.get_bot_token()
    res = self.bot_poll()
    self.bot_complete_task(
        token, exit_code=1, task_id=res['manifest']['task_id'])

    start = (
        utils.datetime_to_timestamp(now + datetime.timedelta(seconds=0.5)) /
        1000000.)
    end = (
        utils.datetime_to_timestamp(now_1 + datetime.timedelta(seconds=0.5)) /
        1000000.)
    request = swarming_rpcs.BotTasksRequest(end=end, start=start)

    self.set_as_privileged_user()
    body = message_to_dict(request)
    body['bot_id'] = 'bot1'
    response = self.call_api('tasks', body=body)
    expected = {
      u'items': [
        {
          u'bot_dimensions': [
            {u'key': u'id', u'value': [u'bot1']},
            {u'key': u'os', u'value': [u'Amiga']},
          ],
          u'bot_id': u'bot1',
          u'bot_version': self.bot_version,
          u'completed_ts': now_1_str,
          u'costs_usd': [0.1],
          u'created_ts': now_1_str,
          u'duration': 0.1,
          u'exit_code': u'1',
          u'failure': True,
          u'internal_failure': False,
          u'modified_ts': now_1_str,
          u'name': u'philbert',
          u'server_versions': [u'v1a'],
          u'started_ts': now_1_str,
          u'state': u'COMPLETED',
          u'task_id': u'5cee870005511',
          u'try_number': u'1',
        },
      ],
      u'now': unicode(now_1.strftime(self.DATETIME_FORMAT)),
    }
    self.assertEqual(expected, response.json)
Example #56
0
  def _gen_two_tasks(self):
    # first request
    now = datetime.datetime(2010, 1, 2, 3, 4, 5)
    str_now = unicode(now.strftime(self.DATETIME_NO_MICRO))
    self.mock_now(now)
    self.mock(random, 'getrandbits', lambda _: 0x66)
    _, first_id = self.client_create_task_raw(
        name='first', tags=['project:yay', 'commit:post', 'os:Win'],
        properties=dict(idempotent=True))
    self.set_as_bot()
    self.bot_run_task()

    # second request
    self.set_as_user()
    self.mock(random, 'getrandbits', lambda _: 0x88)
    now_60 = self.mock_now(now, 60)
    str_now_60 = unicode(now_60.strftime(self.DATETIME_NO_MICRO))
    self.client_create_task_raw(
        name='second', user='jack@localhost',
        tags=['project:yay', 'commit:pre', 'os:Win'],
        properties=dict(idempotent=True))

    # Hack the datastore so MODIFIED_TS returns in backward order compared to
    # CREATED_TS.
    now_120 = self.mock_now(now, 120)
    str_now_120 = unicode(now_120.strftime(self.DATETIME_NO_MICRO))
    entity = task_pack.unpack_result_summary_key(first_id).get()
    entity.modified_ts = now_120
    entity.put()

    second = {
      u'bot_dimensions': [
        {u'key': u'id', u'value': [u'bot1']},
        {u'key': u'os', u'value': [u'Amiga']},
      ],
      u'bot_id': u'bot1',
      u'bot_version': self.bot_version,
      u'cost_saved_usd': 0.1,
      u'created_ts': str_now_60,
      u'completed_ts': str_now,
      u'deduped_from': u'5cee488006611',
      u'duration': 0.1,
      u'exit_code': u'0',
      u'failure': False,
      u'internal_failure': False,
      u'modified_ts': str_now_60,
      u'name': u'second',
      u'server_versions': [u'v1a'],
      u'started_ts': str_now,
      u'state': u'COMPLETED',
      u'tags': [
        u'commit:pre',
        u'os:Amiga',
        u'os:Win',
        u'priority:10',
        u'project:yay',
        u'user:jack@localhost',
      ],
      u'task_id': u'5cfcee8008810',
      u'try_number': u'0',
      u'user': u'jack@localhost',
    }
    first = {
      u'bot_dimensions': [
        {u'key': u'id', u'value': [u'bot1']},
        {u'key': u'os', u'value': [u'Amiga']},
      ],
      u'bot_id': u'bot1',
      u'bot_version': self.bot_version,
      u'costs_usd': [0.1],
      u'created_ts': str_now,
      u'completed_ts': str_now,
      u'duration': 0.1,
      u'exit_code': u'0',
      u'failure': False,
      u'internal_failure': False,
      u'modified_ts': str_now_120,
      u'name': u'first',
      u'properties_hash': u'8771754ee465a689f19c87f2d21ea0d9b8dd4f64',
      u'server_versions': [u'v1a'],
      u'started_ts': str_now,
      u'state': u'COMPLETED',
      u'tags': [
        u'commit:post',
        u'os:Amiga',
        u'os:Win',
        u'priority:10',
        u'project:yay',
        u'user:joe@localhost',
      ],
      u'task_id': u'5cee488006610',
      u'try_number': u'1',
      u'user': u'joe@localhost'
    }

    start = (
        utils.datetime_to_timestamp(now - datetime.timedelta(days=1)) /
        1000000.)
    end = (
        utils.datetime_to_timestamp(now + datetime.timedelta(days=1)) /
        1000000.)
    self.set_as_privileged_user()
    return first, second, str_now_120, start, end
Example #57
0
def register_converter(
    property_cls, include_subclasses, rich_to_simple, simple_to_rich):
  # Head reconstructed: the snippet started mid-statement; the signature is
  # inferred from the register_converter() calls below.
  _rich_to_simple_converters.append(
      (property_cls, include_subclasses, rich_to_simple))
  _simple_to_rich_converters.append(
      (property_cls, include_subclasses, simple_to_rich))


### Function calls.


_register_simple_converters()


# TODO(vadimsh): Add ndb.DateProperty if needed.
register_converter(
    property_cls=ndb.DateTimeProperty,
    include_subclasses=False,
    rich_to_simple=lambda _prop, x: utils.datetime_to_timestamp(x),
    simple_to_rich=lambda _prop, x: utils.timestamp_to_datetime(x))


# Handles all property classes inherited from JsonSerializableProperty.
register_converter(
    property_cls=JsonSerializableProperty,
    include_subclasses=True,
    rich_to_simple=lambda prop, value: value.to_jsonish(),
    simple_to_rich=lambda prop, value: prop._value_type.from_jsonish(value))


# Handles all property classes inherited from BytesSerializableProperty.
register_converter(
    property_cls=BytesSerializableProperty,
    include_subclasses=True,
    # Tail inferred by symmetry with the JsonSerializableProperty block above.
    rich_to_simple=lambda prop, value: value.to_bytes(),
    simple_to_rich=lambda prop, value: prop._value_type.from_bytes(value))
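The DateTimeProperty converter above relies on utils.datetime_to_timestamp and utils.timestamp_to_datetime being exact inverses; the unit is integer microseconds since epoch (the tests elsewhere divide by 1000000. to get seconds). A self-contained round-trip sketch with stand-in implementations:

import datetime

_EPOCH = datetime.datetime(1970, 1, 1)

def datetime_to_timestamp(dt):
  # Stand-in for utils.datetime_to_timestamp: microseconds since epoch.
  delta = dt - _EPOCH
  return (delta.days * 86400 + delta.seconds) * 1000000 + delta.microseconds

def timestamp_to_datetime(ts):
  # Stand-in for utils.timestamp_to_datetime.
  return _EPOCH + datetime.timedelta(microseconds=ts)

now = datetime.datetime(2010, 1, 2, 3, 4, 5)
assert timestamp_to_datetime(datetime_to_timestamp(now)) == now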
Example #58
0
  def test_new_ok_deduped(self):
    """Asserts that new returns task result for deduped."""
    # Run a task to completion.
    self.mock(random, 'getrandbits', lambda _: 0x88)
    self.mock_now(datetime.datetime(2010, 1, 2, 3, 4, 5))
    self.client_create_task_raw(
        name='task', tags=['project:yay', 'commit:post', 'os:Win'],
        properties=dict(idempotent=True))
    self.set_as_bot()
    self.bot_run_task()

    self.mock(random, 'getrandbits', lambda _: 0x66)
    now = datetime.datetime(2010, 1, 2, 5, 5, 5)
    self.mock_now(now)
    str_now = unicode(now.strftime(self.DATETIME_NO_MICRO))
    self.set_as_user()

    request = swarming_rpcs.NewTaskRequest(
        expiration_secs=30,
        name='job1',
        priority=200,
        properties=swarming_rpcs.TaskProperties(
            command=['python', 'run_test.py'],
            dimensions=[
              swarming_rpcs.StringPair(key='os', value='Amiga'),
            ],
            execution_timeout_secs=3600,
            io_timeout_secs=1200,
            idempotent=True),
        tags=['foo:bar'],
        user='joe@localhost')
    expected = {
      u'request': {
        u'authenticated': u'user:[email protected]',
        u'created_ts': str_now,
        u'expiration_secs': u'30',
        u'name': u'job1',
        u'priority': u'200',
        u'properties': {
          u'command': [u'python', u'run_test.py'],
          u'dimensions': [
            {u'key': u'os', u'value': u'Amiga'},
          ],
          u'execution_timeout_secs': u'3600',
          u'grace_period_secs': u'30',
          u'idempotent': True,
          u'io_timeout_secs': u'1200',
        },
        u'tags': [
          u'foo:bar',
          u'os:Amiga',
          u'priority:200',
          u'user:joe@localhost',
        ],
        u'user': u'joe@localhost',
      },
      u'task_id': u'63dabe8006610',
      u'task_result': {
        u'bot_dimensions': [
          {u'key': u'id', u'value': [u'bot1']},
          {u'key': u'os', u'value': [u'Amiga']},
        ],
        u'bot_id': u'bot1',
        u'bot_version': self.bot_version,
        u'completed_ts': u'2010-01-02T03:04:05',
        u'cost_saved_usd': 0.1,
        u'created_ts': u'2010-01-02T05:05:05',
        u'deduped_from': u'5cee488008811',
        u'duration': 0.1,
        u'exit_code': u'0',
        u'failure': False,
        u'internal_failure': False,
        u'modified_ts': u'2010-01-02T05:05:05',
        u'name': u'job1',
        u'server_versions': [u'v1a'],
        u'started_ts': u'2010-01-02T03:04:05',
        u'state': u'COMPLETED',
        u'tags': [
          u'foo:bar',
          u'os:Amiga',
          u'priority:200',
          u'user:joe@localhost',
        ],
        u'task_id': u'63dabe8006610',
        u'try_number': u'0',
        u'user': u'joe@localhost',
      },
    }
    response = self.call_api('new', body=message_to_dict(request))
    self.assertEqual(expected, response.json)

    request = swarming_rpcs.TasksRequest(state=swarming_rpcs.TaskState.DEDUPED)
    expected = {
      u'items': [
        {
          u'bot_dimensions': [
            {u'key': u'id', u'value': [u'bot1']},
            {u'key': u'os', u'value': [u'Amiga']},
          ],
          u'bot_id': u'bot1',
          u'bot_version': self.bot_version,
          u'completed_ts': u'2010-01-02T03:04:05',
          u'cost_saved_usd': 0.1,
          u'created_ts': u'2010-01-02T05:05:05',
          u'deduped_from': u'5cee488008811',
          u'duration': 0.1,
          u'exit_code': u'0',
          u'failure': False,
          u'internal_failure': False,
          u'modified_ts': u'2010-01-02T05:05:05',
          u'name': u'job1',
          u'server_versions': [u'v1a'],
          u'started_ts': u'2010-01-02T03:04:05',
          u'state': u'COMPLETED',
          u'tags': [
            u'foo:bar',
            u'os:Amiga',
            u'priority:200',
            u'user:joe@localhost',
          ],
          u'task_id': u'63dabe8006610',
          u'try_number': u'0',
          u'user': u'joe@localhost',
        },
      ],
      u'now': str_now,
    }
    self.assertEqual(
        expected,
        self.call_api('list', body=message_to_dict(request)).json)

    start = utils.datetime_to_timestamp(now) / 1000000. - 1
    end = utils.datetime_to_timestamp(now) / 1000000. + 1
    request = swarming_rpcs.TasksCountRequest(
        start=start, end=end, state=swarming_rpcs.TaskState.DEDUPED)
    self.assertEqual(
        {u'now': str_now, u'count': u'1'},
        self.call_api('count', body=message_to_dict(request)).json)
Example #59
0
  def set_gauge(self, value):
    self.descriptor.validate_value(value)
    self.value = value
    self.min_ts = utils.datetime_to_timestamp(utils.utcnow())
    self.max_ts = self.min_ts
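set_gauge pins both min_ts and max_ts to the set time, so a freshly set gauge reports a zero-length interval. A miniature sketch of that bookkeeping (class and names are illustrative):

import time

class GaugeSketch(object):
  """Illustrative miniature: one value, min_ts == max_ts after each set."""

  def __init__(self):
    self.value = None
    self.min_ts = None
    self.max_ts = None

  def set_gauge(self, value):
    self.value = value
    self.min_ts = int(time.time() * 1000000)  # microseconds, like utils
    self.max_ts = self.min_ts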