Example #1
def _PersistBit9Certificates(signing_chain):
    """Creates Bit9Certificates from the given Event protobuf.

    Args:
      signing_chain: List[api.Certificate] the signing chain of the event.

    Returns:
      An ndb.Future that resolves when all certs are created.
    """
    if not signing_chain:
        return datastore_utils.GetNoOpFuture()

    to_create = []
    for cert in signing_chain:
        thumbprint = cert.thumbprint
        existing_cert = bit9.Bit9Certificate.get_by_id(thumbprint)
        if existing_cert is None:
            cert = bit9.Bit9Certificate(id=thumbprint,
                                        id_type=cert.thumbprint_algorithm,
                                        valid_from_dt=cert.valid_from,
                                        valid_to_dt=cert.valid_to)

            # Insert a row into the Certificate table. Allow the timestamp to be
            # generated within InsertBigQueryRow(). The Blockable.recorded_dt Property
            # is set to auto_now_add, but this isn't filled in until persist time.
            cert.InsertBigQueryRow(constants.BLOCK_ACTION.FIRST_SEEN)

            to_create.append(cert)

    futures = ndb.put_multi_async(to_create)
    return datastore_utils.GetMultiFuture(futures)
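
As a usage sketch relying only on the documented contract above (the call site itself is hypothetical), the returned future blocks like any other ndb future:

# Hypothetical call site: start the async certificate writes, then block until
# every put_multi_async() write (or the immediate no-op future) has resolved.
future = _PersistBit9Certificates(signing_chain)
future.get_result()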
Example #2
File: sync_test.py Project: crudbug/upvote
  def testEventsExist(self):
    event_count = 5
    host_id = _CreateUnsyncedEvents(events_per_host=event_count)[0]

    # Patch out the various _Persist methods since they're tested below.
    methods = [
        '_PersistBit9Certificates', '_PersistBit9Binary', '_PersistBanNote',
        '_PersistBit9Host', '_PersistBit9Events'
    ]
    for method in methods:
      self.Patch(sync, method, return_value=model_utils.GetNoOpFuture())

    sync.Process(host_id)

    # Verify all usage of the DatastoreLock.
    self.assertTrue(self.mock_lock.__enter__.called)
    self.assertTrue(self.mock_lock.__exit__.called)

    # Verify everything was persisted.
    self.assertEqual(event_count, sync._PersistBit9Certificates.call_count)
    self.assertEqual(event_count, sync._PersistBit9Binary.call_count)
    self.assertEqual(event_count, sync._PersistBanNote.call_count)
    self.assertEqual(event_count, sync._PersistBit9Host.call_count)
    self.assertEqual(event_count, sync._PersistBit9Events.call_count)

    self.assertEqual(event_count,
                     self.mock_events_processed.Increment.call_count)
Example #3
  def _CreateNewLocalRules(self, uuid, user_key):
    """Creates copies of all local rules for the new host."""
    # Pick any host owned by the user to copy rules from. Exclude hosts that
    # haven't completed a full sync because they won't have a complete rule set.
    # NOTE: Because we expect all hosts owned by a user to have the same local
    # rules, we should get the same rule set with any one of the user's hosts.
    username = user_map.EmailToUsername(user_key.id())
    host_query = santa_db.SantaHost.query(
        santa_db.SantaHost.primary_user == username,
        santa_db.SantaHost.last_postflight_dt != None)  # pylint: disable=g-equals-none
    a_host = host_query.get()
    if a_host is None:
      return utils.GetNoOpFuture()

    # Get all local rules from that host.
    rules_query = santa_db.SantaRule.query(
        santa_db.SantaRule.host_id == a_host.key.id(),
        santa_db.SantaRule.in_effect == True)  # pylint: disable=g-explicit-bool-comparison

    # Copy the local rules to the new host.
    new_rules = []
    for batch in query_utils.Paginate(rules_query):
      for rule in batch:
        new_rule = utils.CopyEntity(
            rule, new_parent=rule.key.parent(), host_id=uuid, user_key=user_key)
        new_rules.append(new_rule)

    futures = ndb.put_multi_async(new_rules)
    return utils.GetMultiFuture(futures)
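
For reference, a cursor-based paginator like the query_utils.Paginate() / datastore_utils.Paginate() calls in these examples could be sketched as follows; this is an assumption-driven sketch (the page_size default and the generator shape are guesses), not the project's actual implementation:

def Paginate(query, page_size=100):
  # Sketch: yield successive batches of results from an ndb query, advancing a
  # datastore cursor until no more results remain.
  cursor = None
  more = True
  while more:
    results, cursor, more = query.fetch_page(page_size, start_cursor=cursor)
    if results:
      yield results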
Example #4
File: sync.py Project: crudbug/upvote
def _PersistBit9Certificates(signing_chain):
    """Creates Bit9Certificates from the given Event protobuf.

    Args:
      signing_chain: List[api.Certificate] the signing chain of the event.

    Returns:
      An ndb.Future that resolves when all certs are created.
    """
    if not signing_chain:
        return datastore_utils.GetNoOpFuture()

    to_create = []
    for cert in signing_chain:
        thumbprint = cert.thumbprint
        existing_cert = bit9.Bit9Certificate.get_by_id(thumbprint)
        if existing_cert is None:
            cert = bit9.Bit9Certificate(id=thumbprint,
                                        id_type=cert.thumbprint_algorithm,
                                        valid_from_dt=cert.valid_from,
                                        valid_to_dt=cert.valid_to)

            cert.PersistRow(constants.BLOCK_ACTION.FIRST_SEEN,
                            timestamp=cert.recorded_dt)

            to_create.append(cert)

    futures = ndb.put_multi_async(to_create)
    return datastore_utils.GetMultiFuture(futures)
Example #5
def _GetBlockableContext(blockables):
  """Adds relevant entities corresponding to the listed Blockables.

  The only entity included (if present) is the Certificate associated with
  each Blockable.

  Args:
    blockables: list of base.Blockables, The blockables for which context should be fetched.

  Returns:
    A list of dicts where each dict is of the form:
        {'blockable': Blockable, 'cert': Certificate}
    If the cert is not found, that dict entry is present but set to None.
  """
  # Fetch the Certificate associated with the Blockable.
  cert_futures = []
  for blockable in blockables:
    if isinstance(blockable, base_models.Binary) and blockable.cert_id:
      cert_future = ndb.Key(
          santa_models.SantaCertificate, blockable.cert_id).get_async()
    else:
      cert_future = datastore_utils.GetNoOpFuture()
    cert_futures.append(cert_future)

  # Merge all Blockable context entities into their associated dicts.
  events_with_context = []
  for i, blockable in enumerate(blockables):
    context_dict = {
        'blockable': blockable,
        'cert': cert_futures[i].get_result(),
    }
    events_with_context.append(context_dict)

  return events_with_context
Example #6
File: sync.py Project: crudbug/upvote
def _PersistBanNote(file_catalog):
    """Creates a Note entity containing a ban description if needed."""

    tuples = [(file_catalog.certificate_state, 'certificate'),
              (file_catalog.file_state, 'file'),
              (file_catalog.publisher_state, 'publisher')]

    ban_strings = sorted([
        'Banned by %s' % string for state, string in tuples
        if state == bit9_constants.APPROVAL_STATE.BANNED
    ])

    if ban_strings:
        full_message = '\n'.join(ban_strings)

        blockable_key = ndb.Key(bit9.Bit9Binary, file_catalog.sha256)
        note_key = base.Note.GenerateKey(full_message, blockable_key)

        if note_key.get() is None:
            logging.info('Persisting new ban Note for %s: %s',
                         file_catalog.sha256, ', '.join(ban_strings))
            note = base.Note(key=note_key, message=full_message)
            return note.put_async()

    return datastore_utils.GetNoOpFuture()
Example #7
    def CreateAsync(cls, **row_params):
        if not settings.ENV.ENABLE_BIGQUERY_STREAMING:
            return model_utils.GetNoOpFuture()

        row = cls(**row_params)
        future = row.put_async()
        _PERSISTED_METRIC.Increment()
        return future
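
A hypothetical call site might look like the following; the ExecutionRow class appears in Example #12, but the field names here are made-up placeholders and only the CreateAsync() contract above is taken from the source:

# Hypothetical usage (field names are placeholders): stream a row and block on
# the write; when BigQuery streaming is disabled this resolves immediately.
future = bigquery_db.ExecutionRow.CreateAsync(sha256=some_sha256, timestamp=now)
future.get_result()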
Example #8
    def add_async(self,
                  queue_name=constants.TASK_QUEUE.DEFAULT,
                  transactional=False):
        ctx_id = id(ndb.get_context())

        force_commit = (_COMMIT_KEY in self.headers)
        grouping_enabled = (ctx_id in _DELAYED_TASKS)
        if transactional and grouping_enabled and not force_commit:
            _DELAYED_TASKS[ctx_id][queue_name].append(self)
            return utils.GetNoOpFuture()
        else:
            return self.add_async_forced(queue_name, transactional)
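
For context, the grouped tasks presumably get flushed once the enclosing work commits; the FlushDelayedTasks helper below is purely hypothetical and only illustrates the _DELAYED_TASKS structure used above (a per-context dict keyed by queue name), not the project's real commit path:

def FlushDelayedTasks(ctx_id):
  # Hypothetical helper: submit every task that add_async() deferred for this
  # ndb context, then drop the per-context grouping entry. The non-transactional
  # re-add is an assumption about how the deferral is resolved.
  futures = []
  for queue_name, tasks in _DELAYED_TASKS.pop(ctx_id, {}).items():
    for task in tasks:
      futures.append(task.add_async_forced(queue_name, False))
  return utils.GetMultiFuture(futures) if futures else utils.GetNoOpFuture()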
Example #9
  def _GenerateRemoveRules(self, existing_rules):
    # Create removal rules on each host for which a rule exists.
    host_ids = set(rule.host_id for rule in existing_rules)
    removal_rules = []
    for host_id in host_ids:
      removal_rules.append(
          self._GenerateRule(host_id=host_id,
                             policy=constants.RULE_POLICY.REMOVE,
                             in_effect=True))
    put_futures = ndb.put_multi_async(removal_rules)
    future = model_utils.GetMultiFuture(put_futures)
    future.add_callback(self._CreateRuleChangeSet,
                        model_utils.GetNoOpFuture(removal_rules),
                        constants.RULE_POLICY.REMOVE)
Example #10
File: sync_test.py Project: crudbug/upvote
  def testInsertsCertificateRow(self):
    event, cert = _CreateEventAndCert()
    sync._UnsyncedEvent.Generate(event, [cert]).put()

    # Patch out all the methods except _PersistBit9Certificates.
    methods = [
        '_PersistBit9Binary', '_PersistBanNote',
        '_PersistBit9Host', '_PersistBit9Events']
    for method in methods:
      self.Patch(sync, method, return_value=model_utils.GetNoOpFuture())

    sync.Process(event.computer_id)

    # Should be 1 Task for the CertificateRow caused by the event.
    self.assertBigQueryInsertions([constants.BIGQUERY_TABLE.CERTIFICATE])
Example #11
File: sync_test.py Project: crudbug/upvote
  def testInsertsExecutionRow(self):
    event_count = 3
    host_id = _CreateUnsyncedEvents(events_per_host=event_count)[0]

    # Patch out all the methods except _PersistBit9Events.
    methods = [
        '_PersistBit9Certificates', '_PersistBit9Binary',
        '_PersistBanNote', '_PersistBit9Host']
    for method in methods:
      self.Patch(sync, method, return_value=model_utils.GetNoOpFuture())

    sync.Process(host_id)

    # Should be 3 ExecutionRows since 3 Unsynced Events were created.
    self.assertBigQueryInsertions(
        [constants.BIGQUERY_TABLE.EXECUTION] * event_count)
Example #12
  def testPersistsExecutionRow(self):
    event_count = 3
    host_id = self._CreateUnsyncedEvents(events_per_host=event_count)[0]

    # Patch out all the methods except _PersistBit9Events.
    methods = [
        '_PersistBit9Certificates', '_PersistBit9Binary',
        '_PersistBanNote', '_PersistBit9Host']
    for method in methods:
      self.Patch(sync, method, return_value=model_utils.GetNoOpFuture())

    sync.Process(host_id)

    # Should be 3 ExecutionRows since 3 Unsynced Events were created.
    self.assertTaskCount(constants.TASK_QUEUE.BQ_PERSISTENCE, event_count)
    self.RunDeferredTasks(constants.TASK_QUEUE.BQ_PERSISTENCE)
    self.assertEntityCount(bigquery_db.ExecutionRow, event_count)
Example #13
  def testPersistsCertificateRow(self):
    event, signing_chain = _CreateEventTuple()
    sync._UnsyncedEvent.Generate(event, signing_chain).put()

    # Patch out all the methods except _PersistBit9Certificates.
    methods = [
        '_PersistBit9Binary', '_PersistBanNote',
        '_PersistBit9Host', '_PersistBit9Events']
    for method in methods:
      self.Patch(sync, method, return_value=model_utils.GetNoOpFuture())

    sync.Process(event.computer_id)

    # Should be 1 Task for the CertificateRow caused by the event.
    self.assertTaskCount(constants.TASK_QUEUE.BQ_PERSISTENCE, 1)
    self.RunDeferredTasks(constants.TASK_QUEUE.BQ_PERSISTENCE)
    self.assertEntityCount(bigquery_db.CertificateRow, 1)
Example #14
def _CopyLocalRules(user_key, dest_host_id):
    """Creates copies of all local rules for the new host."""

    logging.info('Copying rules for user %s to host %s', user_key.id(),
                 dest_host_id)

    # Pick any host owned by the user to copy rules from. Exclude hosts that
    # haven't completed a full sync because they won't have a complete rule set.
    username = user_utils.EmailToUsername(user_key.id())
    query = host_models.SantaHost.query(
        host_models.SantaHost.primary_user == username,
        host_models.SantaHost.last_postflight_dt != None)  # pylint: disable=g-equals-none
    src_host = query.get()
    if src_host is None:
        logging.warning('User %s has no hosts to copy from', username)
        return datastore_utils.GetNoOpFuture()
    else:
        logging.info('Copying local rules from %s', src_host.key.id())

    # Query for all SantaRules for the given user on the chosen host.
    query = rule_models.SantaRule.query(
        rule_models.SantaRule.host_id == src_host.key.id(),
        rule_models.SantaRule.user_key == user_key)

    # Copy the local rules to the new host.
    new_rules = []
    for src_rules in datastore_utils.Paginate(query):
        for src_rule in src_rules:
            logging.info('Copying local rule for %s',
                         src_rule.key.parent().id())
            new_rule = datastore_utils.CopyEntity(
                src_rule,
                new_parent=src_rule.key.parent(),
                host_id=dest_host_id,
                user_key=user_key)
            new_rules.append(new_rule)
            new_rule.InsertBigQueryRow()

    logging.info('Copying %d rule(s) to host %s', len(new_rules), dest_host_id)
    futures = ndb.put_multi_async(new_rules)
    return datastore_utils.GetMultiFuture(futures)
Example #15
File: events.py Project: crudbug/upvote
def _GetEventContext(events):
    """Adds relevant entities corresponding to the listed Events.

    The entities included (if present) are the Blockable run, the Certificate
    associated with the run, the Host on which it was run, and the Vote cast by
    the user.

    Args:
      events: list of base.Events, The events for which context should be fetched.

    Returns:
      A list of dicts where each dict is of the form:
          {'event': Event, 'blockable': Blockable, 'cert': Certificate,
           'host': Host, 'vote': Vote}
      If any of the entities are not found (e.g. the user hasn't voted on a
      Blockable), that dict entry is present but set to None.
    """
    host_futures = ndb.get_multi_async(
        ndb.Key(base_models.Host, event.host_id) for event in events)

    # Fetch the entities associated with Event.blockable_key.
    blockable_futures = ndb.get_multi_async(event.blockable_key
                                            for event in events)
    vote_futures = ndb.get_multi_async(
        base_models.Vote.GetKey(event.blockable_key, event.user_key)
        for event in events)

    # Fetch the entities associated with SantaEvent.bundle_key.
    has_bundle = (
        lambda e: isinstance(e, santa_models.SantaEvent) and e.bundle_key)
    bundle_futures = [(event.bundle_key.get_async()
                       if has_bundle(event) else model_utils.GetNoOpFuture())
                      for event in events]
    bundle_vote_futures = [
        (base_models.Vote.GetKey(event.bundle_key, event.user_key).get_async()
         if has_bundle(event) else model_utils.GetNoOpFuture())
        for event in events
    ]

    # Fetch the Certificate associated with the Event.
    cert_futures = []
    for event in events:
        if event.cert_key:
            cert_future = event.cert_key.get_async()
        elif isinstance(event, santa_models.SantaEvent) and event.cert_sha256:
            cert_future = ndb.Key(santa_models.SantaCertificate,
                                  event.cert_sha256).get_async()
        else:
            cert_future = model_utils.GetNoOpFuture()
        cert_futures.append(cert_future)

    # Merge all Event context entities into their associated dicts.
    events_with_context = []
    for i, event in enumerate(events):
        context_dict = {
            'event': event,
            'host': host_futures[i].get_result(),
        }
        bundle = bundle_futures[i].get_result()
        if bundle is None:
            context_dict.update({
                'blockable': blockable_futures[i].get_result(),
                'cert': cert_futures[i].get_result(),
                'vote': vote_futures[i].get_result(),
            })
        else:
            context_dict.update({
                'blockable': bundle,
                'cert': bundle.main_cert_key,
                'vote': bundle_vote_futures[i].get_result(),
            })
        events_with_context.append(context_dict)

    return events_with_context
Example #16
    def post(self, uuid):
        # If the host doesn't have any rules, ignore all the events it generated.
        if not self.host.last_postflight_dt:
            self.respond_json({})
            return

        all_futures = []
        json_events = self.parsed_json.get(_EVENT_UPLOAD.EVENTS)
        logging.info('Syncing %d events', len(json_events))

        # Create cert entities for all signing chains if they don't already exist.
        all_futures.append(self._CreateCertificatesFromJsonEvents(json_events))

        # Filter out bundle upload events because they should not be created as
        # conventional SantaEvents.
        bundle_upload_events = []
        normal_events = []
        # Maps a blockable key to one of its json events.
        blockable_event_map = {}
        for event in json_events:
            decision = event.get(_EVENT_UPLOAD.DECISION)
            if decision == constants.EVENT_TYPE.BUNDLE_BINARY:
                bundle_upload_events.append(event)
            else:
                normal_events.append(event)
            key = self._GetBlockableKeyFromJsonEvent(event)
            blockable_event_map[key] = event

        # Create all SantaBundle entities associated with the non-bundle-upload
        # events to ensure the bundles are present prior to upload.
        all_futures.append(self._CreateAllBundlesFromJsonEvents(normal_events))

        # Create bundle members for bundle upload events.
        bundle_member_future = datastore_utils.GetNoOpFuture()
        if bundle_upload_events:
            logging.info('Syncing %d bundle events', len(bundle_upload_events))
            bundle_member_future = self._CreateAllBundleBinaries(
                bundle_upload_events)
            all_futures.append(bundle_member_future)

        # Create SantaEvent entities from the uploaded JSON events.
        santa_events = []
        for json_event in normal_events:
            events = self._GenerateSantaEventsFromJsonEvent(
                json_event, self.host)
            santa_events.extend(events)

        all_futures.extend(self._CreateEvents(santa_events))

        # Determine which blockables are already known to Upvote.
        unique_blockable_keys = set(blockable_event_map.keys())
        existing_blockable_keys = {
            blockable.key
            for blockable in ndb.get_multi(list(unique_blockable_keys))
            if blockable
        }
        unknown_blockable_keys = unique_blockable_keys - existing_blockable_keys

        # Create previously unknown blockables.
        now = datetime.datetime.utcnow()
        for blockable_key in list(unknown_blockable_keys):
            json_event = blockable_event_map[blockable_key]

            all_futures.append(
                self._CreateBlockableFromJsonEvent(json_event, now))

        # Generate and send the response.
        response_dict = {}

        # NOTE: The bundles-to-upload calculation needs to wait for the
        # bundle members in this upload to be committed and for those bundles'
        # upload statuses to be recalculated.
        bundle_member_future.get_result()
        bundles_to_upload = self._GetBundlesToUpload(json_events).get_result()
        if bundles_to_upload:
            bundle_ids = [bundle_key.id() for bundle_key in bundles_to_upload]
            response_dict.update({
                _EVENT_UPLOAD.EVENT_UPLOAD_BUNDLE_BINARIES:
                bundle_ids,
            })

        # Resolve all futures. This will have the side effect of raising the first
        # exception, if any, that a future in the list raised.
        for future in all_futures:
            future.check_success()

        self.respond_json(response_dict)
Example #17
  def RunAssert():
    fut = utils.GetNoOpFuture()
    fut.add_callback(AssertInTxn)
    fut.add_immediate_callback(AssertInTxn)
    fut.get_result()
Example #18
  def testGetNoOpFuture(self):
    future = utils.GetNoOpFuture()
    self.assertTrue(future.done())
    self.assertIsNone(future.get_result())
Example #19
  def testGetNoOpFuture_Result(self):
    result = 'foobar'
    future = utils.GetNoOpFuture(result)
    self.assertTrue(future.done())
    self.assertEqual(result, future.get_result())
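
Putting the pieces together, GetNoOpFuture() and GetMultiFuture() could plausibly be implemented as below; this is a sketch inferred from the call sites and tests in these examples, not the project's actual datastore_utils/model_utils code:

from google.appengine.ext import ndb

def GetNoOpFuture(result=None):
  # Return an already-completed ndb.Future that resolves to the given result.
  future = ndb.Future()
  future.set_result(result)
  return future

def GetMultiFuture(futures):
  # Return a single future that resolves once every dependent future has.
  multi = ndb.MultiFuture()
  for future in futures:
    multi.add_dependent(future)
  multi.complete()
  return multi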