Example 1
  def _HasFlaggedBinary(self):
    """Returns whether any of the bundle's blockable contents are flagged."""
    query = SantaBundleBinary.query(ancestor=self.key)
    futures = [
        self._PageHasFlaggedBinary(page)
        for page in datastore_utils.Paginate(query, page_size=1000)]
    return any(future.get_result() for future in futures)
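Every example here drives datastore_utils.Paginate, whose implementation isn't shown. A minimal sketch, assuming it wraps ndb's cursor-based fetch_page() (the body below is hypothetical; only the call signature is taken from the examples):

def Paginate(query, page_size=1000):
  """Yields pages of up to page_size results for the given query."""
  cursor = None
  more = True
  while more:
    # fetch_page() returns the next batch of entities plus a cursor for
    # resuming the query where this batch left off.
    results, cursor, more = query.fetch_page(page_size, start_cursor=cursor)
    if results:
      yield results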
Example 2
def Process(host_id):
  """Processes _UnsyncedEvents for a single Windows host.

  Args:
    host_id: The integer ID of this host in Bit9.
  """
  try:
    with datastore_locks.DatastoreLock(
        'bit9-process-%d' % host_id, default_timeout=_PROCESS_LOCK_TIMEOUT,
        default_max_acquire_attempts=_PROCESS_LOCK_MAX_ACQUIRE_ATTEMPTS):

      total_process_count = 0
      start_time = _Now()
      logging.info('Starting a new processing task for %d', host_id)

      # Query for all _UnsyncedEvents that belong to the given host, in batches,
      # and process them until we run out, or the task nears its deadline.
      query = (_UnsyncedEvent.query(_UnsyncedEvent.host_id == host_id)
               .order(_UnsyncedEvent.bit9_id))
      event_pages = datastore_utils.Paginate(query, page_size=25)
      event_page = next(event_pages, None)
      while time_utils.TimeRemains(start_time, _TASK_DURATION) and event_page:
        for unsynced_event in event_page:
          event = api.Event.from_dict(unsynced_event.event)
          signing_chain = [
              api.Certificate.from_dict(cert)
              for cert in unsynced_event.signing_chain
          ]
          file_catalog = event.get_expand(api.Event.file_catalog_id)
          computer = event.get_expand(api.Event.computer_id)

          # Persist the event data.
          persist_futures = [
              _PersistBit9Certificates(signing_chain),
              _PersistBit9Binary(
                  event, file_catalog, signing_chain,
                  datetime.datetime.utcnow()),
              _PersistBanNote(file_catalog),
              _PersistBit9Host(computer, event.timestamp),
              _PersistBit9Events(event, file_catalog, computer, signing_chain)
          ]
          ndb.Future.wait_all(persist_futures)
          for persist_future in persist_futures:
            persist_future.check_success()

          # Now that the event sync has completed successfully, remove the
          # intermediate proto entity.
          unsynced_event.key.delete()

          monitoring.events_processed.Increment()
          total_process_count += 1

        event_page = next(event_pages, None)

    logging.info('Processed %d event(s)', total_process_count)

  except datastore_locks.AcquireLockError:
    logging.info('Unable to acquire datastore lock')
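The loop above bounds its runtime with time_utils.TimeRemains, which isn't shown. A minimal sketch, assuming it simply compares elapsed wall-clock time against a fixed budget (hypothetical body):

def TimeRemains(start_time, duration):
  """Returns True while less than duration has elapsed since start_time."""
  return datetime.datetime.utcnow() - start_time < duration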
Example 3
  def _HasFlaggedCert(self):
    """Returns whether any of the bundle's signing certs are flagged."""
    query = SantaBundleBinary.query(
        projection=[SantaBundleBinary.cert_key], distinct=True,
        ancestor=self.key)
    futures = [
        self._PageHasFlaggedCert(page)
        for page in datastore_utils.Paginate(query, page_size=1000)]
    return any(future.get_result() for future in futures)
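The per-page helpers (_PageHasFlaggedBinary in Example 1, _PageHasFlaggedCert here) aren't shown. One plausible shape, assuming each is an ndb tasklet on the same model class so that all pages are checked concurrently (hypothetical; the flagged property is an assumption):

  @ndb.tasklet
  def _PageHasFlaggedCert(self, page):
    # Resolve every cert key referenced by this page of binaries in parallel.
    certs = yield ndb.get_multi_async(binary.cert_key for binary in page)
    raise ndb.Return(any(cert is not None and cert.flagged for cert in certs))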
Example 4
def Dispatch():
  """Dispatches per-host tasks onto the event processing queue."""
  total_dispatch_count = 0
  logging.info('Starting a new dispatch task')

  # Query for all distinct host_id values among the _UnsyncedEvents, in
  # batches, and dispatch a processing task for each host found.
  query = _UnsyncedEvent.query(
      projection=[_UnsyncedEvent.host_id], distinct=True)
  for event_page in datastore_utils.Paginate(query, page_size=25):
    host_ids = [event.host_id for event in event_page]
    for host_id in host_ids:
      deferred.defer(Process, host_id, _queue=constants.TASK_QUEUE.BIT9_PROCESS)
      total_dispatch_count += 1

  logging.info('Dispatched %d task(s)', total_dispatch_count)
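Dispatch itself presumably runs on a schedule. A minimal cron-style handler (hypothetical wiring; the URL and handler name are illustrative, not taken from the examples):

import webapp2

class DispatchHandler(webapp2.RequestHandler):
  """Hypothetical cron endpoint that kicks off a dispatch pass."""

  def get(self):
    Dispatch()

app = webapp2.WSGIApplication([('/cron/bit9/dispatch', DispatchHandler)])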
Example 5
def _CopyLocalRules(user_key, dest_host_id):
  """Creates copies of all local rules for the new host."""

  logging.info('Copying rules for user %s to host %s', user_key.id(),
               dest_host_id)

  # Pick any host owned by the user to copy rules from. Exclude hosts that
  # haven't completed a full sync because they won't have a complete rule set.
  username = user_utils.EmailToUsername(user_key.id())
  query = host_models.SantaHost.query(
      host_models.SantaHost.primary_user == username,
      host_models.SantaHost.last_postflight_dt != None)  # pylint: disable=g-equals-none
  src_host = query.get()
  if src_host is None:
    logging.warning('User %s has no hosts to copy from', username)
    return datastore_utils.GetNoOpFuture()
  logging.info('Copying local rules from %s', src_host.key.id())

  # Query for all SantaRules for the given user on the chosen host.
  query = rule_models.SantaRule.query(
      rule_models.SantaRule.host_id == src_host.key.id(),
      rule_models.SantaRule.user_key == user_key)

  # Copy the local rules to the new host.
  new_rules = []
  for src_rule_page in datastore_utils.Paginate(query):
    for src_rule in src_rule_page:
      logging.info('Copying local rule for %s', src_rule.key.parent().id())
      new_rule = datastore_utils.CopyEntity(
          src_rule,
          new_parent=src_rule.key.parent(),
          host_id=dest_host_id,
          user_key=user_key)
      new_rules.append(new_rule)
      new_rule.InsertBigQueryRow()

  logging.info('Copying %d rule(s) to host %s', len(new_rules), dest_host_id)
  futures = ndb.put_multi_async(new_rules)
  return datastore_utils.GetMultiFuture(futures)
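Neither future helper used above is shown. Plausible sketches, assuming GetNoOpFuture returns an already-resolved future and GetMultiFuture fans several futures into a single one (hypothetical bodies built on ndb primitives):

def GetNoOpFuture(result=None):
  """Returns a Future that is already resolved with result."""
  future = ndb.Future()
  future.set_result(result)
  return future

def GetMultiFuture(futures):
  """Returns a Future that resolves once all the given futures resolve."""
  multi_future = ndb.MultiFuture()
  for future in futures:
    multi_future.add_dependent(future)
  multi_future.complete()
  return multi_future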
Example 6
  def testSuccess(self):

    page_size = 10
    for entity_count in xrange(50):

      # Create some number of entities.
      CreateEntities(entity_count)

      # Verify that we get the expected number of pages.
      pages = list(
          datastore_utils.Paginate(TestModel.query(), page_size=page_size))
      expected_page_count = int(math.ceil(float(entity_count) / page_size))
      self.assertLen(pages, expected_page_count)

      # Verify that we get the expected number of entities.
      entities = list(itertools.chain(*pages))
      self.assertLen(entities, entity_count)

      # Delete everything.
      for entity in entities:
        entity.key.delete()
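CreateEntities and TestModel are defined elsewhere in the test module; minimal versions might look like this (hypothetical, assuming TestModel needs no properties beyond its key):

class TestModel(ndb.Model):
  """Trivial model used only to exercise Paginate."""

def CreateEntities(count):
  """Puts count empty TestModel entities into the datastore."""
  ndb.put_multi([TestModel() for _ in xrange(count)])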