def _CreateNewLocalRules(self, uuid, user_key):
  """Creates copies of all local rules for the new host."""
  # Pick any host owned by the user to copy rules from. Exclude hosts that
  # haven't completed a full sync because they won't have a complete rule set.
  # NOTE: Because we expect all hosts owned by a user to have the same local
  # rules, we should get the same rule set with any one of the user's hosts.
  username = user_map.EmailToUsername(user_key.id())
  host_query = santa_db.SantaHost.query(
      santa_db.SantaHost.primary_user == username,
      santa_db.SantaHost.last_postflight_dt != None)  # pylint: disable=g-equals-none
  a_host = host_query.get()
  if a_host is None:
    return utils.GetNoOpFuture()

  # Get all local rules from that host.
  rules_query = santa_db.SantaRule.query(
      santa_db.SantaRule.host_id == a_host.key.id(),
      santa_db.SantaRule.in_effect == True)  # pylint: disable=g-explicit-bool-comparison

  # Copy the local rules to the new host.
  new_rules = []
  for batch in query_utils.Paginate(rules_query):
    for rule in batch:
      new_rule = utils.CopyEntity(
          rule,
          new_parent=rule.key.parent(),
          host_id=uuid,
          user_key=user_key)
      new_rules.append(new_rule)

  futures = ndb.put_multi_async(new_rules)
  return utils.GetMultiFuture(futures)
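# The utils.GetNoOpFuture() helper used above is not defined in this section.
# The following is a minimal sketch of what such a helper could look like,
# assuming it simply returns an already-resolved ndb.Future; the actual
# implementation may differ.
from google.appengine.ext import ndb


def GetNoOpFuture(result=None):
  """Returns an ndb.Future that is already resolved with the given result."""
  future = ndb.Future()
  future.set_result(result)
  return future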
def _PersistBit9Certificates(signing_chain):
  """Creates Bit9Certificates from the given Event protobuf.

  Args:
    signing_chain: List[api.Certificate] the signing chain of the event.

  Returns:
    An ndb.Future that resolves when all certs are created.
  """
  if not signing_chain:
    return datastore_utils.GetNoOpFuture()

  to_create = []
  for cert in signing_chain:
    thumbprint = cert.thumbprint
    existing_cert = bit9.Bit9Certificate.get_by_id(thumbprint)
    if existing_cert is None:
      cert = bit9.Bit9Certificate(
          id=thumbprint,
          id_type=cert.thumbprint_algorithm,
          valid_from_dt=cert.valid_from,
          valid_to_dt=cert.valid_to)
      cert.PersistRow(
          constants.BLOCK_ACTION.FIRST_SEEN, timestamp=cert.recorded_dt)
      to_create.append(cert)

  futures = ndb.put_multi_async(to_create)
  return datastore_utils.GetMultiFuture(futures)
def _PersistBit9Certificates(signing_chain):
  """Creates Bit9Certificates from the given Event protobuf.

  Args:
    signing_chain: List[api.Certificate] the signing chain of the event.

  Returns:
    An ndb.Future that resolves when all certs are created.
  """
  if not signing_chain:
    return datastore_utils.GetNoOpFuture()

  to_create = []
  for cert in signing_chain:
    thumbprint = cert.thumbprint
    existing_cert = bit9.Bit9Certificate.get_by_id(thumbprint)
    if existing_cert is None:
      cert = bit9.Bit9Certificate(
          id=thumbprint,
          id_type=cert.thumbprint_algorithm,
          valid_from_dt=cert.valid_from,
          valid_to_dt=cert.valid_to)

      # Insert a row into the Certificate table. Allow the timestamp to be
      # generated within InsertBigQueryRow(). The Blockable.recorded_dt
      # Property is set to auto_now_add, but this isn't filled in until
      # persist time.
      cert.InsertBigQueryRow(constants.BLOCK_ACTION.FIRST_SEEN)

      to_create.append(cert)

  futures = ndb.put_multi_async(to_create)
  return datastore_utils.GetMultiFuture(futures)
def _PersistBit9Events(event, file_catalog, computer, signing_chain):
  """Creates a Bit9Event from the given Event protobuf.

  Args:
    event: The api.Event instance to be synced to Upvote.
    file_catalog: The api.FileCatalog instance associated with this event.
    computer: The api.Computer instance associated with this event.
    signing_chain: List of api.Certificate instances associated with this
        event.

  Returns:
    An ndb.Future that resolves when all events are created.
  """
  logging.info('Creating new Bit9Event')

  host_id = str(computer.id)
  blockable_key = ndb.Key(binary_models.Bit9Binary, file_catalog.sha256)
  host_users = list(bit9_utils.ExtractHostUsers(computer.users))
  occurred_dt = event.timestamp

  _CheckAndResolveAnomalousBlock(blockable_key, host_id)

  new_event = event_models.Bit9Event(
      blockable_key=blockable_key,
      cert_key=_GetCertKey(signing_chain),
      event_type=constants.EVENT_TYPE.BLOCK_BINARY,
      last_blocked_dt=occurred_dt,
      first_blocked_dt=occurred_dt,
      host_id=host_id,
      file_name=event.file_name,
      file_path=event.path_name,
      publisher=file_catalog.publisher,
      version=file_catalog.product_version,
      description=event.description,
      executing_user=bit9_utils.ExtractHostUser(event.user_name),
      bit9_id=event.id)

  tables.EXECUTION.InsertRow(
      sha256=new_event.blockable_key.id(),
      device_id=host_id,
      timestamp=occurred_dt,
      platform=new_event.GetPlatformName(),
      client=new_event.GetClientName(),
      file_path=new_event.file_path,
      file_name=new_event.file_name,
      executing_user=new_event.executing_user,
      associated_users=host_users,
      decision=new_event.event_type)

  keys_to_insert = model_utils.GetEventKeysToInsert(
      new_event, host_users, host_users)

  futures = [_PersistBit9Event(new_event, key) for key in keys_to_insert]
  return datastore_utils.GetMultiFuture(futures)
def _GenerateRemoveRules(self, existing_rules):
  """Generates REMOVE rules for every host that has an existing rule."""
  # Create removal rules on each host for which a rule exists.
  host_ids = set(rule.host_id for rule in existing_rules)
  removal_rules = []
  for host_id in host_ids:
    removal_rules.append(
        self._GenerateRule(
            host_id=host_id,
            policy=constants.RULE_POLICY.REMOVE,
            in_effect=True))
  put_futures = ndb.put_multi_async(removal_rules)
  future = model_utils.GetMultiFuture(put_futures)
  future.add_callback(
      self._CreateRuleChangeSet,
      model_utils.GetNoOpFuture(removal_rules),
      constants.RULE_POLICY.REMOVE)
def testSingleFuture(self):
  f = ndb.Future()
  mf = utils.GetMultiFuture([f])
  self.assertFalse(f.done())
  self.assertFalse(mf.done())

  f.set_result(None)

  self.assertTrue(f.done())
  self.assertFalse(mf.done())

  # Event loop must run for the MultiFuture to be marked as done.
  mf.wait()

  self.assertTrue(mf.done())
def testManyFutures(self):
  futures = [ndb.Future() for _ in xrange(3)]
  mf = utils.GetMultiFuture(futures)
  self.assertFalse(any(f.done() for f in futures))
  self.assertFalse(mf.done())

  for f in futures:
    f.set_result(None)

  self.assertTrue(all(f.done() for f in futures))
  self.assertFalse(mf.done())

  # Event loop must run for the MultiFuture to be marked as done.
  mf.wait()

  self.assertTrue(mf.done())
def _CopyLocalRules(user_key, dest_host_id):
  """Creates copies of all local rules for the new host."""
  logging.info(
      'Copying rules for user %s to host %s', user_key.id(), dest_host_id)

  # Pick any host owned by the user to copy rules from. Exclude hosts that
  # haven't completed a full sync because they won't have a complete rule set.
  username = user_utils.EmailToUsername(user_key.id())
  query = host_models.SantaHost.query(
      host_models.SantaHost.primary_user == username,
      host_models.SantaHost.last_postflight_dt != None)  # pylint: disable=g-equals-none
  src_host = query.get()
  if src_host is None:
    logging.warning('User %s has no hosts to copy from', username)
    return datastore_utils.GetNoOpFuture()
  else:
    logging.info('Copying local rules from %s', src_host.key.id())

  # Query for all SantaRules for the given user on the chosen host.
  query = rule_models.SantaRule.query(
      rule_models.SantaRule.host_id == src_host.key.id(),
      rule_models.SantaRule.user_key == user_key)

  # Copy the local rules to the new host.
  new_rules = []
  for src_rules in datastore_utils.Paginate(query):
    for src_rule in src_rules:
      logging.info('Copying local rule for %s', src_rule.key.parent().id())
      new_rule = datastore_utils.CopyEntity(
          src_rule,
          new_parent=src_rule.key.parent(),
          host_id=dest_host_id,
          user_key=user_key)
      new_rules.append(new_rule)
      new_rule.InsertBigQueryRow()

  logging.info('Copying %d rule(s) to host %s', len(new_rules), dest_host_id)
  futures = ndb.put_multi_async(new_rules)
  return datastore_utils.GetMultiFuture(futures)
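# Both _CreateNewLocalRules() and _CopyLocalRules() iterate over query results
# in batches via a Paginate() helper that is not shown in this section. The
# following is a plausible sketch of such a helper, assuming it wraps ndb's
# cursor-based fetch_page(); the actual datastore_utils.Paginate and its
# default page size may differ.
def Paginate(query, page_size=500):
  """Yields successive batches of results for the given ndb query."""
  cursor = None
  more = True
  while more:
    results, cursor, more = query.fetch_page(page_size, start_cursor=cursor)
    if results:
      yield results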
def testCantModifyResult(self):
  f = ndb.Future()
  mf = utils.GetMultiFuture([f])
  with self.assertRaises(RuntimeError):
    mf.add_dependent(ndb.Future())
def testNoInput(self):
  mf = utils.GetMultiFuture([])
  self.assertTrue(mf.done())
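# The tests above pin down the expected behavior of utils.GetMultiFuture():
# the aggregate future is immediately done for an empty input, it rejects new
# dependents after construction, and it only completes once every dependent
# has resolved and the ndb event loop has run. A minimal sketch that satisfies
# those expectations, assuming it wraps ndb.MultiFuture; the actual
# implementation may differ.
from google.appengine.ext import ndb


def GetMultiFuture(futures):
  """Aggregates the given futures into a single ndb.MultiFuture."""
  multi_future = ndb.MultiFuture()
  for future in futures:
    multi_future.add_dependent(future)
  # complete() closes the MultiFuture: further add_dependent() calls raise
  # RuntimeError, and the future resolves once all dependents have resolved.
  multi_future.complete()
  return multi_future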