def testEmptyQueue(self):
    """An empty HuntResultCollection yields no claimable notifications."""
    # Create an empty HuntResultCollection.
    collection_urn = rdfvalue.RDFURN("aff4:/testEmptyQueue/collection")
    hunts_results.HuntResultCollection(collection_urn, token=self.token)

    # The queue starts empty, and returns no notifications:
    # no collection URN and an empty notification list.
    results = hunts_results.HuntResultQueue.ClaimNotificationsForCollection(
        token=self.token)
    self.assertIsNone(results[0])
    self.assertEqual([], results[1])
def _OpenCollectionPath(coll_path, token=None):
    """Tries to open various types of collections at the given path."""
    # Prefer a hunt result collection, but only when its first record
    # carries a real payload.
    hunt_coll = results.HuntResultCollection(coll_path, token=token)
    if hunt_coll and hunt_coll[0].payload:
        return hunt_coll

    # Otherwise fall back to a generic indexed collection, if non-empty.
    # NOTE(review): falls off the end (returns None) when neither matches —
    # presumably callers handle that; confirm against call sites.
    indexed_coll = sequential_collection.GeneralIndexedCollection(
        coll_path, token=token)
    if indexed_coll:
        return indexed_coll
def testGetValuesForExportHuntResultCollection(self):
    """GetValuesForExport sees the single result stored in the collection."""
    collection = results.HuntResultCollection("aff4:/huntcoll", token=self.token)
    stat_entry = rdf_client.StatEntry(
        pathspec=rdf_paths.PathSpec(path="testfile", pathtype="OS"))
    collection.Add(
        rdf_flows.GrrMessage(payload=stat_entry, source=self.client_id))

    plugin = collection_plugin.CollectionExportPlugin()
    # Stand in for parsed command-line args with just the attributes the
    # plugin reads.
    mock_args = mock.Mock()
    mock_args.path = rdfvalue.RDFURN("aff4:/huntcoll")
    mock_args.no_legacy_warning_pause = True

    self.assertEqual(len(plugin.GetValuesForExport(mock_args)), 1)
def _OpenCollectionPath(coll_path, token=None):
    """Tries to open various types of collections at the given path."""
    # Legacy RDFValueCollections are returned as-is.
    aff4_obj = aff4.FACTORY.Open(coll_path, token=token)
    if aff4_obj.__class__.__name__ == "RDFValueCollection":
        return aff4_obj

    # A hunt result collection counts only if its first item has a payload.
    hunt_coll = results.HuntResultCollection(coll_path, token=token)
    if hunt_coll and hunt_coll[0].payload:
        return hunt_coll

    # Fall back to the generic indexed collection when it is non-empty.
    generic_coll = sequential_collection.GeneralIndexedCollection(
        coll_path, token=token)
    if generic_coll:
        return generic_coll
def testExportCollectionWithEmailPlugin(self):
    """Exporting a one-result collection via the email plugin sends one mail."""
    # Create a collection with URNs to some files.
    collection = results.HuntResultCollection("aff4:/testcoll", token=self.token)
    collection.Add(
        rdf_flows.GrrMessage(
            payload=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testfile", pathtype="OS")),
            source=self.client_id))

    plugin = collection_plugin.CollectionExportPlugin()
    parser = argparse.ArgumentParser()
    plugin.ConfigureArgParser(parser)

    # Captures outgoing mail instead of actually sending it.
    def SendEmail(address, sender, title, message, **_):
        self.email_messages.append(
            dict(address=address, sender=sender, title=title, message=message))

    email_address = "notify@%s" % config_lib.CONFIG["Logging.domain"]
    with utils.Stubber(email_alerts.EMAIL_ALERTER, "SendEmail", SendEmail):
        self.email_messages = []
        plugin.Run(
            parser.parse_args(args=[
                "--no_legacy_warning_pause",
                "--path",
                "aff4:/testcoll",
                email_plugin.EmailOutputPlugin.name,
                "--email_address",
                email_address,
                "--emails_limit",
                "100",
            ]))

        # Exactly one email must have been captured; the length check above
        # fails (and raises) before the direct index could ever go wrong.
        self.assertEqual(len(self.email_messages), 1)
        msg = self.email_messages[0]
        self.assertEqual(msg["address"], email_address)
        self.assertEqual("GRR got a new result in aff4:/testcoll.", msg["title"])
        self.assertTrue(
            "GRR got a new result in aff4:/testcoll" in msg["message"])
        self.assertTrue("(Host-0)" in msg["message"])
def testNotificationsContainTimestamps(self):
    """Claimed notifications carry (ts, suffix) pairs that resolve records."""
    collection_urn = rdfvalue.RDFURN(
        "aff4:/testNotificationsContainTimestamps/collection")
    for i in range(5):
        hunts_results.HuntResultCollection.StaticAdd(
            collection_urn, self.token, rdf_flows.GrrMessage(request_id=i))

    # If we claim results, we should get all 5.
    results = hunts_results.HuntResultQueue.ClaimNotificationsForCollection(
        token=self.token)
    self.assertEqual(collection_urn, results[0])
    self.assertEqual(5, len(results[1]))

    # Read all the results, using the contained (ts, suffix) pairs.
    values_read = []
    collection = hunts_results.HuntResultCollection(
        collection_urn, token=self.token)
    for message in collection.MultiResolve(
        [(ts, suffix) for (_, ts, suffix) in results[1]]):
        values_read.append(message.request_id)
    # list(...) keeps the comparison valid on Python 3, where range() is a
    # lazy object and never compares equal to a list; on Python 2 it is a
    # no-op copy.
    self.assertEqual(sorted(values_read), list(range(5)))
def testNotificationsSplitByCollection(self):
    """Claiming returns notifications for one collection at a time."""
    # Create two HuntResultCollections.
    collection_urn_1 = rdfvalue.RDFURN(
        "aff4:/testNotificationsSplitByCollection/collection_1")
    collection_urn_2 = rdfvalue.RDFURN(
        "aff4:/testNotificationsSplitByCollection/collection_2")

    # Add 100 records to each collection, in an interleaved manner.
    for i in range(100):
        hunts_results.HuntResultCollection.StaticAdd(
            collection_urn_1, self.token, rdf_flows.GrrMessage(request_id=i))
        hunts_results.HuntResultCollection.StaticAdd(
            collection_urn_2, self.token,
            rdf_flows.GrrMessage(request_id=100 + i))

    # The first result was added to collection 1, so this should return
    # all 100 results for collection 1.
    results_1 = hunts_results.HuntResultQueue.ClaimNotificationsForCollection(
        token=self.token)
    self.assertEqual(collection_urn_1, results_1[0])
    self.assertEqual(100, len(results_1[1]))

    # The first call claimed all the notifications for collection 1. These are
    # claimed, so another call should skip them and give all notifications for
    # collection 2.
    results_2 = hunts_results.HuntResultQueue.ClaimNotificationsForCollection(
        token=self.token)
    self.assertEqual(collection_urn_2, results_2[0])
    self.assertEqual(100, len(results_2[1]))

    values_read = []
    collection_2 = hunts_results.HuntResultCollection(
        collection_urn_2, token=self.token)
    for message in collection_2.MultiResolve(
        [(ts, suffix) for (_, ts, suffix) in results_2[1]]):
        values_read.append(message.request_id)
    # list(...) keeps the comparison valid on Python 3, where range() is a
    # lazy object and never compares equal to a list; on Python 2 it is a
    # no-op copy.
    self.assertEqual(sorted(values_read), list(range(100, 200)))
def testDownloadHuntResultCollection(self):
    """Check we can download files references in HuntResultCollection."""
    # Create a collection with URNs to some files.
    collection = results.HuntResultCollection(
        self.collection_urn, token=self.token)
    # Payloads cover the various result types the downloader must handle:
    # a raw URN, a StatEntry, a FileFinderResult, and a downloader result.
    payloads = [
        rdfvalue.RDFURN(self.out.Add("testfile1")),
        rdf_client.StatEntry(
            pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testfile5", pathtype="OS"))),
        collectors.ArtifactFilesDownloaderResult(
            downloaded_file=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testfile6", pathtype="OS"))),
    ]
    for payload in payloads:
        collection.AddAsMessage(payload, self.client_id)
    self._VerifyDownload()
def GetValuesForExport(self, args):
    """Opens the HuntResultCollection at args.path for export."""
    collection_path = args.path
    return results.HuntResultCollection(
        collection_path, token=data_store.default_token)