def Run(self):
  """Regression check for starting a RemoteGetFile flow over the HTTP API."""

  def ReplaceFlowId():
    # The flow id is generated, so map the single created flow to a fixed
    # value to keep the recorded regression data stable.
    flows_dir_fd = aff4.FACTORY.Open(
        self.client_id.Add("flows"), token=self.token)
    flow_urn = list(flows_dir_fd.ListChildren())[0]
    return {flow_urn.Basename(): "W:ABCDEF"}

  with test_lib.FakeTime(42):
    request_body = {
        "hostname": self.client_id.Basename(),
        "paths": ["/tmp/test"],
    }
    self.Check(
        "POST",
        "/api/clients/%s/flows/remotegetfile" % self.client_id.Basename(),
        request_body,
        replace=ReplaceFlowId)
def setUp(self):
  """Builds a 10-item RDFValueCollection fixture and the renderer under test."""
  super(ApiRDFValueCollectionRendererTest, self).setUp()

  collection_urn = "aff4:/tmp/foo/bar"
  with test_lib.FakeTime(42):
    with aff4.FACTORY.Create(
        collection_urn, "RDFValueCollection", token=self.token) as fd:
      for index in range(10):
        fd.Add(
            rdfvalue.PathSpec(path="/var/os/tmp-%d" % index, pathtype="OS"))

  self.fd = aff4.FACTORY.Open(collection_urn, token=self.token)
  self.renderer = api_aff4_object_renderers.ApiRDFValueCollectionRenderer()
def testReports(self):
  """Test the reports interface."""
  with self.ACLChecksDisabled():
    # Create two audit events about a week apart so that a timerange filter
    # can select exactly one of them.
    with test_lib.FakeTime(
        rdfvalue.RDFDatetime.FromHumanReadable("2012/12/14")):
      AddFakeAuditLog(
          "Fake audit description 14 Dec.",
          "C.123",
          "User123",
          token=self.token)
    with test_lib.FakeTime(
        rdfvalue.RDFDatetime.FromHumanReadable("2012/12/22")):
      AddFakeAuditLog(
          "Fake audit description 22 Dec.",
          "C.456",
          "User456",
          token=self.token)

    # Make "test" user an admin.
    self.CreateAdminUser("test")

  self.Open("/#/stats/")

  # Go to reports.
  self.Click("css=#MostActiveUsersReportPlugin_anchor i.jstree-icon")
  self.WaitUntil(self.IsTextPresent, "Server | User Breakdown")
  # Before a timerange is selected the report shows no data.
  self.WaitUntil(self.IsTextPresent, "No data to display.")

  # Enter a timerange that only matches one of the two fake events.
  self.Type("css=grr-form-datetime input", "2012-12-21 12:34")
  self.Click("css=button:contains('Show report')")

  # Only the event inside the timerange should be reported.
  self.WaitUntil(self.IsTextPresent, "User456")
  self.WaitUntil(self.IsTextPresent, "100%")
  self.assertFalse(self.IsTextPresent("User123"))
def testIndexedReads(self):
  """Random access via the index must stay correct long after the writes."""
  collection = self._TestCollection(
      "aff4:/sequential_collection/testIndexedReads")
  data_size = 4 * 1024
  for value in range(data_size):
    collection.Add(rdfvalue.RDFInteger(value))

  # Jump ten minutes ahead so reads go through persisted state rather than
  # anything cached at write time.
  with test_lib.FakeTime(rdfvalue.RDFDatetime.Now() +
                         rdfvalue.Duration("10m")):
    # Descending scan near the end of the collection.
    for idx in range(data_size - 1, data_size - 20, -1):
      self.assertEqual(collection[idx], idx)
    # Spot reads around the 1024 mark.
    self.assertEqual(collection[1023], 1023)
    self.assertEqual(collection[1024], 1024)
    self.assertEqual(collection[1025], 1025)
    # Another descending scan further from the end.
    for idx in range(data_size - 1020, data_size - 1040, -1):
      self.assertEqual(collection[idx], idx)
def Run(self):
  """Regression check for the UpdateVfsFileContent API method."""
  test_lib.ClientFixture(self.client_id, token=self.token)

  def ReplaceFlowId():
    # Flow ids are unpredictable; substitute a constant so the recorded
    # output stays stable between runs.
    flows_dir_fd = aff4.FACTORY.Open(
        self.client_id.Add("flows"), token=self.token)
    flow_urn = list(flows_dir_fd.ListChildren())[0]
    return {flow_urn.Basename(): "W:ABCDEF"}

  with test_lib.FakeTime(42):
    args = vfs_plugin.ApiUpdateVfsFileContentArgs(
        client_id=self.client_id.Basename(), file_path=self.file_path)
    self.Check("UpdateVfsFileContent", args=args, replace=ReplaceFlowId)
def testRendersSubrangeOfListOfHuntObjects(self):
  """Offset/count arguments must select the expected slice of hunts."""
  # Hunts are created at t = 0, 1000, ..., 9000 seconds.
  for index in range(10):
    with test_lib.FakeTime(index * 1000):
      self.CreateSampleHunt("hunt_%d" % index, token=self.token)

  result = self.renderer.Render(
      hunt_plugin.ApiHuntsListRendererArgs(offset=2, count=2),
      token=self.token)
  create_times = [item["summary"]["create_time"]["value"]
                  for item in result["items"]]

  # The listing is newest-first, so offset=2 skips the hunts created at
  # t=9000 and t=8000. Values are reported in microseconds.
  self.assertEqual(len(create_times), 2)
  self.assertEqual(create_times[0], 7 * 1000000000)
  self.assertEqual(create_times[1], 6 * 1000000000)
def Run(self):
  """Regression check for the ListAndResetUserNotifications API method."""
  # Create two notifications at distinct fake times so ordering and
  # filtering are deterministic.
  with test_lib.FakeTime(42):
    self._SendNotification(
        notification_type="Discovery",
        subject=str(self.client_id),
        message="<some message>",
        client_id=self.client_id)

  with test_lib.FakeTime(44):
    self._SendNotification(
        notification_type="ViewObject",
        subject=str(self.client_id),
        message="<some other message>",
        client_id=self.client_id)

  # Notifications are pending in this request.
  self.Check("ListAndResetUserNotifications",
             args=user_plugin.ApiListAndResetUserNotificationsArgs())
  # But not anymore in these requests.
  self.Check("ListAndResetUserNotifications",
             args=user_plugin.ApiListAndResetUserNotificationsArgs(
                 offset=1, count=1))
  self.Check("ListAndResetUserNotifications",
             args=user_plugin.ApiListAndResetUserNotificationsArgs(
                 filter="other"))
def Run(self):
  """Regression check for the ListHuntOutputPluginLogs API method."""
  with test_lib.FakeTime(42, increment=1):
    hunt_urn = self.StartHunt(
        description="the hunt",
        output_plugins=[
            output_plugin.OutputPluginDescriptor(
                plugin_name=hunt_plugin_test.DummyHuntTestOutputPlugin.
                __name__,
                plugin_args=hunt_plugin_test.DummyHuntTestOutputPlugin.
                args_type(filename_regex="blah!", fetch_binaries=True))
        ])

    self.client_ids = self.SetupClients(2)
    for index, client_id in enumerate(self.client_ids):
      self.AssignTasksToClients(client_ids=[client_id])
      self.RunHunt(failrate=-1)
      # Process hunt output at distinct, widely-separated fake times so
      # each client produces a separately-timestamped log batch.
      with test_lib.FakeTime(100042 + index * 100):
        self.ProcessHuntOutputPlugins()

  # The hunt id is generated, so substitute a fixed value in the output.
  self.Check("ListHuntOutputPluginLogs",
             args=hunt_plugin.ApiListHuntOutputPluginLogsArgs(
                 hunt_id=hunt_urn.Basename(),
                 plugin_id="DummyHuntTestOutputPlugin_0"),
             replace={hunt_urn.Basename(): "H:123456"})
def Run(self):
  """Regression check for the vfs-update HTTP endpoint."""
  test_lib.ClientFixture(self.client_id, token=self.token)

  def ReplaceFlowId():
    # Replace the generated flow id with a constant so the recorded
    # regression output is stable.
    flows_dir_fd = aff4.FACTORY.Open(
        self.client_id.Add("flows"), token=self.token)
    flow_urn = list(flows_dir_fd.ListChildren())[0]
    return {flow_urn.Basename(): "W:ABCDEF"}

  with test_lib.FakeTime(42):
    self.Check(
        "POST",
        "/api/clients/%s/vfs-update" % self.client_id.Basename(),
        {"file_path": self.file_path},
        replace=ReplaceFlowId)
def Run(self):
  """Regression check for AFF4 object rendering with type_info options."""
  with test_lib.FakeTime(42):
    with aff4.FACTORY.Create(
        "aff4:/foo/bar", "AFF4Object", mode="rw",
        token=self.token) as sample_object:
      # Add labels to have some attributes filled in.
      sample_object.AddLabels("label1", "label2")

  # Render the same object with each supported type_info level.
  self.Check("GET", "/api/aff4/foo/bar")
  self.Check("GET", "/api/aff4/foo/bar?"
             "AFF4Object.type_info=WITH_TYPES")
  self.Check(
      "GET", "/api/aff4/foo/bar?"
      "AFF4Object.type_info=WITH_TYPES_AND_METADATA")
def testClientStatsCollectionHappensEveryMinuteWhenClientIsBusy(self):
  """Tests that client stats are collected more often when client is busy."""
  now = 1000000
  # Pretend we have already sent stats.
  self.client_communicator.client_worker.last_stats_sent_time = (
      rdfvalue.RDFDatetime().FromSecondsFromEpoch(now))
  # Mark the client as busy so the shorter (one-minute) collection
  # interval applies.
  self.client_communicator.client_worker._is_active = True

  with test_lib.FakeTime(now):
    self.client_communicator.client_worker.CheckStats()

  runs = []
  action_cls = actions.ActionPlugin.classes.get("GetClientStatsAuto")
  # Stub out the stats action so we only count invocations instead of
  # actually collecting stats.
  with utils.Stubber(action_cls, "Run", lambda cls, _: runs.append(1)):

    # No stats collection after 30 seconds.
    with test_lib.FakeTime(now + 30):
      self.client_communicator.client_worker.CheckStats()
      self.assertEqual(len(runs), 0)

    # Let 61 seconds pass.
    with test_lib.FakeTime(now + 61):
      self.client_communicator.client_worker.CheckStats()
      # This time the client should collect stats.
      self.assertEqual(len(runs), 1)

    # No stats collection within one minute from the last time.
    with test_lib.FakeTime(now + 61 + 59):
      self.client_communicator.client_worker.CheckStats()
      self.assertEqual(len(runs), 1)

    # Stats collection happens as more than one minute has passed since the
    # last one.
    with test_lib.FakeTime(now + 61 + 61):
      self.client_communicator.client_worker.CheckStats()
      self.assertEqual(len(runs), 2)
def testLogTimestampsArePresentedInUTC(self):
  """Flow log timestamps shown in the UI must be rendered in UTC."""
  with self.ACLChecksDisabled():
    # Run the flow at a fixed time (42 seconds after the epoch) so the
    # expected on-screen timestamp is known exactly.
    with test_lib.FakeTime(42):
      for _ in test_lib.TestFlowHelper("FlowWithOneLogStatement",
                                       self.action_mock,
                                       client_id=self.client_id,
                                       token=self.token):
        pass

  # Navigate to the flow's Log tab in the UI.
  self.Open("/#c=C.0000000000000001")
  self.Click("css=a[grrtarget='client.flows']")
  self.Click("css=td:contains('FlowWithOneLogStatement')")
  self.Click("css=li[heading=Log]")

  # 42 seconds after the epoch, displayed as UTC.
  self.WaitUntil(self.IsTextPresent, "1970-01-01 00:00:42 UTC")
def Run(self):
  """Regression check for listing a hunt's output plugins."""
  with test_lib.FakeTime(42):
    # Create a hunt with a single output plugin. The context manager is
    # used only to flush the hunt object, hence the empty body.
    with self.CreateHunt(
        description="the hunt",
        output_plugins=[
            output_plugin.OutputPluginDescriptor(
                plugin_name=DummyHuntTestOutputPlugin.__name__,
                plugin_args=DummyHuntTestOutputPlugin.args_type(
                    filename_regex="blah!", fetch_binaries=True))
        ]) as hunt_obj:
      pass

  # The hunt id is generated, so substitute a fixed value in the output.
  self.Check("GET",
             "/api/hunts/%s/output-plugins" % hunt_obj.urn.Basename(),
             replace={hunt_obj.urn.Basename(): "H:123456"})
def testRendersSubrangeOfListOfHuntObjects(self):
  """Offset/count must slice the newest-first hunt listing correctly."""
  # Create ten hunts at t = 1000, 2000, ..., 10000 seconds.
  for index in range(1, 11):
    with test_lib.FakeTime(index * 1000):
      self.CreateHunt(description="hunt_%d" % index)

  args = hunt_plugin.ApiListHuntsArgs(offset=2, count=2)
  result = self.handler.Render(args, token=self.token)

  create_times = []
  for item in result["items"]:
    create_times.append(item["summary"]["create_time"]["value"])

  # Newest first: offset=2 skips the hunts created at t=10000 and t=9000.
  # Values are in microseconds.
  self.assertEqual(len(create_times), 2)
  self.assertEqual(create_times[0], 8 * 1000000000)
  self.assertEqual(create_times[1], 7 * 1000000000)
def Run(self):
  """Regression check for listing hunt client errors with paging."""
  with test_lib.FakeTime(42):
    with ApiHuntsListRendererTest.CreateSampleHunt(
        "the hunt", token=self.token) as hunt_obj:
      # Log two client errors at distinct fake times so paging results are
      # deterministic.
      with test_lib.FakeTime(52):
        hunt_obj.LogClientError(
            rdf_client.ClientURN("C.0000111122223333"), "Error foo.")
      with test_lib.FakeTime(55):
        hunt_obj.LogClientError(
            rdf_client.ClientURN("C.1111222233334444"), "Error bar.",
            "<some backtrace>")

  # Full listing, count-limited listing, and offset+count paging. The hunt
  # id is generated, so substitute a fixed value in the output.
  self.Check("GET",
             "/api/hunts/%s/errors" % hunt_obj.urn.Basename(),
             replace={hunt_obj.urn.Basename(): "H:123456"})
  self.Check("GET",
             "/api/hunts/%s/errors?count=1" % hunt_obj.urn.Basename(),
             replace={hunt_obj.urn.Basename(): "H:123456"})
  self.Check("GET", ("/api/hunts/%s/errors?offset=1&count=1" %
                     hunt_obj.urn.Basename()),
             replace={hunt_obj.urn.Basename(): "H:123456"})
def testNoNotificationRescheduling(self):
  """Test that no notifications are rescheduled when a flow raises."""
  with test_lib.FakeTime(10000):
    flow_obj = self.FlowSetup("RaisingTestFlow")
    session_id = flow_obj.session_id
    flow_obj.Close()

    # Send the flow some messages.
    self.SendResponse(session_id, "Hello1", request_id=1)
    self.SendResponse(session_id, "Hello2", request_id=2)
    self.SendResponse(session_id, "Hello3", request_id=3)

    worker_obj = worker.GRRWorker(token=self.token)

    # Process all messages.
    worker_obj.RunOnce()
    worker_obj.thread_pool.Join()

  delay = config_lib.CONFIG["Worker.notification_retry_interval"]
  # Well past the retry interval there must be no notifications left in any
  # shard - the raising flow's notifications must not have been
  # rescheduled.
  with test_lib.FakeTime(10000 + 100 + delay):
    manager = queue_manager.QueueManager(token=self.token)
    self.assertFalse(
        manager.GetNotificationsForAllShards(session_id.Queue()))
def testLatencyStatsAreCorrectlyRecorded(self):
  """Cron job latency stats must be recorded when the job's flow finishes."""
  with test_lib.FakeTime(0):
    cron_manager = cronjobs.CronManager()
    cron_args = cronjobs.CreateCronJobFlowArgs()
    cron_args.flow_runner_args.flow_name = "FakeCronJob"
    cron_args.periodicity = "1w"

    cron_job_urn = cron_manager.ScheduleFlow(cron_args=cron_args,
                                             token=self.token)
    cron_manager.RunOnce(token=self.token)

  # Baseline for the latency metric before the flow completes.
  prev_metric_value = stats.STATS.GetMetricValue(
      "cron_job_latency", fields=[cron_job_urn.Basename()])

  # Fast forward one minute.
  with test_lib.FakeTime(60):
    cron_manager.RunOnce(token=self.token)
    cron_job = aff4.FACTORY.Open(cron_job_urn,
                                 aff4_type=cronjobs.CronJob,
                                 token=self.token)
    cron_flow_urn = cron_job.Get(cron_job.Schema.CURRENT_FLOW_URN)
    for _ in test_lib.TestFlowHelper(cron_flow_urn,
                                     check_flow_errors=False,
                                     token=self.token):
      pass

    # This RunOnce call should determine that the flow has finished
    cron_manager.RunOnce(token=self.token)

  # Check that stats got updated
  current_metric_value = stats.STATS.GetMetricValue(
      "cron_job_latency", fields=[cron_job_urn.Basename()])
  self.assertEqual(current_metric_value.count - prev_metric_value.count, 1)
  # The flow ran from t=0 to t=60, so the recorded latency is 60 seconds.
  self.assertEqual(current_metric_value.sum - prev_metric_value.sum, 60)
def testPOSTRequestFailsIfCSRFTokenIsExpired(self):
  """POST requests must be rejected once the CSRF token has expired.

  Fixes: replaced the deprecated `assertEquals` alias with `assertEqual`
  and `assertTrue(x in y)` with `assertIn` for clearer failure messages.
  """
  with test_lib.FakeTime(rdfvalue.RDFDatetime().FromSecondsFromEpoch(42)):
    index_response = requests.get(self.base_url)
    csrf_token = index_response.cookies.get("csrftoken")

    headers = {"x-csrftoken": csrf_token}
    data = {"client_ids": ["C.0000000000000000"], "labels": ["foo", "bar"]}
    cookies = {"csrftoken": csrf_token}

    # A fresh token must be accepted.
    response = requests.post(
        self.base_url + "/api/clients/labels/add",
        headers=headers,
        data=json.dumps(data),
        cookies=cookies)
    self.assertEqual(response.status_code, 200)

  # This should still succeed as we use strict check in wsgiapp.py:
  # current_time - token_time > CSRF_TOKEN_DURATION.microseconds
  with test_lib.FakeTime(rdfvalue.RDFDatetime().FromSecondsFromEpoch(42) +
                         wsgiapp.CSRF_TOKEN_DURATION.seconds):
    response = requests.post(
        self.base_url + "/api/clients/labels/add",
        headers=headers,
        data=json.dumps(data),
        cookies=cookies)
    self.assertEqual(response.status_code, 200)

  # One second past the token lifetime the request must be rejected.
  with test_lib.FakeTime(rdfvalue.RDFDatetime().FromSecondsFromEpoch(42) +
                         wsgiapp.CSRF_TOKEN_DURATION.seconds + 1):
    response = requests.post(
        self.base_url + "/api/clients/labels/add",
        headers=headers,
        data=json.dumps(data),
        cookies=cookies)
    self.assertEqual(response.status_code, 403)
    self.assertIn("Expired CSRF token", response.text)
def setUp(self):
  """Schedules two identical system cron jobs and runs them several times."""
  super(CleanCronJobsTest, self).setUp()

  with test_lib.FakeTime(40):
    cron_args = cronjobs.CreateCronJobFlowArgs(
        periodicity=DummySystemCronJob.frequency)
    cron_args.flow_runner_args.flow_name = DummySystemCronJob.__name__
    cron_args.lifetime = DummySystemCronJob.lifetime

    # Two jobs with identical args but distinct names.
    self.cron_jobs_urns = [
        cronjobs.CRON_MANAGER.ScheduleFlow(
            cron_args=cron_args,
            job_name=job_name,
            token=self.token,
            disabled=False) for job_name in ("Foo", "Bar")
    ]

  # Force a run once per simulated minute.
  for run_index in range(self.NUM_CRON_RUNS):
    with test_lib.FakeTime(40 + 60 * run_index):
      cronjobs.CRON_MANAGER.RunOnce(token=self.token, force=True)
def Run(self):
  """Regression check for the GetClient API method."""
  # Fix the time to avoid regressions.
  with test_lib.FakeTime(42):
    client_ids = self.SetupClients(1)

    with aff4.FACTORY.Open(
        client_ids[0], mode="rw", token=self.token) as grr_client:
      grr_client.Set(grr_client.Schema.MEMORY_SIZE(4294967296))
      # Delete the certificate as it's being regenerated every time the
      # client is created.
      grr_client.DeleteAttribute(grr_client.Schema.CERT)

  self.Check("GetClient",
             args=client_plugin.ApiGetClientArgs(
                 client_id=client_ids[0].Basename()))
def testHandlesSubrangeOfListOfHuntObjects(self):
  """Offset/count must slice the newest-first hunt listing correctly."""
  # Create ten hunts at t = 1000, 2000, ..., 10000 seconds.
  for index in range(1, 11):
    with test_lib.FakeTime(index * 1000):
      self.CreateHunt(description="hunt_%d" % index)

  request = hunt_plugin.ApiListHuntsArgs(offset=2, count=2)
  result = self.handler.Handle(request, token=self.token)
  create_times = [item.created.AsMicroSecondsFromEpoch()
                  for item in result.items]

  # Newest first: offset=2 skips the hunts created at t=10000 and t=9000.
  # Values are in microseconds.
  self.assertEqual(len(create_times), 2)
  self.assertEqual(create_times[0], 8 * 1000000000)
  self.assertEqual(create_times[1], 7 * 1000000000)
def testListResults(self):
  """Hunt results must be listable through the API client library."""
  self.client_ids = self.SetupClients(5)
  with test_lib.FakeTime(42):
    hunt_urn = self.StartHunt()
    self.AssignTasksToClients()
    self.RunHunt(failrate=-1)

  hunt_ref = self.api.Hunt(hunt_urn.Basename()).Get()
  results = list(hunt_ref.ListResults())

  # Every client must appear exactly once among the results.
  self.assertEqual(
      set(result.client.client_id for result in results),
      set(urn.Basename() for urn in self.client_ids))

  for result in results:
    # Results were produced at the fake time: 42s => 42000000 microseconds.
    self.assertEqual(result.timestamp, 42000000)
    self.assertEqual(result.payload.pathspec.path, "/tmp/evil.txt")
def testExpiredTokens(self):
  """Tests that expired tokens are rejected."""
  urn = rdf_client.ClientURN("C.%016X" % 0).Add("/fs/os/c")
  # With no token at all, access must be denied.
  self.assertRaises(access_control.UnauthorizedAccess, aff4.FACTORY.Open,
                    urn)

  with test_lib.FakeTime(100):
    # Token expires in 5 seconds.
    super_token = access_control.ACLToken(username="******", expiry=105)
    super_token.supervisor = True

    # This should work since token is a super token.
    aff4.FACTORY.Open(urn, mode="rw", token=super_token)

  # Change the time to 200
  with test_lib.FakeTime(200):
    # Should be expired now.
    self.assertRaises(access_control.ExpiryError,
                      aff4.FACTORY.Open,
                      urn,
                      token=super_token,
                      mode="rw")
def CreateFileWithTimeline(file_path, token):
  """Add a file with timeline."""
  # First version at TIME_0 carries all three MAC times, which yields three
  # timeline entries (one per timestamp).
  with test_lib.FakeTime(gui_test_lib.TIME_0):
    with aff4.FACTORY.Create(
        file_path, aff4_grr.VFSAnalysisFile, mode="w", token=token) as fd:
      base_seconds = gui_test_lib.TIME_0.AsSecondsFromEpoch()
      fd.Set(fd.Schema.STAT,
             rdf_client.StatEntry(
                 st_atime=base_seconds + 1000,
                 st_mtime=base_seconds,
                 st_ctime=base_seconds - 1000))

  # Second version at TIME_1 has a stat entry but no timestamps at all.
  with test_lib.FakeTime(gui_test_lib.TIME_1):
    with aff4.FACTORY.Create(
        file_path, aff4_grr.VFSAnalysisFile, mode="w", token=token) as fd:
      fd.Set(fd.Schema.STAT, rdf_client.StatEntry(st_ino=99))
def testForceRunCronJob(self):
  """Force-running a cron job from the UI requires an approval first."""
  with self.ACLChecksDisabled():
    cronjobs.CRON_MANAGER.EnableJob(
        rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))

  with test_lib.FakeTime(
      # 2274264646 corresponds to Sat, 25 Jan 2042 12:10:46 GMT.
      rdfvalue.RDFDatetime().FromSecondsFromEpoch(2274264646),
      increment=1e-6):
    self.Open("/")
    self.Click("css=a[grrtarget=crons]")
    self.Click("css=td:contains('OSBreakDown')")

    # Click on Force Run button and check that dialog appears.
    self.Click("css=button[name=ForceRunCronJob]:not([disabled])")
    self.WaitUntil(self.IsTextPresent,
                   "Are you sure you want to FORCE-RUN this cron job?")

    # Click on "Proceed" and wait for authorization dialog to appear.
    self.Click("css=button[name=Proceed]")
    self.WaitUntil(self.IsTextPresent, "Create a new approval")

    self.Click("css=grr-request-approval-dialog button[name=Cancel]")
    # Wait for dialog to disappear.
    self.WaitUntilNot(self.IsVisible, "css=.modal-open")

    # Grant the approval so the force-run can actually proceed this time.
    with self.ACLChecksDisabled():
      self.GrantCronJobApproval(rdfvalue.RDFURN("aff4:/cron/OSBreakDown"))

    # Click on Force Run button and check that dialog appears.
    self.Click("css=button[name=ForceRunCronJob]:not([disabled])")
    self.WaitUntil(self.IsTextPresent,
                   "Are you sure you want to FORCE-RUN this cron job?")

    # Click on "Proceed" and wait for success label to appear.
    # Also check that "Proceed" button gets disabled.
    self.Click("css=button[name=Proceed]")
    self.WaitUntil(self.IsTextPresent,
                   "Cron job flow was FORCE-STARTED successfully!")
    self.assertFalse(self.IsElementPresent("css=button[name=Proceed]"))

    # Click on "Close" and check that dialog disappears.
    self.Click("css=button[name=Close]")
    self.WaitUntilNot(self.IsVisible, "css=.modal-open")

    # View should be refreshed automatically. The last run date should
    # appear.
    self.WaitUntil(self.IsElementPresent, "css=grr-cron-jobs-list "
                   "tr:contains('OSBreakDown') td:contains('2042')")
def testHuntClientRate(self):
  """Check that clients are scheduled slowly by the hunt.

  Fix: the original assigned lambdas to time.time and relied on the
  surrounding machinery to restore the real clock; if an assertion failed
  midway the patched clock could leak into subsequent code. The patching is
  now wrapped in try/finally so the previous time.time is always restored.
  """
  start_time = 10

  # Set up 10 clients.
  client_ids = self.SetupClients(10)

  with test_lib.FakeTime(start_time):
    with hunts.GRRHunt.StartHunt(
        hunt_name="DummyHunt",
        regex_rules=[
            rdfvalue.ForemanAttributeRegex(
                attribute_name="GRR client", attribute_regex="GRR"),
        ],
        client_rate=1,
        token=self.token) as hunt:
      hunt.Run()

    # Pretend to be the foreman now and dish out hunting jobs to all the
    # clients..
    foreman = aff4.FACTORY.Open("aff4:/foreman", mode="rw",
                                token=self.token)
    for client_id in client_ids:
      foreman.AssignTasksToClient(client_id)

    self.assertEqual(len(DummyHunt.client_ids), 0)

    # Run the hunt.
    worker_mock = test_lib.MockWorker(check_flow_errors=True,
                                      token=self.token)

    orig_time = time.time
    try:
      time.time = lambda: start_time + 2

      # One client is scheduled in the first minute.
      worker_mock.Simulate()
      self.assertEqual(len(DummyHunt.client_ids), 1)

      # No further clients will be scheduled until the end of the first
      # minute.
      time.time = lambda: start_time + 59
      worker_mock.Simulate()
      self.assertEqual(len(DummyHunt.client_ids), 1)

      # One client will be processed every minute.
      for i in range(len(client_ids)):
        time.time = lambda: start_time + 1 + 60 * i
        worker_mock.Simulate()
        self.assertEqual(len(DummyHunt.client_ids), i + 1)
    finally:
      # Always restore whatever clock was in effect before we patched it.
      time.time = orig_time
def testIndexedReads(self):
  """Index-based random access must stay correct long after the writes."""
  with aff4.FACTORY.Create("aff4:/sequential_collection/test4",
                           "TestIndexedSequentialCollection",
                           token=self.token) as collection:
    data_size = 128 * 1024
    for value in range(data_size):
      collection.Add(rdfvalue.RDFInteger(value))

    # Move ten minutes into the future so reads rely on persisted state
    # rather than anything cached at write time.
    with test_lib.FakeTime(rdfvalue.RDFDatetime().Now() +
                           rdfvalue.Duration("10m")):
      # Descending scan near the end of the collection.
      for idx in range(data_size - 1, data_size - 20, -1):
        self.assertEqual(collection[idx], idx)
      # Spot reads around the 1024 mark.
      self.assertEqual(collection[1023], 1023)
      self.assertEqual(collection[1024], 1024)
      self.assertEqual(collection[1025], 1025)
      # Another descending scan further from the end.
      for idx in range(data_size - 1020, data_size - 1040, -1):
        self.assertEqual(collection[idx], idx)
def Run(self):
  """Regression check for listing a flow's output plugins."""
  email_descriptor = output_plugin.OutputPluginDescriptor(
      plugin_name=email_plugin.EmailOutputPlugin.__name__,
      plugin_args=email_plugin.EmailOutputPluginArgs(
          email_address="test@localhost", emails_limit=42))

  with test_lib.FakeTime(42):
    flow_urn = flow.GRRFlow.StartFlow(
        flow_name=processes.ListProcesses.__name__,
        client_id=self.client_id,
        output_plugins=[email_descriptor],
        token=self.token)

  # Flow ids are generated; substitute a stable value in the output.
  self.Check(
      "GET",
      "/api/clients/%s/flows/%s/output-plugins" %
      (self.client_id.Basename(), flow_urn.Basename()),
      replace={flow_urn.Basename(): "W:ABCDEF"})
def testAppliesSingleLabelToSingleClient(self):
  """ApplyLabelsToClientsFlow must attach the label with owner and time."""
  client_id = self.SetupClients(1)[0]

  # The client starts out with no labels.
  self.assertFalse(self.GetClientLabels(client_id))

  with test_lib.FakeTime(42):
    flow.GRRFlow.StartFlow(flow_name="ApplyLabelsToClientsFlow",
                           clients=[client_id],
                           labels=["foo"],
                           token=self.token)

  # The stored label records the owner ("test" user) and the fake
  # creation time.
  self.assertListEqual(
      self.GetClientLabels(client_id),
      [rdfvalue.AFF4ObjectLabel(
          name="foo",
          owner="test",
          timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42))])
def SetupTestTimeline(self):
  """Creates a client fixture file with five timestamped versions."""
  self.client_id = self.SetupClients(1)[0]
  test_lib.ClientFixture(self.client_id, token=self.token)

  # Choose some directory with pathspec in the ClientFixture.
  self.folder_path = "fs/os/Users/Shared"
  self.file_path = self.folder_path + "/a.txt"

  file_urn = self.client_id.Add(self.file_path)
  # Write one version of the file per fake second 0..4; each version's
  # mtime matches the fake time at which it was written.
  for version in range(5):
    with test_lib.FakeTime(version):
      with aff4.FACTORY.Create(
          file_urn, aff4_grr.VFSAnalysisFile, mode="w",
          token=self.token) as fd:
        fd.Set(fd.Schema.STAT,
               rdf_client.StatEntry(
                   st_mtime=rdfvalue.RDFDatetimeSeconds().Now()))