def _GetArtifactCollectorFlowThrottler(self):
    """Builds the FlowThrottler governing ArtifactCollectorFlow launches.

    Returns:
      A throttle.FlowThrottler configured from the artifact-collector
      section of self.params (daily per-client cap and minimum spacing
      between duplicate flows).
    """
    params = self.params.artifact_collector_flow
    dup_interval = rdfvalue.Duration(
        params.min_interval_between_duplicate_flows)
    return throttle.FlowThrottler(
        daily_req_limit=params.max_flows_per_client_daily,
        dup_interval=dup_interval)
def _GetFileFinderThrottler(self):
    """Builds the FlowThrottler governing FileFinder launches.

    Returns:
      A throttle.FlowThrottler configured from the file-finder section
      of self.params (daily per-client cap and minimum spacing between
      duplicate flows).
    """
    params = self.params.file_finder_flow
    dup_interval = rdfvalue.Duration(
        params.min_interval_between_duplicate_flows)
    return throttle.FlowThrottler(
        daily_req_limit=params.max_flows_per_client_daily,
        dup_interval=dup_interval)
def testCheckFlowRequestLimit(self):
    """Daily request limit counts per-user flows inside a 1-day window."""
    # Start an initial flow at the base time.
    with test_lib.FakeTime(self.BASE_TIME):
        flow.StartFlow(
            client_id=self.client_id,
            flow_name=flow_test_lib.DummyLogFlow.__name__,
            token=self.token)

    # One day plus one second later, so the first flow falls outside
    # the 24h window the throttler inspects.
    with test_lib.FakeTime(self.BASE_TIME + 86400 + 1):
        flow.StartFlow(
            client_id=self.client_id,
            flow_name=flow_test_lib.DummyLogFlow.__name__,
            token=self.token)

        # Duplicate-interval checking is disabled (set to 0) so only the
        # daily request limit is exercised here.
        throttler = throttle.FlowThrottler(
            daily_req_limit=2, dup_interval=rdfvalue.Duration("0s"))

        # Should succeed: only one flow is present in the 1-day window.
        throttler.EnforceLimits(
            self.client_id,
            self.token.username,
            flow_test_lib.DummyLogFlow.__name__,
            None,
            token=self.token)

        # Launch more flows under a different user's token.
        token2 = access_control.ACLToken(
            username="******", reason="Running tests")
        flow.StartFlow(
            client_id=self.client_id,
            flow_name=flow_test_lib.DummyLogFlow.__name__,
            token=token2)
        flow.StartFlow(
            client_id=self.client_id,
            flow_name=flow_test_lib.DummyLogFlow.__name__,
            token=token2)

        # Still succeeds: the limit is counted per-user, and the extra
        # flows belong to token2.
        throttler.EnforceLimits(
            self.client_id,
            self.token.username,
            flow_test_lib.DummyLogFlow.__name__,
            None,
            token=self.token)

        # A second flow for the original user at the current time pushes
        # that user to the 2-flow daily limit.
        flow.StartFlow(
            client_id=self.client_id,
            flow_name=flow_test_lib.DummyLogFlow.__name__,
            token=self.token)

        with self.assertRaises(throttle.ErrorDailyFlowRequestLimitExceeded):
            throttler.EnforceLimits(
                self.client_id,
                self.token.username,
                flow_test_lib.DummyLogFlow.__name__,
                None,
                token=self.token)
def testCheckFlowRequestLimit(self):
    """Daily request limit counts per-creator flows inside a 1-day window."""
    # Start an initial flow at the base time.
    with test_lib.FakeTime(self.BASE_TIME):
        flow_test_lib.StartFlow(
            flow_cls=flow_test_lib.DummyLogFlow,
            client_id=self.client_id,
            creator=self.test_username)

    # One day plus one second later, so the first flow falls outside
    # the 24h window the throttler inspects.
    with test_lib.FakeTime(self.BASE_TIME + 86400 + 1):
        flow_test_lib.StartFlow(
            flow_cls=flow_test_lib.DummyLogFlow,
            client_id=self.client_id,
            creator=self.test_username)

        # Duplicate-interval checking is disabled (set to 0) so only the
        # daily request limit is exercised here.
        throttler = throttle.FlowThrottler(
            daily_req_limit=2,
            dup_interval=rdfvalue.Duration.From(0, rdfvalue.SECONDS))

        # Should succeed: only one flow is present in the 1-day window.
        throttler.EnforceLimits(self.client_id, self.test_username,
                                flow_test_lib.DummyLogFlow.__name__, None)

        # Launch more flows under a different creator.
        flow_test_lib.StartFlow(
            flow_cls=flow_test_lib.DummyLogFlow,
            client_id=self.client_id,
            creator="test2")
        flow_test_lib.StartFlow(
            flow_cls=flow_test_lib.DummyLogFlow,
            client_id=self.client_id,
            creator="test2")

        # Still succeeds: the limit is counted per-user, and the extra
        # flows belong to "test2".
        throttler.EnforceLimits(self.client_id, self.test_username,
                                flow_test_lib.DummyLogFlow.__name__, None)

        # A second flow for the original user at the current time pushes
        # that user to the 2-flow daily limit.
        flow_test_lib.StartFlow(
            flow_cls=flow_test_lib.DummyLogFlow,
            client_id=self.client_id,
            creator=self.test_username)

        with self.assertRaises(throttle.DailyFlowRequestLimitExceededError):
            throttler.EnforceLimits(self.client_id, self.test_username,
                                    flow_test_lib.DummyLogFlow.__name__, None)
def testFlowDuplicateLimit(self):
    """Duplicate-flow check rejects identical flow+args inside the window."""
    # Request-limit checking is disabled (set to 0) so only the
    # duplicate-interval logic is exercised here.
    throttler = throttle.FlowThrottler(
        daily_req_limit=0, dup_interval=rdfvalue.Duration("1200s"))

    # Launching the same flow again immediately must be rejected.
    with test_lib.FakeTime(self.BASE_TIME):
        throttler.EnforceLimits(
            self.client_id,
            self.token.username,
            flow_test_lib.DummyLogFlow.__name__,
            None,
            token=self.token)
        api_regression_test_lib.StartFlow(
            client_id=self.client_id,
            flow_cls=flow_test_lib.DummyLogFlow,
            token=self.token)
        with self.assertRaises(throttle.DuplicateFlowError):
            throttler.EnforceLimits(
                self.client_id,
                self.token.username,
                flow_test_lib.DummyLogFlow.__name__,
                None,
                token=self.token)

    # Outside the 1200s dup window the same flow is allowed again,
    # and then immediately becomes a duplicate once more.
    with test_lib.FakeTime(self.BASE_TIME + 1200 + 1):
        throttler.EnforceLimits(
            self.client_id,
            self.token.username,
            flow_test_lib.DummyLogFlow.__name__,
            None,
            token=self.token)
        api_regression_test_lib.StartFlow(
            client_id=self.client_id,
            flow_cls=flow_test_lib.DummyLogFlow,
            token=self.token)
        with self.assertRaises(throttle.DuplicateFlowError):
            throttler.EnforceLimits(
                self.client_id,
                self.token.username,
                flow_test_lib.DummyLogFlow.__name__,
                None,
                token=self.token)

    # Repeat the exercise with a flow taking structured args: equal args
    # count as a duplicate, differing args do not.
    args = rdf_file_finder.FileFinderArgs(
        paths=["/tmp/1", "/tmp/2"],
        action=rdf_file_finder.FileFinderAction(action_type="STAT"))
    with test_lib.FakeTime(self.BASE_TIME):
        throttler.EnforceLimits(
            self.client_id,
            self.token.username,
            file_finder.FileFinder.__name__,
            args,
            token=self.token)

        # Start a flow whose args are a distinct but equal object.
        new_args = rdf_file_finder.FileFinderArgs(
            paths=["/tmp/1", "/tmp/2"],
            action=rdf_file_finder.FileFinderAction(action_type="STAT"))
        api_regression_test_lib.StartFlow(
            client_id=self.client_id,
            flow_cls=file_finder.FileFinder,
            token=self.token,
            flow_args=new_args)

        with self.assertRaises(throttle.DuplicateFlowError):
            throttler.EnforceLimits(
                self.client_id,
                self.token.username,
                file_finder.FileFinder.__name__,
                args,
                token=self.token)

        # Different args should succeed.
        args = rdf_file_finder.FileFinderArgs(
            paths=["/tmp/1", "/tmp/3"],
            action=rdf_file_finder.FileFinderAction(action_type="STAT"))
        throttler.EnforceLimits(
            self.client_id,
            self.token.username,
            file_finder.FileFinder.__name__,
            args,
            token=self.token)