Example #1
    def Handle(self, args, token=None):
        client_urn = self.GetClientTarget(args, token=token)

        size_condition = file_finder.FileFinderCondition(
            condition_type=file_finder.FileFinderCondition.Type.SIZE,
            size=file_finder.FileFinderSizeCondition(
                max_file_size=args.max_file_size))

        file_finder_args = file_finder.FileFinderArgs(
            paths=args.paths,
            action=file_finder.FileFinderAction(action_type=args.action),
            conditions=[size_condition])

        # Check our flow throttling limits; this will raise if there are problems.
        throttler = throttle.FlowThrottler()
        throttler.EnforceLimits(client_urn,
                                token.username,
                                file_finder.FileFinder.__name__,
                                file_finder_args,
                                token=token)

        # Limit the whole flow to 200MB so that even if a glob matches lots of
        # small files the impact stays bounded.
        runner_args = flow_runner.FlowRunnerArgs(
            client_id=client_urn,
            flow_name=file_finder.FileFinder.__name__,
            network_bytes_limit=200 * 1000 * 1000)

        flow_id = flow.GRRFlow.StartFlow(runner_args=runner_args,
                                         token=token,
                                         args=file_finder_args)

        return ApiStartRobotGetFilesOperationResult(
            operation_id=utils.SmartUnicode(flow_id))
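The handler above relies on FlowThrottler's default limits. Below is a minimal standalone sketch of the same throttle-then-start pattern with explicit limits; the import paths, the helper name StartThrottledFlow, and the limit values are assumptions for illustration, while FlowThrottler, EnforceLimits, FlowRunnerArgs and GRRFlow.StartFlow are used exactly as in the examples on this page.

# Illustrative sketch only: import paths differ between GRR releases and are
# an assumption here.
from grr.lib import flow
from grr.lib import flow_runner
from grr.lib import rdfvalue
from grr.lib import throttle

def StartThrottledFlow(client_urn, flow_name, flow_args, token):
    """Start a flow only if the caller's per-user limits allow it."""
    # Hypothetical limits; the handlers above use the config defaults instead.
    throttler = throttle.FlowThrottler(
        daily_req_limit=5, dup_interval=rdfvalue.Duration("1200s"))

    # Raises ErrorDailyFlowRequestLimitExceeded or ErrorFlowDuplicate if the
    # request would exceed the limits.
    throttler.EnforceLimits(client_urn, token.username, flow_name, flow_args,
                            token=token)

    runner_args = flow_runner.FlowRunnerArgs(client_id=client_urn,
                                             flow_name=flow_name)
    return flow.GRRFlow.StartFlow(runner_args=runner_args,
                                  args=flow_args,
                                  token=token)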
Example #2
    def testCheckFlowRequestLimit(self):
        # Create a flow
        with test_lib.FakeTime(self.BASE_TIME):
            flow.GRRFlow.StartFlow(
                client_id=self.client_id,
                flow_name=flow_test_lib.DummyLogFlow.__name__,
                token=self.token)

        # One day + 1s later
        with test_lib.FakeTime(self.BASE_TIME + 86400 + 1):
            flow.GRRFlow.StartFlow(
                client_id=self.client_id,
                flow_name=flow_test_lib.DummyLogFlow.__name__,
                token=self.token)

            # Disable the dup interval checking by setting it to 0.
            throttler = throttle.FlowThrottler(
                daily_req_limit=2, dup_interval=rdfvalue.Duration("0s"))

            # Should succeed; only one flow is present in the 1-day window.
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    flow_test_lib.DummyLogFlow.__name__,
                                    None,
                                    token=self.token)

            # Start some more flows with a different user
            token2 = access_control.ACLToken(username="******",
                                             reason="Running tests")
            flow.GRRFlow.StartFlow(
                client_id=self.client_id,
                flow_name=flow_test_lib.DummyLogFlow.__name__,
                token=token2)
            flow.GRRFlow.StartFlow(
                client_id=self.client_id,
                flow_name=flow_test_lib.DummyLogFlow.__name__,
                token=token2)

            # Should still succeed, since we count per-user
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    flow_test_lib.DummyLogFlow.__name__,
                                    None,
                                    token=self.token)

            # Add another flow at current time
            flow.GRRFlow.StartFlow(
                client_id=self.client_id,
                flow_name=flow_test_lib.DummyLogFlow.__name__,
                token=self.token)

            with self.assertRaises(
                    throttle.ErrorDailyFlowRequestLimitExceeded):
                throttler.EnforceLimits(self.client_id,
                                        self.token.username,
                                        flow_test_lib.DummyLogFlow.__name__,
                                        None,
                                        token=self.token)
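The daily limit in this test counts only flows started by the given user inside a sliding one-day window, which is why the flow started a day and a second earlier and the flows started under token2 do not count against self.token.username. The following standalone sketch is not the GRR implementation (which reads the client's recorded flows from the data store), but it illustrates the counting logic the test exercises:

import time

class DailyFlowRequestLimitExceededError(Exception):
    """Raised when a user exceeds the daily flow quota for a client."""

def CheckDailyLimit(flow_starts, username, daily_req_limit, now=None,
                    window_secs=86400):
    """flow_starts: iterable of (username, start_time_secs) pairs."""
    now = time.time() if now is None else now
    recent = [user for user, start in flow_starts
              if user == username and now - start < window_secs]
    if daily_req_limit and len(recent) >= daily_req_limit:
        raise DailyFlowRequestLimitExceededError(
            "%d flows started by %s in the last day, limit is %d" %
            (len(recent), username, daily_req_limit))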
Example #3
    def Render(self, args, token=None):
        client_urn = self.GetClientTarget(args, token=token)

        size_condition = file_finder.FileFinderCondition(
            condition_type=file_finder.FileFinderCondition.Type.SIZE,
            size=file_finder.FileFinderSizeCondition(
                max_file_size=args.max_file_size))

        file_finder_args = file_finder.FileFinderArgs(
            paths=args.paths,
            action=file_finder.FileFinderAction(action_type=args.action),
            conditions=[size_condition])

        # Check our flow throttling limits; this will raise if there are problems.
        throttler = throttle.FlowThrottler()
        throttler.EnforceLimits(client_urn,
                                token.username,
                                "FileFinder",
                                file_finder_args,
                                token=token)

        # Limit the whole flow to 200MB so that even if a glob matches lots of
        # small files the impact stays bounded.
        runner_args = flow_runner.FlowRunnerArgs(
            client_id=client_urn,
            flow_name="FileFinder",
            network_bytes_limit=200 * 1000 * 1000)

        flow_id = flow.GRRFlow.StartFlow(runner_args=runner_args,
                                         token=token,
                                         args=file_finder_args)

        # Provide a url where the caller can check on the flow status.
        status_url = urlparse.urljoin(
            config_lib.CONFIG["AdminUI.url"], "/api/flows/%s/%s/status" %
            (client_urn.Basename(), flow_id.Basename()))
        return dict(
            flow_id=api_value_renderers.RenderValue(flow_id),
            flow_args=api_value_renderers.RenderValue(file_finder_args),
            runner_args=api_value_renderers.RenderValue(runner_args),
            status_url=status_url)
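Example #3 appears to be an older renderer-based variant of the handler in Example #1: the throttling and flow-start logic is the same, but it returns a rendered dict rather than an ApiStartRobotGetFilesOperationResult and adds a status_url built from the AdminUI.url setting. For instance, with AdminUI.url set to the hypothetical https://grr.example.com, the URL takes the form https://grr.example.com/api/flows/<client id>/<flow id>/status.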
Example #4
    def testFlowDuplicateLimit(self):
        # Disable the request limit checking by setting it to 0.
        throttler = throttle.FlowThrottler(
            daily_req_limit=0, dup_interval=rdfvalue.Duration("1200s"))

        # Running the same flow again immediately should fail.
        with test_lib.FakeTime(self.BASE_TIME):
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    "DummyLogFlow",
                                    None,
                                    token=self.token)

            flow.GRRFlow.StartFlow(client_id=self.client_id,
                                   flow_name="DummyLogFlow",
                                   token=self.token)

            with self.assertRaises(throttle.ErrorFlowDuplicate):
                throttler.EnforceLimits(self.client_id,
                                        self.token.username,
                                        "DummyLogFlow",
                                        None,
                                        token=self.token)

        # Doing the same outside the window should work
        with test_lib.FakeTime(self.BASE_TIME + 1200 + 1):
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    "DummyLogFlow",
                                    None,
                                    token=self.token)

            flow.GRRFlow.StartFlow(client_id=self.client_id,
                                   flow_name="DummyLogFlow",
                                   token=self.token)

            with self.assertRaises(throttle.ErrorFlowDuplicate):
                throttler.EnforceLimits(self.client_id,
                                        self.token.username,
                                        "DummyLogFlow",
                                        None,
                                        token=self.token)

        # Now try a flow with more complicated args
        args = rdf_file_finder.FileFinderArgs(
            paths=["/tmp/1", "/tmp/2"],
            action=rdf_file_finder.FileFinderAction(action_type="STAT"))

        with test_lib.FakeTime(self.BASE_TIME):
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    file_finder.FileFinder.__name__,
                                    args,
                                    token=self.token)

            flow.GRRFlow.StartFlow(
                client_id=self.client_id,
                flow_name=file_finder.FileFinder.__name__,
                token=self.token,
                paths=["/tmp/1", "/tmp/2"],
                action=rdf_file_finder.FileFinderAction(action_type="STAT"))

            with self.assertRaises(throttle.ErrorFlowDuplicate):
                throttler.EnforceLimits(self.client_id,
                                        self.token.username,
                                        file_finder.FileFinder.__name__,
                                        args,
                                        token=self.token)

            # Different args should succeed.
            args = rdf_file_finder.FileFinderArgs(
                paths=["/tmp/1", "/tmp/3"],
                action=rdf_file_finder.FileFinderAction(action_type="STAT"))

            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    file_finder.FileFinder.__name__,
                                    args,
                                    token=self.token)
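The duplicate check compares both the flow name and the flow arguments, which is why changing a single path from /tmp/2 to /tmp/3 above is enough to pass. A rough standalone sketch of that idea (again, not the actual GRR code, which inspects the flows recorded for the client):

import time

class DuplicateFlowError(Exception):
    """Raised when an identical flow was started within the dup interval."""

def CheckDuplicate(recent_flows, flow_name, flow_args, dup_interval_secs,
                   now=None):
    """recent_flows: iterable of (name, args, start_time_secs) tuples."""
    now = time.time() if now is None else now
    for name, args, start in recent_flows:
        if (name == flow_name and args == flow_args and
                now - start < dup_interval_secs):
            raise DuplicateFlowError(
                "Identical %s was started %d seconds ago" %
                (flow_name, now - start))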
Example #5
    def _GetArtifactCollectorFlowThrottler(self):
        acparams = self.params.artifact_collector_flow

        return throttle.FlowThrottler(
            daily_req_limit=acparams.max_flows_per_client_daily,
            dup_interval=acparams.min_interval_between_duplicate_flows)
Example #6
    def _GetFileFinderThrottler(self):
        ffparams = self.params.file_finder_flow

        return throttle.FlowThrottler(
            daily_req_limit=ffparams.max_flows_per_client_daily,
            dup_interval=ffparams.min_interval_between_duplicate_flows)
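Examples #5 and #6 build per-flow-type throttlers from a params object whose per-flow sections expose max_flows_per_client_daily and min_interval_between_duplicate_flows. A minimal sketch of how such a class might use them follows; the method name, the flow-name dispatch, and the "ArtifactCollectorFlow" label are assumptions for illustration:

    def _EnforceFlowLimits(self, client_id, flow_name, flow_args, token):
        # Hypothetical dispatcher: pick the throttler that matches the flow
        # type and let EnforceLimits raise if a limit would be exceeded.
        if flow_name == "ArtifactCollectorFlow":
            throttler = self._GetArtifactCollectorFlowThrottler()
        else:
            throttler = self._GetFileFinderThrottler()

        throttler.EnforceLimits(client_id, token.username, flow_name,
                                flow_args, token=token)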