Example #1
    def testNoLogsIfBtimeSupported(self, db: abstract_db.Database):
        client_id = self.client_id
        db.WriteClientMetadata(client_id, fleetspeak_enabled=True)

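        # Write a snapshot whose startup info declares birth-time (btime) support.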
        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "Linux"
        snapshot.startup_info.client_info.timeline_btime_support = True
        db.WriteClientSnapshot(snapshot)

        with temp.AutoTempDirPath() as tempdir:
            args = rdf_timeline.TimelineArgs(root=tempdir.encode("utf-8"))

            flow_id = flow_test_lib.TestFlowHelper(
                timeline_flow.TimelineFlow.__name__,
                action_mocks.ActionMock(timeline_action.Timeline),
                client_id=client_id,
                creator=self.test_username,
                args=args)

            flow_test_lib.FinishAllFlowsOnClient(client_id)

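        # With btime supported, the flow should not have logged anything.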
        log_entries = db.ReadFlowLogEntries(client_id,
                                            flow_id,
                                            offset=0,
                                            count=1)
        self.assertEmpty(log_entries)
Example #2
    def testLogsWarningIfBtimeNotSupported(self, db: abstract_db.Database):
        client_id = self.client_id
        db.WriteClientMetadata(client_id, fleetspeak_enabled=True)

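        # Write a snapshot whose startup info declares no birth-time (btime) support.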
        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "Linux"
        snapshot.startup_info.client_info.timeline_btime_support = False
        db.WriteClientSnapshot(snapshot)

        with temp.AutoTempDirPath() as tempdir:
            args = rdf_timeline.TimelineArgs(root=tempdir.encode("utf-8"))

            flow_id = flow_test_lib.TestFlowHelper(
                timeline_flow.TimelineFlow.__name__,
                action_mocks.ActionMock(timeline_action.Timeline),
                client_id=client_id,
                token=self.token,
                args=args)

            flow_test_lib.FinishAllFlowsOnClient(client_id)

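        # Without btime support, the flow should have logged a single warning.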
        log_entries = db.ReadFlowLogEntries(client_id,
                                            flow_id,
                                            offset=0,
                                            count=1)
        self.assertLen(log_entries, 1)
        self.assertRegex(log_entries[0].message, "birth time is not supported")
Example #3
File: flow_test.py  Project: viszsec/grr
    def testHandlerUsesKnowledgeBase(self, db: abstract_db.Database):
        token = _CreateToken(db)
        client_id = db_test_utils.InitializeClient(db)

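        # The snapshot supplies the knowledge base used to expand %%users.homedir%%.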
        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.users = [rdf_client.User(homedir="/home/foo")]
        db.WriteClientSnapshot(snapshot)

        handler = flow_plugin.ApiExplainGlobExpressionHandler()
        args = flow_plugin.ApiExplainGlobExpressionArgs(
            example_count=2,
            client_id=client_id,
            glob_expression="%%users.homedir%%/foo")
        results = handler.Handle(args, token=token)
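        # The %%users.homedir%% component expands from the knowledge base; the
        # literal "/foo" component has no examples.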
        self.assertEqual(list(results.components), [
            rdf_paths.GlobComponentExplanation(
                glob_expression="%%users.homedir%%", examples=["/home/foo"]),
            rdf_paths.GlobComponentExplanation(glob_expression="/foo",
                                               examples=[]),
        ])
Example #4
File: flow_test.py  Project: viszsec/grr
    def testUsesCollectionTimeFiles(self, db: abstract_db.Database):
        token = _CreateToken(db)
        client_id = db_test_utils.InitializeClient(db)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "redox"
        db.WriteClientSnapshot(snapshot)

        with temp.AutoTempFilePath() as temp_filepath:
            fake_artifact_source = rdf_artifacts.ArtifactSource(
                type=rdf_artifacts.ArtifactSource.SourceType.FILE,
                attributes={
                    "paths": [temp_filepath],
                })

            fake_artifact = rdf_artifacts.Artifact(
                name="FakeArtifact",
                doc="Lorem ipsum.",
                sources=[fake_artifact_source])

            flow_args = rdf_artifacts.ArtifactCollectorFlowArgs()
            flow_args.artifact_list = [fake_artifact.name]
            flow_args.apply_parsers = False

            with io.open(temp_filepath, mode="wb") as temp_filedesc:
                temp_filedesc.write(b"OLD")

            with mock.patch.object(
                    artifact_registry, "REGISTRY",
                    artifact_registry.ArtifactRegistry()) as registry:
                registry.RegisterArtifact(fake_artifact)

                # First, we run the artifact collector to collect the old file and save
                # the flow id to parse the results later.
                flow_id = flow_test_lib.TestFlowHelper(
                    collectors.ArtifactCollectorFlow.__name__,
                    action_mocks.FileFinderClientMock(),
                    client_id=client_id,
                    args=flow_args,
                    token=token)

                flow_test_lib.FinishAllFlowsOnClient(client_id)

            with io.open(temp_filepath, mode="wb") as temp_filedesc:
                temp_filedesc.write(b"NEW")

            with mock.patch.object(
                    artifact_registry, "REGISTRY",
                    artifact_registry.ArtifactRegistry()) as registry:
                registry.RegisterArtifact(fake_artifact)

                # Now, we run the artifact collector again to collect the new file to
                # update to this version on the server. The parsing should be performed
                # against the previous flow.
                flow_test_lib.TestFlowHelper(
                    collectors.ArtifactCollectorFlow.__name__,
                    action_mocks.FileFinderClientMock(),
                    client_id=client_id,
                    args=flow_args,
                    token=token)

                flow_test_lib.FinishAllFlowsOnClient(client_id)

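        # A parser that returns the raw file contents, so the test can tell which
        # version of the file the flow results were parsed from.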
        class FakeFileParser(abstract_parser.SingleFileParser):

            supported_artifacts = [fake_artifact.name]

            def ParseFile(
                self,
                knowledge_base: rdf_client.KnowledgeBase,
                pathspec: rdf_paths.PathSpec,
                filedesc: file_store.BlobStream,
            ) -> Iterable[rdfvalue.RDFBytes]:
                del knowledge_base, pathspec  # Unused.
                return [rdfvalue.RDFBytes(filedesc.Read())]

        with parser_test_lib._ParserContext("FakeFile", FakeFileParser):
            args = flow_plugin.ApiListParsedFlowResultsArgs(
                client_id=client_id, flow_id=flow_id, offset=0, count=1024)

            result = self.handler.Handle(args, token=token)

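        # Parsing is done against the results of the first flow, so the payload
        # must be the "OLD" file contents, not the "NEW" ones.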
        self.assertEmpty(result.errors)
        self.assertLen(result.items, 1)

        response = result.items[0].payload
        self.assertEqual(response, b"OLD")
Example #5
File: flow_test.py  Project: viszsec/grr
    def testUsesKnowledgebaseFromFlow(self, db: abstract_db.Database):
        token = _CreateToken(db)

        client_id = db_test_utils.InitializeClient(db)

        # This is the snapshot that is visible to the flow and should be used for
        # parsing results.
        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "redox"
        db.WriteClientSnapshot(snapshot)

        with mock.patch.object(
                artifact_registry, "REGISTRY",
                artifact_registry.ArtifactRegistry()) as registry:
            registry.RegisterArtifact(self.ECHO1337_ARTIFACT)

            flow_args = rdf_artifacts.ArtifactCollectorFlowArgs()
            flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name]
            flow_args.apply_parsers = False

            flow_id = flow_test_lib.TestFlowHelper(
                collectors.ArtifactCollectorFlow.__name__,
                self.FakeExecuteCommand(),
                client_id=client_id,
                args=flow_args,
                token=token)

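        # A parser that copies the knowledge base OS into stderr, so the test can
        # tell which snapshot was used for parsing.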
        class FakeParser(abstract_parser.SingleResponseParser):

            supported_artifacts = [self.ECHO1337_ARTIFACT.name]

            def ParseResponse(
                self,
                knowledge_base: rdf_client.KnowledgeBase,
                response: rdf_client_action.ExecuteResponse,
            ) -> Iterable[rdf_client_action.ExecuteResponse]:
                precondition.AssertType(response,
                                        rdf_client_action.ExecuteResponse)

                parsed_response = rdf_client_action.ExecuteResponse()
                parsed_response.stdout = response.stdout
                parsed_response.stderr = knowledge_base.os.encode("utf-8")
                return [parsed_response]

        # This is a snapshot written to the database after the responses were
        # collected, so this should not be used for parsing.
        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "linux"
        db.WriteClientSnapshot(snapshot)

        with parser_test_lib._ParserContext("Fake", FakeParser):
            args = flow_plugin.ApiListParsedFlowResultsArgs(
                client_id=client_id, flow_id=flow_id, offset=0, count=1024)

            result = self.handler.Handle(args, token=token)

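        # The stderr value must come from the "redox" snapshot that was current
        # when the flow ran, not the "linux" snapshot written afterwards.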
        self.assertEmpty(result.errors)
        self.assertLen(result.items, 1)

        response = result.items[0].payload
        self.assertIsInstance(response, rdf_client_action.ExecuteResponse)
        self.assertEqual(response.stdout, b"1337")
        self.assertEqual(response.stderr.decode("utf-8"), "redox")