Example 1
  def testResultMetadataAreCorrectlyUpdatedAfterMultiplePersistStateCalls(
      self, db: abstract_db.Database):
    client_id = db_test_utils.InitializeClient(db)

    flow = rdf_flow_objects.Flow()
    flow.client_id = client_id
    flow.flow_id = self._FLOW_ID
    db.WriteFlowObject(flow)

    flow_obj = FlowBaseTest.Flow(flow)
    flow_obj.SendReply(rdf_client.ClientInformation())
    # Persisting the state twice must not double-count the reply.
    flow_obj.PersistState()
    flow_obj.PersistState()
    db.WriteFlowObject(flow_obj.rdf_flow)

    flow_2 = db.ReadFlowObject(client_id, self._FLOW_ID)
    flow_obj_2 = FlowBaseTest.Flow(flow_2)
    result_metadata = flow_obj_2.GetResultMetadata()

    self.assertLen(result_metadata.num_results_per_type_tag, 1)
    self.assertTrue(result_metadata.is_metadata_set)
    self.assertEqual(result_metadata.num_results_per_type_tag[0].type,
                     "ClientInformation")
    self.assertEqual(result_metadata.num_results_per_type_tag[0].tag, "")
    self.assertEqual(result_metadata.num_results_per_type_tag[0].count, 1)
Example 2
    def testLogsWarningIfBtimeNotSupported(self, db: abstract_db.Database):
        client_id = self.client_id
        db.WriteClientMetadata(client_id, fleetspeak_enabled=True)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "Linux"
        snapshot.startup_info.client_info.timeline_btime_support = False
        db.WriteClientSnapshot(snapshot)

        with temp.AutoTempDirPath() as tempdir:
            args = rdf_timeline.TimelineArgs(root=tempdir.encode("utf-8"))

            flow_id = flow_test_lib.TestFlowHelper(
                timeline_flow.TimelineFlow.__name__,
                action_mocks.ActionMock(timeline_action.Timeline),
                client_id=client_id,
                token=self.token,
                args=args)

            flow_test_lib.FinishAllFlowsOnClient(client_id)

        log_entries = db.ReadFlowLogEntries(client_id,
                                            flow_id,
                                            offset=0,
                                            count=1)
        self.assertLen(log_entries, 1)
        self.assertRegex(log_entries[0].message, "birth time is not supported")
Example 3
    def testResultMetadataHasGroupedNumberOfReplies(self,
                                                    db: abstract_db.Database):
        client_id = db_test_utils.InitializeClient(db)

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = self._FLOW_ID
        db.WriteFlowObject(flow)

        flow_obj = FlowBaseTest.Flow(flow)
        flow_obj.SendReply(rdf_client.ClientInformation())
        flow_obj.SendReply(rdf_client.StartupInfo())
        flow_obj.SendReply(rdf_client.StartupInfo())
        flow_obj.SendReply(rdf_client.StartupInfo(), tag="foo")
        flow_obj.PersistState()
        db.WriteFlowObject(flow_obj.rdf_flow)

        flow_2 = db.ReadFlowObject(client_id, self._FLOW_ID)
        flow_obj_2 = FlowBaseTest.Flow(flow_2)

        result_metadata = flow_obj_2.GetResultMetadata()
        self.assertLen(result_metadata.num_results_per_type_tag, 3)

        sorted_counts = sorted(result_metadata.num_results_per_type_tag,
                               key=lambda v: (v.type, v.tag))
        self.assertEqual(sorted_counts[0].type, "ClientInformation")
        self.assertEqual(sorted_counts[0].tag, "")
        self.assertEqual(sorted_counts[0].count, 1)
        self.assertEqual(sorted_counts[1].type, "StartupInfo")
        self.assertEqual(sorted_counts[1].tag, "")
        self.assertEqual(sorted_counts[1].count, 2)
        self.assertEqual(sorted_counts[2].type, "StartupInfo")
        self.assertEqual(sorted_counts[2].tag, "foo")
        self.assertEqual(sorted_counts[2].count, 1)
Example 4
    def testNoLogsIfBtimeSupported(self, db: abstract_db.Database):
        client_id = self.client_id
        db.WriteClientMetadata(client_id, fleetspeak_enabled=True)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "Linux"
        snapshot.startup_info.client_info.timeline_btime_support = True
        db.WriteClientSnapshot(snapshot)

        with temp.AutoTempDirPath() as tempdir:
            args = rdf_timeline.TimelineArgs(root=tempdir.encode("utf-8"))

            flow_id = flow_test_lib.TestFlowHelper(
                timeline_flow.TimelineFlow.__name__,
                action_mocks.ActionMock(timeline_action.Timeline),
                client_id=client_id,
                creator=self.test_username,
                args=args)

            flow_test_lib.FinishAllFlowsOnClient(client_id)

        log_entries = db.ReadFlowLogEntries(client_id,
                                            flow_id,
                                            offset=0,
                                            count=1)
        self.assertEmpty(log_entries)
Example 5
  def testLogWithoutFormatArgs(self, db: abstract_db.Database) -> None:
    client_id = db_test_utils.InitializeClient(db)

    flow = rdf_flow_objects.Flow()
    flow.client_id = client_id
    flow.flow_id = self._FLOW_ID
    db.WriteFlowObject(flow)

    flow = FlowBaseTest.Flow(flow)
    flow.Log("foo %s %s")

    logs = db.ReadFlowLogEntries(client_id, self._FLOW_ID, offset=0, count=1024)
    self.assertLen(logs, 1)
    self.assertEqual(logs[0].message, "foo %s %s")
Example 6
    def testFlowWithNoResult(self, db: abstract_db.Database) -> None:
        client_id = "C.1234567890123456"
        flow_id = "ABCDEF92"

        db.WriteClientMetadata(client_id, last_ping=rdfvalue.RDFDatetime.Now())

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline_flow.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        db.WriteFlowObject(flow_obj)

        self.assertIsNone(timeline_flow.FilesystemType(client_id, flow_id))
Example 7
    def testLogWithFormatArgs(self, db: abstract_db.Database) -> None:
        client_id = db_test_utils.InitializeClient(db)
        flow_id = "FEDCBA9876543210"

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = flow_id
        db.WriteFlowObject(flow)

        flow = FlowBaseTest.Flow(flow)
        flow.Log("foo %s %s", "bar", 42)

        logs = db.ReadFlowLogEntries(client_id, flow_id, offset=0, count=1024)
        self.assertLen(logs, 1)
        self.assertEqual(logs[0].message, "foo bar 42")
Example 8
    def testClientInfo(self, db: abstract_db.Database):
        client_id = "C.0123456789ABCDEF"
        db.WriteClientMetadata(client_id, fleetspeak_enabled=False)

        startup_info = rdf_client.StartupInfo()
        startup_info.client_info.client_name = "rrg"
        startup_info.client_info.client_version = 1337
        db.WriteClientStartupInfo(client_id, startup_info)

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = "FEDCBA9876543210"

        flow = FlowBaseTest.Flow(flow)
        self.assertIsInstance(flow.client_info, rdf_client.ClientInformation)
        self.assertEqual(flow.client_info.client_name, "rrg")
        self.assertEqual(flow.client_info.client_version, 1337)
Example 9
    def testNotAppliedParsers(self, db: abstract_db.Database) -> None:
        client_id = db_test_utils.InitializeClient(db)
        flow_id = "4815162342ABCDEF"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = collectors.ArtifactCollectorFlow.__name__
        flow_obj.args = rdf_artifacts.ArtifactCollectorFlowArgs(
            apply_parsers=False)
        db.WriteFlowObject(flow_obj)

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.tag = "artifact:Fake"
        flow_result.payload = rdfvalue.RDFString("foobar")
        db.WriteFlowResults([flow_result])

        args = flow_plugin.ApiListFlowApplicableParsersArgs()
        args.client_id = client_id
        args.flow_id = flow_id

        result = self.handler.Handle(args)
        self.assertCountEqual(result.parsers, [
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.SINGLE_RESPONSE,
                name="FakeSingleResponse",
            ),
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.MULTI_RESPONSE,
                name="FakeMultiResponse",
            ),
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.SINGLE_FILE,
                name="FakeSingleFile",
            ),
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.MULTI_FILE,
                name="FakeMultiFile",
            ),
        ])
Example 10
    def testClientInfoDefault(self, db: abstract_db.Database):
        client_id = "C.0123456789ABCDEF"
        db.WriteClientMetadata(client_id, fleetspeak_enabled=False)

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = "FEDCBA9876543210"

        flow = FlowBaseTest.Flow(flow)
        self.assertIsInstance(flow.client_info, rdf_client.ClientInformation)
        self.assertEmpty(flow.client_info.client_name)
Example 11
    def testReturnsDefaultFlowProgressForEmptyFlow(self,
                                                   db: abstract_db.Database):
        client_id = db_test_utils.InitializeClient(db)

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = self._FLOW_ID
        db.WriteFlowObject(flow)

        flow_obj = FlowBaseTest.Flow(flow)
        progress = flow_obj.GetProgress()
        self.assertIsInstance(progress, rdf_flow_objects.DefaultFlowProgress)
Example 12
    def testFlowWithResult(self, db: abstract_db.Database) -> None:
        client_id = "C.1234567890123456"
        flow_id = "ABCDEF92"

        db.WriteClientMetadata(client_id, last_ping=rdfvalue.RDFDatetime.Now())

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline_flow.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        db.WriteFlowObject(flow_obj)

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = rdf_timeline.TimelineResult(
            filesystem_type="ntfs")
        db.WriteFlowResults([flow_result])

        self.assertEqual(timeline_flow.FilesystemType(client_id, flow_id),
                         "ntfs")
Example 13
    def testReturnsEmptyResultMetadataForEmptyFlow(self,
                                                   db: abstract_db.Database):
        client_id = db_test_utils.InitializeClient(db)

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = self._FLOW_ID
        db.WriteFlowObject(flow)

        flow_obj = FlowBaseTest.Flow(flow)
        result_metadata = flow_obj.GetResultMetadata()
        self.assertIsInstance(result_metadata,
                              rdf_flow_objects.FlowResultMetadata)
        self.assertEmpty(result_metadata.num_results_per_type_tag)
Example 14
    def testAlreadyAppliedParsers(self, db: abstract_db.Database) -> None:
        client_id = db_test_utils.InitializeClient(db)
        flow_id = "4815162342ABCDEF"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = collectors.ArtifactCollectorFlow.__name__
        flow_obj.args = rdf_artifacts.ArtifactCollectorFlowArgs(
            apply_parsers=True)
        db.WriteFlowObject(flow_obj)

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.tag = "artifact:Fake"
        db.WriteFlowResults([flow_result])

        args = flow_plugin.ApiListFlowApplicableParsersArgs()
        args.client_id = client_id
        args.flow_id = flow_id

        result = self.handler.Handle(args)
        self.assertEmpty(result.parsers)
Example 15
    def testClientInfo(self, db: abstract_db.Database):
        client_id = db_test_utils.InitializeClient(db)

        startup_info = rdf_client.StartupInfo()
        startup_info.client_info.client_name = "rrg"
        startup_info.client_info.client_version = 1337
        db.WriteClientStartupInfo(client_id, startup_info)

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = self._FLOW_ID

        flow = FlowBaseTest.Flow(flow)
        self.assertIsInstance(flow.client_info, rdf_client.ClientInformation)
        self.assertEqual(flow.client_info.client_name, "rrg")
        self.assertEqual(flow.client_info.client_version, 1337)
Example 16
    def testIncorrectFlowType(self, db: abstract_db.Database) -> None:
        client_id = db_test_utils.InitializeClient(db)
        flow_id = "4815162342ABCDEF"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = "NotArtifactCollector"
        db.WriteFlowObject(flow_obj)

        args = flow_plugin.ApiListFlowApplicableParsersArgs()
        args.client_id = client_id
        args.flow_id = flow_id

        with self.assertRaisesRegex(ValueError,
                                    "Not an artifact-collector flow"):
            self.handler.Handle(args)
Example 17
    def testHandlerUsesKnowledgeBase(self, db: abstract_db.Database):
        token = _CreateToken(db)
        client_id = db_test_utils.InitializeClient(db)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.users = [rdf_client.User(homedir="/home/foo")]
        db.WriteClientSnapshot(snapshot)

        handler = flow_plugin.ApiExplainGlobExpressionHandler()
        args = flow_plugin.ApiExplainGlobExpressionArgs(
            example_count=2,
            client_id=client_id,
            glob_expression="%%users.homedir%%/foo")
        results = handler.Handle(args, token=token)
        self.assertEqual(list(results.components), [
            rdf_paths.GlobComponentExplanation(
                glob_expression="%%users.homedir%%", examples=["/home/foo"]),
            rdf_paths.GlobComponentExplanation(glob_expression="/foo",
                                               examples=[]),
        ])
Example 18
    def TestMethod(self, db: abstract_db.Database):
      client_id = "C.0123456789abcdef"
      db.WriteClientMetadata(client_id, first_seen=now)

      client = db.ReadClientFullInfo(client_id)
      self.assertEqual(client.metadata.first_seen, now)
Example 19
def _CreateToken(db: abstract_db.Database) -> access_control.ACLToken:
    username = "".join(random.choice("abcdef") for _ in range(8))
    db.WriteGRRUser(username)
    return access_control.ACLToken(username=username, reason="Lorem ipsum.")
Example 20
    def testUsesCollectionTimeFiles(self, db: abstract_db.Database):
        token = _CreateToken(db)
        client_id = db_test_utils.InitializeClient(db)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "redox"
        db.WriteClientSnapshot(snapshot)

        with temp.AutoTempFilePath() as temp_filepath:
            fake_artifact_source = rdf_artifacts.ArtifactSource(
                type=rdf_artifacts.ArtifactSource.SourceType.FILE,
                attributes={
                    "paths": [temp_filepath],
                })

            fake_artifact = rdf_artifacts.Artifact(
                name="FakeArtifact",
                doc="Lorem ipsum.",
                sources=[fake_artifact_source])

            flow_args = rdf_artifacts.ArtifactCollectorFlowArgs()
            flow_args.artifact_list = [fake_artifact.name]
            flow_args.apply_parsers = False

            with io.open(temp_filepath, mode="wb") as temp_filedesc:
                temp_filedesc.write(b"OLD")

            with mock.patch.object(
                    artifact_registry, "REGISTRY",
                    artifact_registry.ArtifactRegistry()) as registry:
                registry.RegisterArtifact(fake_artifact)

                # First, we run the artifact collector to collect the old file and save
                # the flow id to parse the results later.
                flow_id = flow_test_lib.TestFlowHelper(
                    collectors.ArtifactCollectorFlow.__name__,
                    action_mocks.FileFinderClientMock(),
                    client_id=client_id,
                    args=flow_args,
                    token=token)

                flow_test_lib.FinishAllFlowsOnClient(client_id)

            with io.open(temp_filepath, mode="wb") as temp_filedesc:
                temp_filedesc.write(b"NEW")

            with mock.patch.object(
                    artifact_registry, "REGISTRY",
                    artifact_registry.ArtifactRegistry()) as registry:
                registry.RegisterArtifact(fake_artifact)

                # Now, we run the artifact collector again to collect the new
                # version of the file, so the server ends up with the updated
                # content. Parsing should still be performed against the
                # results of the previous flow.
                flow_test_lib.TestFlowHelper(
                    collectors.ArtifactCollectorFlow.__name__,
                    action_mocks.FileFinderClientMock(),
                    client_id=client_id,
                    args=flow_args,
                    token=token)

                flow_test_lib.FinishAllFlowsOnClient(client_id)

        class FakeFileParser(abstract_parser.SingleFileParser):

            supported_artifacts = [fake_artifact.name]

            def ParseFile(
                self,
                knowledge_base: rdf_client.KnowledgeBase,
                pathspec: rdf_paths.PathSpec,
                filedesc: file_store.BlobStream,
            ) -> Iterable[rdfvalue.RDFBytes]:
                del knowledge_base, pathspec  # Unused.
                return [rdfvalue.RDFBytes(filedesc.Read())]

        with parser_test_lib._ParserContext("FakeFile", FakeFileParser):
            args = flow_plugin.ApiListParsedFlowResultsArgs(
                client_id=client_id, flow_id=flow_id, offset=0, count=1024)

            result = self.handler.Handle(args, token=token)

        self.assertEmpty(result.errors)
        self.assertLen(result.items, 1)

        response = result.items[0].payload
        self.assertEqual(response, b"OLD")
Example 21
    def testUsesKnowledgebaseFromFlow(self, db: abstract_db.Database):
        token = _CreateToken(db)

        client_id = db_test_utils.InitializeClient(db)

        # This is the snapshot that is visible to the flow and should be used for
        # parsing results.
        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "redox"
        db.WriteClientSnapshot(snapshot)

        with mock.patch.object(
                artifact_registry, "REGISTRY",
                artifact_registry.ArtifactRegistry()) as registry:
            registry.RegisterArtifact(self.ECHO1337_ARTIFACT)

            flow_args = rdf_artifacts.ArtifactCollectorFlowArgs()
            flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name]
            flow_args.apply_parsers = False

            flow_id = flow_test_lib.TestFlowHelper(
                collectors.ArtifactCollectorFlow.__name__,
                self.FakeExecuteCommand(),
                client_id=client_id,
                args=flow_args,
                token=token)

        class FakeParser(abstract_parser.SingleResponseParser):

            supported_artifacts = [self.ECHO1337_ARTIFACT.name]

            def ParseResponse(
                self,
                knowledge_base: rdf_client.KnowledgeBase,
                response: rdf_client_action.ExecuteResponse,
            ) -> Iterable[rdf_client_action.ExecuteResponse]:
                precondition.AssertType(response,
                                        rdf_client_action.ExecuteResponse)

                parsed_response = rdf_client_action.ExecuteResponse()
                parsed_response.stdout = response.stdout
                parsed_response.stderr = knowledge_base.os.encode("utf-8")
                return [parsed_response]

        # This is a snapshot written to the database after the responses were
        # collected, so this should not be used for parsing.
        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.os = "linux"
        db.WriteClientSnapshot(snapshot)

        with parser_test_lib._ParserContext("Fake", FakeParser):
            args = flow_plugin.ApiListParsedFlowResultsArgs(
                client_id=client_id, flow_id=flow_id, offset=0, count=1024)

            result = self.handler.Handle(args, token=token)

        self.assertEmpty(result.errors)
        self.assertLen(result.items, 1)

        response = result.items[0].payload
        self.assertIsInstance(response, rdf_client_action.ExecuteResponse)
        self.assertEqual(response.stdout, b"1337")
        self.assertEqual(response.stderr.decode("utf-8"), "redox")
Example 22
def _CreateContext(
        db: abstract_db.Database) -> api_call_context.ApiCallContext:
    username = "".join(random.choice("abcdef") for _ in range(8))
    db.WriteGRRUser(username)
    return api_call_context.ApiCallContext(username)
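
This helper has no call site in the examples above. As a hedged sketch (an assumption, not taken from the source), the returned context would be passed to an API handler in place of the ACL token used in Examples 17, 20, and 21; the flow setup mirrors Example 14, and the `context=` keyword on `Handle` is assumed:

    # Hedged usage sketch: the flow setup mirrors Example 14; the `context=`
    # keyword on Handle() is an assumption (the newer counterpart of `token=`).
    def testAlreadyAppliedParsersWithContext(
            self, db: abstract_db.Database) -> None:
        context = _CreateContext(db)
        client_id = db_test_utils.InitializeClient(db)
        flow_id = "4815162342ABCDEF"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = collectors.ArtifactCollectorFlow.__name__
        flow_obj.args = rdf_artifacts.ArtifactCollectorFlowArgs(
            apply_parsers=True)
        db.WriteFlowObject(flow_obj)

        args = flow_plugin.ApiListFlowApplicableParsersArgs()
        args.client_id = client_id
        args.flow_id = flow_id

        # With apply_parsers=True nothing is left to apply, so no parsers are
        # reported (same expectation as Example 14).
        result = self.handler.Handle(args, context=context)
        self.assertEmpty(result.parsers)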
Example 23
    def TestMethod(self, username: Text, db: abstract_db.Database):
      db.WriteGRRUser(username)

      user = db.ReadGRRUser(username)
      self.assertEqual(user.username, username)
Example 24
    def TestMethod(self, db: abstract_db.Database):
      self.assertEqual(db.CountGRRUsers(), 0)

      db.WriteGRRUser("foo")
      self.assertEqual(db.CountGRRUsers(), 1)
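
The bare `TestMethod` samples in Examples 18, 23, and 24 follow the calling convention of a database-injecting test decorator; in GRR that role is typically played by `db_test_lib.WithDatabase`, which supplies `db` as the last positional argument. Whether these snippets were written for that decorator is an assumption, so the sketch below is an illustration only:

    # Illustration only: assumes the db_test_lib.WithDatabase decorator
    # supplies `db` as the last positional argument of the test method.
    @db_test_lib.WithDatabase
    def TestMethod(self, db: abstract_db.Database):
      self.assertEqual(db.CountGRRUsers(), 0)

      db.WriteGRRUser("foo")
      self.assertEqual(db.CountGRRUsers(), 1)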