Example #1
    def GetFiles(self, source, path_type, max_size):
        """Get a set of files."""
        new_path_list = []
        for path in source.attributes["paths"]:
            # Interpolate any attributes from the knowledgebase.
            new_path_list.extend(
                artifact_lib.InterpolateKbAttributes(
                    path, self.state.knowledge_base))

        action = file_finder.FileFinderAction(
            action_type=rdfvalue.FileFinderAction.Action.DOWNLOAD,
            download=rdfvalue.FileFinderDownloadActionOptions(
                max_size=max_size))

        self.CallFlow("FileFinder",
                      paths=new_path_list,
                      pathtype=path_type,
                      action=action,
                      request_data={
                          "artifact_name": self.current_artifact_name,
                          "source": source.ToPrimitiveDict()
                      },
                      next_state="ProcessFileFinderResults")
Example #2
class TestFileFinderOSWindows(transfer.TestGetFileOSWindows):
    """Download a file with FileFinder.

    Exercise globbing, interpolation and filtering.
    """
    flow = "FileFinder"
    test_output_path = "/fs/os/.*/Windows/System32/notepad.exe"

    sizecondition = file_finder.FileFinderSizeCondition(max_file_size=1000000)
    filecondition = file_finder.FileFinderCondition(
        condition_type=file_finder.FileFinderCondition.Type.SIZE,
        size=sizecondition)

    download = file_finder.FileFinderDownloadActionOptions()
    action = file_finder.FileFinderAction(
        action_type=file_finder.FileFinderAction.Action.DOWNLOAD,
        download=download)

    args = {
        "paths": ["%%environ_systemroot%%\\System32\\notepad.*"],
        "conditions": filecondition,
        "action": action
    }
Example #3
    def RunFlowAndCheckResults(self,
                               conditions=None,
                               action=file_finder.FileFinderAction.Action.STAT,
                               expected_files=None,
                               non_expected_files=None,
                               paths=None):
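        """Runs FileFinder and checks results and per-action side effects."""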
        if not isinstance(action, file_finder.FileFinderAction):
            action = file_finder.FileFinderAction(action_type=action)
        action_type = action.action_type

        conditions = conditions or []
        expected_files = expected_files or []
        non_expected_files = non_expected_files or []

        for fname in expected_files + non_expected_files:
            aff4.FACTORY.Delete(self.FileNameToURN(fname), token=self.token)

        results = self.RunFlow(paths=paths,
                               conditions=conditions,
                               action=action)
        self.CheckReplies(results, action_type, expected_files)

        self.CheckFilesInCollection(expected_files)

        if action_type == file_finder.FileFinderAction.Action.STAT:
            self.CheckFilesNotDownloaded(expected_files + non_expected_files)
            self.CheckFilesNotHashed(expected_files + non_expected_files)
        elif action_type == file_finder.FileFinderAction.Action.DOWNLOAD:
            self.CheckFilesHashed(expected_files)
            self.CheckFilesNotHashed(non_expected_files)
            self.CheckFilesDownloaded(expected_files)
            self.CheckFilesNotDownloaded(non_expected_files)
            # Downloaded files are hashed to allow for deduping.
        elif action_type == file_finder.FileFinderAction.Action.HASH:
            self.CheckFilesNotDownloaded(expected_files + non_expected_files)
            self.CheckFilesHashed(expected_files)
            self.CheckFilesNotHashed(non_expected_files)
Example #4
    def Handle(self, args, token=None):
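        """Starts a throttled FileFinder flow on the target client."""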
        client_urn = self.GetClientTarget(args, token=token)

        size_condition = file_finder.FileFinderCondition(
            condition_type=file_finder.FileFinderCondition.Type.SIZE,
            size=file_finder.FileFinderSizeCondition(
                max_file_size=args.max_file_size))

        file_finder_args = file_finder.FileFinderArgs(
            paths=args.paths,
            action=file_finder.FileFinderAction(action_type=args.action),
            conditions=[size_condition])

        # Check our flow throttling limits; this will raise if there are problems.
        throttler = throttle.FlowThrottler(
            daily_req_limit=config_lib.CONFIG.Get("API.DailyFlowRequestLimit"),
            dup_interval=config_lib.CONFIG.Get("API.FlowDuplicateInterval"))
        throttler.EnforceLimits(client_urn,
                                token.username,
                                file_finder.FileFinder.__name__,
                                file_finder_args,
                                token=token)

        # Limit the whole flow to 200MB so that even if a glob matches lots of
        # small files the impact stays bounded.
        runner_args = rdf_flows.FlowRunnerArgs(
            client_id=client_urn,
            flow_name=file_finder.FileFinder.__name__,
            network_bytes_limit=200 * 1000 * 1000)

        flow_id = flow.GRRFlow.StartFlow(runner_args=runner_args,
                                         token=token,
                                         args=file_finder_args)

        return ApiStartRobotGetFilesOperationResult(
            operation_id=utils.SmartUnicode(flow_id))
Example #5
  def setUp(self):
    super(ApiGetHuntFileHandlerTest, self).setUp()

    self.handler = hunt_plugin.ApiGetHuntFileHandler()

    self.file_path = os.path.join(self.base_path, "test.plist")
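    # Create a hunt that downloads the test file via FileFinder.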
    self.hunt = hunts.GRRHunt.StartHunt(
        hunt_name="GenericHunt",
        flow_runner_args=rdf_flows.FlowRunnerArgs(
            flow_name=file_finder.FileFinder.__name__),
        flow_args=file_finder.FileFinderArgs(
            paths=[self.file_path],
            action=file_finder.FileFinderAction(action_type="DOWNLOAD")),
        client_rate=0,
        token=self.token)
    self.hunt.Run()

    self.results_urn = self.hunt.results_collection_urn
    self.aff4_file_path = "fs/os/%s" % self.file_path

    self.client_id = self.SetupClients(1)[0]
    self.AssignTasksToClients(client_ids=[self.client_id])
    action_mock = action_mocks.FileFinderClientMock()
    test_lib.TestHuntHelper(action_mock, [self.client_id], token=self.token)
Example #6
    def StartRequests(self):
        """Generate and send the Find requests."""
        client = aff4.FACTORY.Open(self.client_id, token=self.token)
        if self.runner.output is not None:
            self.runner.output.Set(
                self.runner.output.Schema.DESCRIPTION(
                    "CacheGrep for {0}".format(self.args.data_regex)))

        usernames = [
            "%s\\%s" % (u.userdomain, u.username) for u in self.state.users
        ]
        usernames = [u.lstrip("\\")
                     for u in usernames]  # Strip \\ if no domain.

        regex_condition = file_finder.FileFinderContentsRegexMatchCondition(
            regex=self.args.data_regex,
            mode=file_finder.FileFinderContentsRegexMatchCondition.Mode.FIRST_HIT)
        condition = file_finder.FileFinderCondition(
            condition_type=file_finder.FileFinderCondition.Type.CONTENTS_REGEX_MATCH,
            contents_regex_match=regex_condition)

        for path in self.state.all_paths:
            full_paths = flow_utils.InterpolatePath(path,
                                                    client,
                                                    users=usernames)
            for full_path in full_paths:
                self.CallFlow(
                    "FileFinder",
                    paths=[os.path.join(full_path, "**5")],
                    pathtype=self.state.args.pathtype,
                    conditions=[condition],
                    action=file_finder.FileFinderAction(
                        action_type=file_finder.FileFinderAction.Action.DOWNLOAD),
                    next_state="HandleResults")
Example #7
    def testAttributesOfFileFoundInHashFileStoreAreSetCorrectly(self):
        client_ids = self.SetupClients(2)

        filename = os.path.join(self.base_path, "tcpip.sig")
        pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                      path=filename)
        urn1 = aff4_grr.VFSGRRClient.PathspecToURN(pathspec, client_ids[0])
        urn2 = aff4_grr.VFSGRRClient.PathspecToURN(pathspec, client_ids[1])

        for client_id in client_ids:
            client_mock = action_mocks.FileFinderClientMock()
            for _ in test_lib.TestFlowHelper(
                    file_finder.FileFinder.__name__,
                    client_mock,
                    token=self.token,
                    client_id=client_id,
                    paths=[filename],
                    action=file_finder.FileFinderAction(
                        action_type=file_finder.FileFinderAction.Action.DOWNLOAD)):
                pass
            # Run the worker to make sure the FileStore.AddFileToStore event is
            # processed.
            worker = test_lib.MockWorker(token=self.token)
            worker.Simulate()

        fd1 = aff4.FACTORY.Open(urn1, token=self.token)
        self.assertTrue(isinstance(fd1, aff4_grr.VFSBlobImage))

        fd2 = aff4.FACTORY.Open(urn2, token=self.token)
        self.assertTrue(isinstance(fd2, filestore.FileStoreImage))

        self.assertEqual(fd1.Get(fd1.Schema.STAT), fd2.Get(fd2.Schema.STAT))
        self.assertEqual(fd1.Get(fd1.Schema.SIZE), fd2.Get(fd2.Schema.SIZE))
        self.assertEqual(fd1.Get(fd1.Schema.CONTENT_LAST),
                         fd2.Get(fd2.Schema.CONTENT_LAST))
Example #8
    def testFileFinderThrottlingByFlowCountWorks(self):
        self.InitRouterConfig(
            self.__class__.FILE_FINDER_THROTTLED_ROUTER_CONFIG)

        args = []
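        # Use a different path for each request so the flows are not treated as
        # duplicates and only the flow-count limit applies.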
        for p in ["tests.plist", "numbers.txt", "numbers.txt.ver2"]:
            args.append(
                file_finder.FileFinderArgs(
                    action=file_finder.FileFinderAction(action_type="STAT"),
                    paths=[p]).AsPrimitiveProto())

        client_ref = self.api.Client(client_id=self.client_id.Basename())

        flow_obj = client_ref.CreateFlow(name=file_finder.FileFinder.__name__,
                                         args=args[0])
        self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

        flow_obj = client_ref.CreateFlow(name=file_finder.FileFinder.__name__,
                                         args=args[1])
        self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

        with self.assertRaisesRegexp(RuntimeError, "2 flows run since"):
            client_ref.CreateFlow(name=file_finder.FileFinder.__name__,
                                  args=args[2])
Example #9
    def testFlowDuplicateLimit(self):
        # Disable the request limit checking by setting it to 0.
        throttler = throttle.FlowThrottler(
            daily_req_limit=0, dup_interval=rdfvalue.Duration("1200s"))

        # Running the same flow immediately should fail
        with test_lib.FakeTime(self.BASE_TIME):
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    "DummyLogFlow",
                                    None,
                                    token=self.token)

            flow.GRRFlow.StartFlow(client_id=self.client_id,
                                   flow_name="DummyLogFlow",
                                   token=self.token)

            with self.assertRaises(throttle.ErrorFlowDuplicate):
                throttler.EnforceLimits(self.client_id,
                                        self.token.username,
                                        "DummyLogFlow",
                                        None,
                                        token=self.token)

        # Doing the same outside the window should work
        with test_lib.FakeTime(self.BASE_TIME + 1200 + 1):
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    "DummyLogFlow",
                                    None,
                                    token=self.token)

            flow.GRRFlow.StartFlow(client_id=self.client_id,
                                   flow_name="DummyLogFlow",
                                   token=self.token)

            with self.assertRaises(throttle.ErrorFlowDuplicate):
                throttler.EnforceLimits(self.client_id,
                                        self.token.username,
                                        "DummyLogFlow",
                                        None,
                                        token=self.token)

        # Now try a flow with more complicated args
        args = file_finder.FileFinderArgs(
            paths=["/tmp/1", "/tmp/2"],
            action=file_finder.FileFinderAction(action_type="STAT"))

        with test_lib.FakeTime(self.BASE_TIME):
            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    "FileFinder",
                                    args,
                                    token=self.token)

            flow.GRRFlow.StartFlow(
                client_id=self.client_id,
                flow_name="FileFinder",
                token=self.token,
                paths=["/tmp/1", "/tmp/2"],
                action=file_finder.FileFinderAction(action_type="STAT"))

            with self.assertRaises(throttle.ErrorFlowDuplicate):
                throttler.EnforceLimits(self.client_id,
                                        self.token.username,
                                        "FileFinder",
                                        args,
                                        token=self.token)

            # Different args should succeed.
            args = file_finder.FileFinderArgs(
                paths=["/tmp/1", "/tmp/3"],
                action=file_finder.FileFinderAction(action_type="STAT"))

            throttler.EnforceLimits(self.client_id,
                                    self.token.username,
                                    "FileFinder",
                                    args,
                                    token=self.token)
Example #10
    def testFileFinderWorkflowWorks(self):
        self.InitRouterConfig(self.__class__.FILE_FINDER_ROUTER_CONFIG %
                              self.token.username)

        client_ref = self.api.Client(client_id=self.client_id.Basename())

        args = file_finder.FileFinderArgs(
            paths=[
                os.path.join(self.base_path, "test.plist"),
                os.path.join(self.base_path, "numbers.txt"),
                os.path.join(self.base_path, "numbers.txt.ver2")
            ],
            action=file_finder.FileFinderAction(
                action_type=file_finder.FileFinderAction.Action.DOWNLOAD)
        ).AsPrimitiveProto()
        flow_obj = client_ref.CreateFlow(name=file_finder.FileFinder.__name__,
                                         args=args)
        self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

        # Now run the flow we just started.
        client_id = rdf_client.ClientURN(flow_obj.client_id)
        flow_urn = client_id.Add("flows").Add(flow_obj.flow_id)
        for _ in test_lib.TestFlowHelper(
                flow_urn,
                client_id=client_id,
                client_mock=action_mocks.FileFinderClientMock(),
                token=self.token):
            pass

        # Refresh flow.
        flow_obj = client_ref.Flow(flow_obj.flow_id).Get()
        self.assertEqual(flow_obj.data.state, flow_obj.data.TERMINATED)

        # Check that we got 3 results (we downloaded 3 files).
        results = list(flow_obj.ListResults())
        self.assertEqual(len(results), 3)
        # We expect results to be FileFinderResult.
        self.assertEqual(
            sorted(
                os.path.basename(r.payload.stat_entry.aff4path)
                for r in results),
            sorted(["test.plist", "numbers.txt", "numbers.txt.ver2"]))

        # Now download the files archive.
        zip_stream = StringIO.StringIO()
        flow_obj.GetFilesArchive().WriteToStream(zip_stream)
        zip_fd = zipfile.ZipFile(zip_stream)

        # Now check that the archive contains only the "test.plist" file, as it's
        # the only file that matches the whitelist (see FILE_FINDER_ROUTER_CONFIG).
        # There should be 3 items in the archive: the hash of the "test.plist"
        # file, the symlink to this hash and the MANIFEST file.
        namelist = zip_fd.namelist()
        self.assertEqual(len(namelist), 3)

        # The first component of every path in the archive is the containing
        # folder; strip it.
        namelist = [os.path.join(*n.split(os.sep)[1:]) for n in namelist]
        with open(os.path.join(self.base_path, "test.plist")) as test_plist_fd:
            test_plist_hash = hashlib.sha256(test_plist_fd.read()).hexdigest()
        self.assertEqual(
            sorted([
                # pyformat: disable
                os.path.join(self.client_id.Basename(), "fs", "os",
                             self.base_path.strip("/"), "test.plist"),
                os.path.join("hashes", test_plist_hash),
                "MANIFEST"
                # pyformat: enable
            ]),
            sorted(namelist))
Example #11
"""GRR console script to collect bashrc."""

from grr.lib import flow_utils
from grr.lib.flows.general import file_finder

action = file_finder.FileFinderAction(action_type="DOWNLOAD")
ff_args = file_finder.FileFinderArgs(paths=["/home/*/.bashrc"], action=action)
newest_time = ""
target_client = None
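# Assumes this runs inside the GRR interactive console, where SearchClients is
# available in the global namespace; keep the match with the newest timestamp.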
for client in SearchClients("client-ubuntu-trusty-m"):
    if client[3] > newest_time:
        newest_time = client[3]
        target_client = client[0]

if target_client:
    flow_utils.StartFlowAndWait(target_client.urn,
                                token=None,
                                flow_name="FileFinder",
                                args=ff_args)