Example 1
  def testSingleResponseAndSingleFileParserWithStatResponse(self):

    class FooParser(parsers.SingleResponseParser[rdfvalue.RDFString]):

      supported_artifacts = ["Quux"]

      def ParseResponse(
          self,
          knowledge_base: rdf_client.KnowledgeBase,
          response: rdfvalue.RDFValue,
      ) -> Iterator[rdfvalue.RDFString]:
        del knowledge_base  # Unused.

        if not isinstance(response, rdf_client_fs.StatEntry):
          raise TypeError(f"Unexpected response type: {type(response)}")

        yield rdfvalue.RDFString(f"PATH('{response.pathspec.path}')")

    class BarParser(parsers.SingleFileParser[rdfvalue.RDFString]):

      supported_artifacts = ["Quux"]

      def ParseFile(
          self,
          knowledge_base: rdf_client.KnowledgeBase,
          pathspec: rdf_paths.PathSpec,
          filedesc: IO[bytes],
      ) -> Iterator[rdfvalue.RDFString]:
        raise NotImplementedError()

    with parser_test_lib._ParserContext("Foo", FooParser):
      with parser_test_lib._ParserContext("Bar", BarParser):
        factory = parsers.ArtifactParserFactory("Quux")
        knowledge_base = rdf_client.KnowledgeBase()

        stat_entry = rdf_client_fs.StatEntry()
        stat_entry.pathspec.path = "foo/bar/baz"
        stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS

        applicator = artifact.ParserApplicator(
            factory, client_id=self.client_id, knowledge_base=knowledge_base)

        applicator.Apply([stat_entry])

        responses = list(applicator.Responses())
        self.assertLen(responses, 1)
        self.assertEqual(responses[0], "PATH('foo/bar/baz')")
Example 2
  def testBigQueryPluginWithValuesOfSameType(self):
    responses = []
    for i in range(10):
      responses.append(
          rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(
                  path="/foo/bar/%d" % i, pathtype="OS"),
              st_mode=33184,  # octal = 100640 => u=rw,g=r,o= => -rw-r-----
              st_ino=1063090,
              st_dev=64512,
              st_nlink=1 + i,
              st_uid=139592,
              st_gid=5000,
              st_size=0,
              st_atime=1336469177,
              st_mtime=1336129892,
              st_ctime=1336129892))

    output = self.ProcessResponses(
        plugin_args=bigquery_plugin.BigQueryOutputPluginArgs(),
        responses=responses)

    self.assertEqual(len(output), 1)
    _, stream, schema, job_id = output[0]

    self.assertEqual(job_id,
                     "C-1000000000000000_Results_ExportedFile_1445995873")

    self.CompareSchemaToKnownGood(schema)

    actual_fd = gzip.GzipFile(
        None, "r", bigquery_plugin.BigQueryOutputPlugin.GZIP_COMPRESSION_LEVEL,
        stream)

    # Compare to our stored data.
    expected_fd = open(
        os.path.join(config.CONFIG["Test.data_dir"], "bigquery",
                     "ExportedFile.json"), "rb")

    # BigQuery expects a newline-separated list of JSON dicts, but this isn't
    # valid JSON, so we can't just load the whole thing and compare.
    counter = 0
    for actual, expected in zip(actual_fd, expected_fd):
      self.assertEqual(json.loads(actual), json.loads(expected))
      counter += 1

    self.assertEqual(counter, 10)
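
As a cross-check of the st_mode comment in the test above, the value decodes with Python's standard stat module. A quick sketch, not part of the original test:

    import stat

    # 33184 == 0o100640: a regular file (S_IFREG) with permission bits 0640.
    assert 33184 == stat.S_IFREG | 0o640
    assert stat.filemode(33184) == "-rw-r-----"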
Example 3
  def testCallsOutputMemoryDumpWithSingleBlob(self):
    yara_dump = rdf_memory.YaraProcessDumpResponse(dumped_processes=[
        rdf_memory.YaraProcessDumpInformation(dump_files=[
            rdf_paths.PathSpec(path="my_proc_123_f0_fa.tmp", pathtype=TMPFILE),
        ])
    ])
    stat_entry = rdf_client_fs.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="my_proc_123_f0_fa.tmp", pathtype=TMPFILE))
    m0 = rdf_flows.GrrMessage(source=self.client_id, payload=yara_dump)
    m1 = rdf_flows.GrrMessage(source=self.client_id, payload=stat_entry)
    self._ProcessValuesWithPlugin([m0, m1])

    self.plugin.OutputMemoryDump.assert_called_once_with(
        rdf_memory.YaraProcessDumpInformation(dump_files=[
            rdf_paths.PathSpec(path="my_proc_123_f0_fa.tmp", pathtype=TMPFILE),
        ]), self.client_id)
Example 4
  def testShowsGenerateArchiveButtonForArtifactDownloaderHunt(self):
    stat_entry = rdf_client_fs.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
    values = [
        collectors.ArtifactFilesDownloaderResult(downloaded_file=stat_entry)
    ]

    self.CreateGenericHuntWithCollection(values=values)

    self.Open("/")
    self.Click("css=a[grrtarget=hunts]")
    self.Click("css=td:contains('GenericHunt')")
    self.Click("css=li[heading=Results]")

    self.WaitUntil(self.IsTextPresent,
                   "Files referenced in this collection can be downloaded")
Example 5
    def ProcessListDirectory(self, responses):
        """Processes the results of the ListDirectory client action.

    Args:
      responses: a flow Responses object.
    """
        if not responses.success:
            raise flow.FlowError("Unable to list directory.")

        with data_store.DB.GetMutationPool() as pool:
            for response in responses:
                stat_entry = rdf_client_fs.StatEntry(response)
                filesystem.CreateAFF4Object(stat_entry,
                                            self.client_urn,
                                            pool,
                                            token=self.token)
                self.SendReply(stat_entry)
Example 6
    def testExportCommandIsShownForStatEntryResults(self):
        stat_entry = rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
        values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]

        hunt_urn, _ = self.CreateGenericHuntWithCollection(values=values)
        hunt_id = hunt_urn.Basename()

        self.Open("/#/hunts/%s/results" % hunt_id)
        self.Click("link=Show export command")

        self.WaitUntil(
            self.IsTextPresent,
            "/usr/bin/grr_api_shell 'http://localhost:8000/' "
            "--exec_code 'grrapi.Hunt(\"%s\").GetFilesArchive()."
            "WriteToFile(\"./hunt_results_%s.zip\")'" %
            (hunt_urn.Basename(), hunt_urn.Basename().replace(":", "_")))
Example 7
    def testYamlPluginWritesMoreThanOneBatchOfRowsCorrectly(self):
        num_rows = self.__class__.plugin_cls.ROW_BATCH * 2 + 1

        responses = []
        for i in range(num_rows):
            responses.append(
                rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar/%d" % i, pathtype="OS")))

        zip_fd, prefix = self.ProcessValuesToZip(
            {rdf_client_fs.StatEntry: responses})
        parsed_output = yaml.load(
            zip_fd.open("%s/ExportedFile/from_StatEntry.yaml" % prefix))
        self.assertLen(parsed_output, num_rows)
        for i in range(num_rows):
            self.assertEqual(parsed_output[i]["urn"],
                             "aff4:/%s/fs/os/foo/bar/%d" % (self.client_id, i))
Example 8
  def testFromStatEntryMetadata(self):
    stat_entry = rdf_client_fs.StatEntry()
    stat_entry.pathspec.path = "foo/bar"
    stat_entry.pathspec.pathtype = rdf_paths.PathSpec.PathType.OS

    stat_obj = os.stat(tempfile.gettempdir())
    stat_entry.st_mode = stat_obj.st_mode
    stat_entry.st_ino = stat_obj.st_ino
    stat_entry.st_dev = stat_obj.st_dev

    path_info = rdf_objects.PathInfo.FromStatEntry(stat_entry)
    self.assertEqual(path_info.path_type, rdf_objects.PathInfo.PathType.OS)
    self.assertEqual(path_info.components, ["foo", "bar"])
    self.assertTrue(path_info.directory)
    self.assertEqual(path_info.stat_entry.st_mode, stat_obj.st_mode)
    self.assertEqual(path_info.stat_entry.st_ino, stat_obj.st_ino)
    self.assertEqual(path_info.stat_entry.st_dev, stat_obj.st_dev)
Example 9
    def ProcessFileStats(self, responses):
        """Extract DataBlob from Stat response."""
        if not responses.success:
            return

        system_root_paths = ["Windows", "WinNT", "WINNT35", "WTSRV", "WINDOWS"]
        for response in responses:
            if response.pathspec.path[4:] in system_root_paths:
                systemdrive = response.pathspec.path[1:3]
                systemroot = "%s\\%s" % (systemdrive,
                                         response.pathspec.path[4:])

                # Put the data back into the original format expected by the
                # artifact.
                data = rdf_protodict.DataBlob().SetValue(systemroot)
                self.SendReply(rdf_client_fs.StatEntry(registry_data=data))
                self.state.success = True
                break
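
The slicing above assumes client paths of the form "/C:/Windows". A minimal standalone sketch of that decomposition, with the path value assumed for illustration:

    path = "/C:/Windows"
    systemdrive = path[1:3]  # "C:" (skips the leading slash).
    systemroot = "%s\\%s" % (systemdrive, path[4:])
    assert systemroot == "C:\\Windows"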
Example 10
    def _ParsePCIDeviceTestData(self, test_data):
        """Given test_data dictionary, parse it using PCIDevicesInfoParser."""
        parser = linux_file_parser.PCIDevicesInfoParser()
        stats = []
        file_objs = []
        kb_objs = []

        # Populate the stats, file_objs, kb_objs lists needed by the parser.
        for filename, data in iteritems(test_data):
            pathspec = rdf_paths.PathSpec(path=filename, pathtype="OS")
            stat = rdf_client_fs.StatEntry(pathspec=pathspec)
            file_obj = io.BytesIO(data)
            stats.append(stat)
            file_objs.append(file_obj)
            kb_objs.append(None)

        return list(parser.ParseMultiple(stats, file_objs, kb_objs))
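
A hypothetical invocation of the helper above; the sysfs-style paths and contents are made up for illustration:

    # Runs inside the same test class, so self is the test instance.
    test_data = {
        "/sys/bus/pci/devices/0000:00:01.0/vendor": b"0x8086\n",
        "/sys/bus/pci/devices/0000:00:01.0/class": b"0x060000\n",
    }
    devices = self._ParsePCIDeviceTestData(test_data)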
Example 11
  def testPathSpecCasingIsCorrected(self):
    flow = memory.DumpProcessMemory(rdf_flow_objects.Flow())
    flow.SendReply = mock.Mock(spec=flow.SendReply)

    request = rdf_flow_objects.FlowRequest(
        request_data={
            "YaraProcessDumpResponse":
                rdf_memory.YaraProcessDumpResponse(dumped_processes=[
                    rdf_memory.YaraProcessDumpInformation(memory_regions=[
                        rdf_memory.ProcessMemoryRegion(
                            start=1,
                            size=1,
                            file=rdf_paths.PathSpec.Temp(
                                path="/C:/grr/x_1_0_1.tmp")),
                        rdf_memory.ProcessMemoryRegion(
                            start=1,
                            size=1,
                            file=rdf_paths.PathSpec.Temp(
                                path="/C:/GRR/x_1_1_2.tmp"))
                    ])
                ])
        })
    pathspecs = [
        rdf_paths.PathSpec.Temp(path="/C:/Grr/x_1_0_1.tmp"),
        rdf_paths.PathSpec.Temp(path="/C:/Grr/x_1_1_2.tmp")
    ]
    responses = flow_responses.Responses.FromResponses(request, [
        rdf_flow_objects.FlowResponse(
            payload=rdf_client_fs.StatEntry(pathspec=pathspec))
        for pathspec in pathspecs
    ])

    flow.ProcessMemoryRegions(responses)
    flow.SendReply.assert_any_call(
        rdf_memory.YaraProcessDumpResponse(dumped_processes=[
            rdf_memory.YaraProcessDumpInformation(memory_regions=[
                rdf_memory.ProcessMemoryRegion(
                    start=1,
                    size=1,
                    file=rdf_paths.PathSpec.Temp(path="/C:/Grr/x_1_0_1.tmp")),
                rdf_memory.ProcessMemoryRegion(
                    start=1,
                    size=1,
                    file=rdf_paths.PathSpec.Temp(path="/C:/Grr/x_1_1_2.tmp"))
            ])
        ]))
Example 12
def ParseResponse(processor_obj, response, knowledge_base):
    """Call the parser for the response and yield rdf values.

  Args:
    processor_obj: An instance of the parser.
    response: An rdf value response from a client action.
    knowledge_base: containing information about the client.
  Returns:
    An iterable of rdf value responses.
  Raises:
    ValueError: If the requested parser is not supported.
  """
    if processor_obj.process_together:
        parse_method = processor_obj.ParseMultiple
    else:
        parse_method = processor_obj.Parse

    if isinstance(processor_obj, parser.CommandParser):
        result_iterator = parse_method(cmd=response.request.cmd,
                                       args=response.request.args,
                                       stdout=response.stdout,
                                       stderr=response.stderr,
                                       return_val=response.exit_status,
                                       time_taken=response.time_used,
                                       knowledge_base=knowledge_base)
    elif isinstance(processor_obj, parser.WMIQueryParser):
        # At the moment no WMIQueryParser actually uses the query and
        # knowledge_base arguments that are passed in.
        result_iterator = parse_method(None, response, None)
    elif isinstance(processor_obj, parser.FileParser):
        if processor_obj.process_together:
            raise NotImplementedError()
        else:
            file_obj = vfs.VFSOpen(response.pathspec)
            stat = rdf_client_fs.StatEntry(pathspec=response.pathspec)
            result_iterator = parse_method(stat, file_obj, None)
    elif isinstance(processor_obj,
                    (parser.RegistryParser, parser.RekallPluginParser,
                     parser.RegistryValueParser, parser.GenericResponseParser,
                     parser.GrepParser)):
        result_iterator = parse_method(response, knowledge_base)
    elif isinstance(processor_obj, parser.ArtifactFilesParser):
        raise NotImplementedError()
    else:
        raise ValueError("Unsupported parser: %s" % processor_obj)
    return result_iterator
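
A usage sketch for the dispatcher above; MyRegistryParser and knowledge_base are assumptions for illustration, not part of the original code:

    # MyRegistryParser is assumed to subclass parser.RegistryParser with
    # process_together == False, so it takes the (response, knowledge_base)
    # branch; knowledge_base is an rdf_client.KnowledgeBase (assumed).
    stat_entry = rdf_client_fs.StatEntry(
        pathspec=rdf_paths.PathSpec(path="/foo", pathtype="REGISTRY"))
    for rdf_value in ParseResponse(MyRegistryParser(), stat_entry, knowledge_base):
      print(rdf_value)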
Example 13
    def testYamlPluginWithValuesOfMultipleTypes(self):
        zip_fd, prefix = self.ProcessValuesToZip({
            rdf_client_fs.StatEntry: [
                rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar", pathtype="OS"))
            ],
            rdf_client.Process: [rdf_client.Process(pid=42)]
        })
        self.assertEqual(
            set(zip_fd.namelist()), {
                "%s/MANIFEST" % prefix,
                "%s/ExportedFile/from_StatEntry.yaml" % prefix,
                "%s/ExportedProcess/from_Process.yaml" % prefix
            })

        parsed_manifest = yaml.load(zip_fd.read("%s/MANIFEST" % prefix))
        self.assertEqual(
            parsed_manifest, {
                "export_stats": {
                    "StatEntry": {
                        "ExportedFile": 1
                    },
                    "Process": {
                        "ExportedProcess": 1
                    }
                }
            })

        parsed_output = yaml.load(
            zip_fd.read("%s/ExportedFile/from_StatEntry.yaml" % prefix))
        self.assertLen(parsed_output, 1)

        # Only the client_urn is filled in by the plugin. Doing lookups for
        # all the clients' metadata is possible but expensive, and it doesn't
        # seem to be worth it.
        self.assertEqual(parsed_output[0]["metadata"]["client_urn"],
                         "aff4:/%s" % self.client_id)
        self.assertEqual(parsed_output[0]["metadata"]["source_urn"],
                         str(self.results_urn))
        self.assertEqual(parsed_output[0]["urn"],
                         "aff4:/%s/fs/os/foo/bar" % self.client_id)

        parsed_output = yaml.load(
            zip_fd.read("%s/ExportedProcess/from_Process.yaml" % prefix))
        self.assertLen(parsed_output, 1)
        self.assertEqual(parsed_output[0]["pid"], "42")
Example 14
def GenFileData(paths, data, stats=None, files=None, modes=None):
    """Generate a tuple of list of stats and list of file contents."""
    if stats is None:
        stats = []
    if files is None:
        files = []
    if modes is None:
        modes = {}
    modes.setdefault("st_uid", 0)
    modes.setdefault("st_gid", 0)
    modes.setdefault("st_mode", 0o0100644)
    for path in paths:
        p = rdf_paths.PathSpec(path=path, pathtype="OS")
        stats.append(rdf_client_fs.StatEntry(pathspec=p, **modes))
    for val in data:
        files.append(io.BytesIO(utils.SmartStr(val)))
    return stats, files
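
A hypothetical call pairing two fake files with their contents; the paths and data are made up for illustration:

    stats, files = GenFileData(
        paths=["/etc/passwd", "/etc/group"],
        data=[b"root:x:0:0:root:/root:/bin/bash\n", b"root:x:0:\n"])
    assert len(stats) == len(files) == 2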
Example 15
  def testGenerateZipButtonGetsDisabledAfterClick(self):
    stat_entry = rdf_client_fs.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
    values = [rdf_file_finder.FileFinderResult(stat_entry=stat_entry)]

    hunt_urn = self.CreateGenericHuntWithCollection(values=values)
    self.RequestAndGrantHuntApproval(hunt_urn.Basename())

    self.Open("/")
    self.Click("css=a[grrtarget=hunts]")
    self.Click("css=td:contains('GenericHunt')")
    self.Click("css=li[heading=Results]")
    self.Click("css=button.DownloadButton")

    self.WaitUntil(self.IsElementPresent, "css=button.DownloadButton[disabled]")
    self.WaitUntil(self.IsTextPresent, "Generation has started")
Example 16
    def testYamlPluginWritesUnicodeValuesCorrectly(self):
        zip_fd, prefix = self.ProcessValuesToZip({
            rdf_client_fs.StatEntry: [
                rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/中国新闻网新闻中", pathtype="OS"))
            ]
        })
        self.assertEqual(set(zip_fd.namelist()), {
            "%s/MANIFEST" % prefix,
            "%s/ExportedFile/from_StatEntry.yaml" % prefix
        })

        parsed_output = yaml.load(
            zip_fd.open("%s/ExportedFile/from_StatEntry.yaml" % prefix))

        self.assertLen(parsed_output, 1)
        self.assertEqual(parsed_output[0]["urn"],
                         "aff4:/%s/fs/os/中国新闻网新闻中" % self.client_id)
Example 17
def StatEntryFromPath(path, pathspec, ext_attrs=True):
  """Builds a stat entry object from a given path.

  Args:
    path: A path (string value) to stat.
    pathspec: A `PathSpec` corresponding to the `path`.
    ext_attrs: Whether to include extended file attributes in the result.

  Returns:
    `StatEntry` object.
  """
  try:
    stat = utils.Stat(path)
  except (IOError, OSError) as error:
    logging.error("Failed to obtain stat for '%s': %s", pathspec, error)
    return rdf_client_fs.StatEntry(pathspec=pathspec)

  return StatEntryFromStat(stat, pathspec, ext_attrs=ext_attrs)
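
A behavior sketch under an assumed stat failure: the helper logs the error and falls back to a StatEntry carrying only the pathspec, rather than raising:

    pathspec = rdf_paths.PathSpec(path="/does/not/exist", pathtype="OS")
    entry = StatEntryFromPath("/does/not/exist", pathspec)
    assert entry.pathspec == pathspec  # No st_* fields are populated.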
Example 18
def CreateDirectory(client_path):
    """Creates a directory in datastore-agnostic way.

  Args:
    client_path: A `ClientPath` instance specifying location of the file.
  """
    precondition.AssertType(client_path, db.ClientPath)

    stat_entry = rdf_client_fs.StatEntry(
        pathspec=rdf_paths.PathSpec(
            pathtype=client_path.path_type,
            path="/".join(client_path.components)),
        st_mode=16895)  # 0o40777: a world-writable directory.

    path_info = rdf_objects.PathInfo()
    path_info.path_type = client_path.path_type
    path_info.components = client_path.components
    path_info.stat_entry = stat_entry

    data_store.REL_DB.WritePathInfos(client_path.client_id, [path_info])
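
The magic value 16895 decodes with the standard stat module; a quick sketch confirming the comment above:

    import stat

    # 16895 == 0o40777: the S_IFDIR type bit plus rwxrwxrwx permission bits.
    assert 16895 == stat.S_IFDIR | 0o777
    assert stat.S_ISDIR(16895)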
Example 19
    def testMigrateAllClientsIsIdempotent(self):
        client_urns = list(map(self.SetupClient, range(11)))

        for client_urn in client_urns:
            with self._Aff4Open(
                    client_urn.Add("fs/os").Add("quux/norf")) as fd:
                fd.Set(fd.Schema.STAT, rdf_client_fs.StatEntry(st_size=42))

        migrator = data_migration.ClientVfsMigrator()
        migrator.MigrateAllClients()
        migrator.MigrateAllClients()  # Should not fail in any way.

        for client_urn in client_urns:
            path_info = data_store.REL_DB.ReadPathInfo(
                client_id=client_urn.Basename(),
                path_type=rdf_objects.PathInfo.PathType.OS,
                components=("quux", "norf"))
            self.assertEqual(path_info.stat_entry.st_size, 42)
Example 20
File: ntfs.py Project: syth3/grr
  def _Stat(self, entry, pathspec):
    st = rdf_client_fs.StatEntry()
    st.pathspec = pathspec

    st.st_atime = rdfvalue.RDFDatetimeSeconds.FromDatetime(
        entry.get_access_time())
    st.st_mtime = rdfvalue.RDFDatetimeSeconds.FromDatetime(
        entry.get_modification_time())
    st.st_crtime = rdfvalue.RDFDatetimeSeconds.FromDatetime(
        entry.get_creation_time())
    if entry.has_directory_entries_index():
      st.st_mode = stat.S_IFDIR
    else:
      st.st_mode = stat.S_IFREG
      if entry.has_default_data_stream():
        st.st_size = entry.get_size()
    return st
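
For reference, the two file-type bits assigned above come from CPython's stat module; a sketch independent of the example:

    import stat

    assert stat.S_IFDIR == 0o040000  # Directory type bit.
    assert stat.S_IFREG == 0o100000  # Regular-file type bit.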
Example 21
  def ProcessListDirectory(self, responses):
    """Processes the results of the ListDirectory client action.

    Args:
      responses: a flow Responses object.
    """
    if not responses.success:
      raise flow.FlowError("Unable to list directory.")

    with data_store.DB.GetMutationPool() as pool:
      filesystem.WriteStatEntries(
          [rdf_client_fs.StatEntry(response) for response in responses],
          client_id=self.client_id,
          mutation_pool=pool,
          token=self.token)

    for response in responses:
      self.SendReply(response)
Example 22
    def testOSXLaunchdPlistParser(self):
        parser = osx_file_parser.OSXLaunchdPlistParser()
        plists = ["com.google.code.grr.plist", "com.google.code.grr.bplist"]
        results = []
        for plist in plists:
            path = os.path.join(self.base_path, "parser_test", plist)
            plist_file = open(path, "rb")
            stat = rdf_client_fs.StatEntry(
                pathspec=rdf_paths.PathSpec(
                    path=path, pathtype=rdf_paths.PathSpec.PathType.OS),
                st_mode=16877)  # 0o40755: a directory.
            results.extend(list(parser.Parse(stat, plist_file, None)))

        for result in results:
            self.assertEqual(result.Label, "com.google.code.grr")
            self.assertCountEqual(result.ProgramArguments, [
                "/usr/lib/grr/grr_3.0.0.5_amd64/grr",
                "--config=/usr/lib/grr/grr_3.0.0.5_amd64/grr.yaml"
            ])
Example 23
    def testCSVPluginWritesMoreThanOneBatchOfRowsCorrectly(self):
        num_rows = csv_plugin.CSVInstantOutputPlugin.ROW_BATCH * 2 + 1

        responses = []
        for i in range(num_rows):
            responses.append(
                rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar/%d" % i, pathtype="OS")))

        zip_fd, prefix = self.ProcessValuesToZip(
            {rdf_client_fs.StatEntry: responses})
        parsed_output = list(
            csv.DictReader(
                zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix)))
        self.assertLen(parsed_output, num_rows)
        for i in range(num_rows):
            self.assertEqual(parsed_output[i]["urn"],
                             self.client_id.Add("/fs/os/foo/bar/%d" % i))
Example 24
  def testShowsPerFileDownloadButtonForArtifactDownloaderHunt(self):
    stat_entry = rdf_client_fs.StatEntry(
        pathspec=rdf_paths.PathSpec(
            path="/foo/bar", pathtype=rdf_paths.PathSpec.PathType.OS))
    values = [
        collectors.ArtifactFilesDownloaderResult(downloaded_file=stat_entry)
    ]

    self.CreateGenericHuntWithCollection(values=values)

    self.Open("/")
    self.Click("css=a[grrtarget=hunts]")
    self.Click("css=td:contains('GenericHunt')")
    self.Click("css=li[heading=Results]")

    self.WaitUntil(
        self.IsElementPresent,
        "css=grr-results-collection button:has(span.glyphicon-download)")
Example 25
    def testFileFinderResultExportConverterConvertsBufferRefsWithoutPathspecs(
            self):
        pathspec = rdf_paths.PathSpec(path="/some/path",
                                      pathtype=rdf_paths.PathSpec.PathType.OS)

        match1 = rdf_client.BufferReference(offset=42,
                                            length=43,
                                            data=b"somedata1")
        match2 = rdf_client.BufferReference(offset=44,
                                            length=45,
                                            data=b"somedata2")
        stat_entry = rdf_client_fs.StatEntry(pathspec=pathspec,
                                             st_mode=33184,
                                             st_ino=1063090,
                                             st_atime=1336469177,
                                             st_mtime=1336129892,
                                             st_ctime=1336129892,
                                             st_btime=1313131313)

        file_finder_result = rdf_file_finder.FileFinderResult(
            stat_entry=stat_entry, matches=[match1, match2])

        converter = file.FileFinderResultConverter()
        results = list(converter.Convert(self.metadata, file_finder_result))

        # We expect 2 ExportedMatch instances in the results
        exported_matches = [
            result for result in results
            if isinstance(result, buffer_reference.ExportedMatch)
        ]
        exported_matches = sorted(exported_matches, key=lambda x: x.offset)
        self.assertLen(exported_matches, 2)

        self.assertEqual(exported_matches[0].offset, 42)
        self.assertEqual(exported_matches[0].length, 43)
        self.assertEqual(exported_matches[0].data, b"somedata1")
        self.assertEqual(exported_matches[0].urn,
                         "aff4:/%s/fs/os/some/path" % self.client_id)

        self.assertEqual(exported_matches[1].offset, 44)
        self.assertEqual(exported_matches[1].length, 45)
        self.assertEqual(exported_matches[1].data, b"somedata2")
        self.assertEqual(exported_matches[1].urn,
                         "aff4:/%s/fs/os/some/path" % self.client_id)
Example 26
    def testExportedRowsForValuesOfMultipleTypes(self):
        zip_fd, prefix = self.ProcessValuesToZip({
            rdf_client_fs.StatEntry: [
                rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar", pathtype="OS"))
            ],
            rdf_client.Process: [rdf_client.Process(pid=42)]
        })
        with self.db_connection:
            stat_entry_script_path = "%s/ExportedFile_from_StatEntry.sql" % prefix
            stat_entry_script = zip_fd.read(stat_entry_script_path).decode(
                "utf-8")

            process_script_path = "%s/ExportedProcess_from_Process.sql" % prefix
            process_script = zip_fd.read(process_script_path).decode("utf-8")

            self.db_cursor.executescript(stat_entry_script)
            self.db_cursor.executescript(process_script)

        self.db_cursor.execute(
            "SELECT \"metadata.client_urn\", \"metadata.source_urn\", urn "
            "FROM \"ExportedFile.from_StatEntry\";")
        stat_entry_results = self.db_cursor.fetchall()
        self.assertLen(stat_entry_results, 1)
        # Client URN
        self.assertEqual(stat_entry_results[0][0], "aff4:/%s" % self.client_id)
        # Source URN
        self.assertEqual(stat_entry_results[0][1], str(self.results_urn))
        # URN
        self.assertEqual(stat_entry_results[0][2],
                         "aff4:/%s/fs/os/foo/bar" % self.client_id)

        self.db_cursor.execute(
            "SELECT \"metadata.client_urn\", \"metadata.source_urn\", pid "
            "FROM \"ExportedProcess.from_Process\";")
        process_results = self.db_cursor.fetchall()
        self.assertLen(process_results, 1)
        # Client URN
        self.assertEqual(process_results[0][0], "aff4:/%s" % self.client_id)
        # Source URN
        self.assertEqual(process_results[0][1], str(self.results_urn))
        # PID
        self.assertEqual(process_results[0][2], 42)
Example 27
    def testMigrateClientStatAndHashEntryFromSimpleFile(self):
        client_urn = self.SetupClient(0)

        with self._Aff4Open(client_urn.Add("fs/os").Add("foo")) as fd:
            stat_entry = rdf_client_fs.StatEntry(st_mode=108)
            fd.Set(fd.Schema.STAT, stat_entry)

            hash_entry = rdf_crypto.Hash(sha256=b"quux")
            fd.Set(fd.Schema.HASH, hash_entry)

        migrator = data_migration.ClientVfsMigrator()
        migrator.MigrateClient(client_urn)

        path_info = data_store.REL_DB.ReadPathInfo(
            client_id=client_urn.Basename(),
            path_type=rdf_objects.PathInfo.PathType.OS,
            components=("foo", ))
        self.assertEqual(path_info.stat_entry.st_mode, 108)
        self.assertEqual(path_info.hash_entry.sha256, b"quux")
Example 28
    def testMigrateClientStatFromTree(self):
        client_urn = self.SetupClient(0)

        with self._Aff4Open(client_urn.Add("fs/os").Add("foo/bar/baz")) as fd:
            stat_entry = rdf_client_fs.StatEntry(st_mtime=101)
            fd.Set(fd.Schema.STAT, stat_entry)

        migrator = data_migration.ClientVfsMigrator()
        migrator.MigrateClient(client_urn)

        path_infos = data_store.REL_DB.ReadPathInfos(
            client_id=client_urn.Basename(),
            path_type=rdf_objects.PathInfo.PathType.OS,
            components_list=[("foo", ), ("foo", "bar"), ("foo", "bar", "baz")])

        self.assertEqual(path_infos[("foo", )].stat_entry.st_mtime, None)
        self.assertEqual(path_infos[("foo", "bar")].stat_entry.st_mtime, None)
        self.assertEqual(path_infos[("foo", "bar", "baz")].stat_entry.st_mtime,
                         101)
Example 29
    def _InitializeFiles(self, hashing=False):
        path1 = self.client_id.Add("fs/os/foo/bar/hello1.txt")
        archive_path1 = (u"test_prefix/%s/fs/os/foo/bar/hello1.txt" %
                         self.client_id.Basename())
        self._CreateFile(path=path1, content="hello1", hashing=hashing)

        path2 = self.client_id.Add(u"fs/os/foo/bar/中国新闻网新闻中.txt")
        archive_path2 = (u"test_prefix/%s/fs/os/foo/bar/"
                         u"中国新闻网新闻中.txt") % self.client_id.Basename()
        self._CreateFile(path=path2, content="hello2", hashing=hashing)

        self.stat_entries = []
        self.paths = [path1, path2]
        self.archive_paths = [archive_path1, archive_path2]
        for path in self.paths:
            self.stat_entries.append(
                rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="foo/bar/" + str(path).split("/")[-1],
                    pathtype=rdf_paths.PathSpec.PathType.OS)))
Example 30
    def testBigQueryPluginWithValuesOfMultipleTypes(self):
        output = self.ProcessResponses(
            plugin_args=bigquery_plugin.BigQueryOutputPluginArgs(),
            responses=[
                rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/中国新闻网新闻中", pathtype="OS")),
                rdf_client.Process(pid=42)
            ],
            process_responses_separately=True)

        # Should have two separate output streams for the two types
        self.assertLen(output, 2)

        for name, stream, _, job_id in output:
            self.assertIn(job_id, [
                "C-1000000000000000_Results_ExportedFile_1445995873",
                "C-1000000000000000_Results_ExportedProcess_1445995873"
            ])
            self._parseOutput(name, stream)