Example #1
0
  def testDownloadCollectionWithFoldersEntries(self):
    """Check we can download a collection that also references folders."""
    # Build a collection holding one regular-file result and one directory
    # result (st_mode=S_IFDIR) so the download path must handle both kinds.
    fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
    with data_store.DB.GetMutationPool() as pool:
      fd.Add(
          rdf_file_finder.FileFinderResult(
              stat_entry=rdf_client_fs.StatEntry(
                  pathspec=rdf_paths.PathSpec(path="testfile5",
                                              pathtype="OS"))),
          mutation_pool=pool)
      fd.Add(
          rdf_file_finder.FileFinderResult(
              stat_entry=rdf_client_fs.StatEntry(
                  pathspec=rdf_paths.PathSpec(path="testdir1", pathtype="OS"),
                  st_mode=stat.S_IFDIR)),
          mutation_pool=pool)

    with utils.TempDirectory() as tmpdir:
      export_utils.DownloadCollection(
          self.collection_urn,
          tmpdir,
          overwrite=True,
          dump_client_info=True,
          token=self.token,
          max_threads=2)
      # Downloads are mirrored under the AFF4 path; strip the leading "/"
      # so os.path.join does not discard tmpdir.
      expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

      # Check we found both files.
      self.assertIn("testfile5", os.listdir(expected_outdir))
      self.assertIn("testdir1", os.listdir(expected_outdir))
Example #2
0
    def GetClientConfig(self, context, validate=True, deploy_timestamp=True):
        """Generates the client config file for inclusion in deployable binaries.

        Args:
          context: Config context list used to resolve option values.
          validate: If True, run ValidateEndConfig on the produced config.
          deploy_timestamp: If True, record the current time under
            Client.deploy_time.

        Returns:
          The text content of the generated client configuration file.
        """
        with utils.TempDirectory() as tmp_dir:
            # Make sure we write the file in yaml format.
            filename = os.path.join(
                tmp_dir,
                config.CONFIG.Get("ClientBuilder.config_filename",
                                  context=context))

            new_config = config.CONFIG.MakeNewConfig()
            new_config.Initialize(reset=True, data="")
            new_config.SetWriteBack(filename)

            # Only copy certain sections to the client. We enumerate all
            # defined options and then resolve those from the config in the
            # client's context. The result is the raw option as if the
            # client read our config file.
            client_context = context[:]
            while contexts.CLIENT_BUILD_CONTEXT in client_context:
                client_context.remove(contexts.CLIENT_BUILD_CONTEXT)
            for descriptor in sorted(config.CONFIG.type_infos,
                                     key=lambda x: x.name):
                if descriptor.name in self.SKIP_OPTION_LIST:
                    continue

                if descriptor.section in self.CONFIG_SECTIONS:
                    value = config.CONFIG.GetRaw(descriptor.name,
                                                 context=client_context,
                                                 default=None)

                    if value is not None:
                        logging.debug("Copying config option to client: %s",
                                      descriptor.name)

                        new_config.SetRaw(descriptor.name, value)

            if deploy_timestamp:
                deploy_time_string = str(rdfvalue.RDFDatetime.Now())
                new_config.Set("Client.deploy_time", deploy_time_string)
            new_config.Write()

            if validate:
                self.ValidateEndConfig(new_config)

            private_validator = config.CONFIG.Get(
                "ClientBuilder.private_config_validator_class",
                context=context)
            if private_validator:
                try:
                    validator = config_validator_base.PrivateConfigValidator.classes[
                        private_validator]()
                except KeyError:
                    logging.error(
                        "Couldn't find config validator class %s, "
                        "you probably need to copy it into lib/local",
                        private_validator)
                    raise
                validator.ValidateEndConfig(new_config, self.context)

            # Use a context manager so the file handle is closed promptly
            # instead of leaking until garbage collection.
            with io.open(filename, "r") as config_fd:
                return config_fd.read()
Example #3
0
    def SignGRRPyinstallerBinaries(self):
        """Codesign the built client binary using the configured keychain.

        No-op (with a console message) when no signing keychain is configured.
        """
        cert_name = config.CONFIG.Get("ClientBuilder.signing_cert_name",
                                      context=self.context)
        keychain_file = config.CONFIG.Get(
            "ClientBuilder.signing_keychain_file", context=self.context)
        if not keychain_file:
            print("No keychain file specified in the config, skipping "
                  "binaries signing...")
            return

        print("Signing binaries with keychain: %s" % keychain_file)

        with utils.TempDirectory() as temp_dir:
            # codesign needs the directory name to adhere to a particular
            # naming format.
            bundle_dir = os.path.join(
                temp_dir, "%s_%s" % (self.client_name, self.version))
            shutil.move(self.target_binary_dir, bundle_dir)
            temp_binary_path = os.path.join(
                bundle_dir,
                config.CONFIG.Get("Client.binary_name", context=self.context))
            # Raises CalledProcessError if codesign fails.
            subprocess.check_call([
                "codesign", "--verbose", "--deep", "--force", "--sign",
                cert_name, "--keychain", keychain_file, temp_binary_path
            ])
            # Move the (now signed) bundle back to its original location.
            shutil.move(bundle_dir, self.target_binary_dir)
Example #4
0
    def testConfigFileInclusionCanBeTurnedOff(self):
        one = r"""
Config.includes:
  - 2.yaml

Section1.int: 1
"""
        two = r"""
SecondaryFileIncluded: true
Section1.int: 2
"""

        with utils.TempDirectory() as temp_dir:
            configone = os.path.join(temp_dir, "1.yaml")
            configtwo = os.path.join(temp_dir, "2.yaml")
            with io.open(configone, "w") as fd:
                fd.write(one)
            with io.open(configtwo, "w") as fd:
                fd.write(two)

            # Using filename
            conf = self._GetNewConf()
            conf.Initialize(parser=config_lib.YamlParser,
                            filename=configone,
                            process_includes=False)

            self.assertFalse(conf.Get("SecondaryFileIncluded"))
            self.assertEqual(conf.Get("Section1.int"), 1)
Example #5
0
    def testGeneratesTarGzArchive(self):
        """Hunt files archive is produced as a valid tar.gz with a MANIFEST."""
        result = self.handler.Handle(hunt_plugin.ApiGetHuntFilesArchiveArgs(
            hunt_id=self.hunt_id, archive_format="TAR_GZ"),
                                     context=self.context)

        with utils.TempDirectory() as temp_dir:
            # Stream the generated archive to a local file chunk by chunk.
            tar_path = os.path.join(temp_dir, "archive.tar.gz")
            with open(tar_path, "wb") as fd:
                for chunk in result.GenerateContent():
                    fd.write(chunk)

            with tarfile.open(tar_path) as tar_fd:
                tar_fd.extractall(path=temp_dir)

            # The MANIFEST may live in a nested directory; search for it.
            manifest_file_path = None
            for parent, _, files in os.walk(temp_dir):
                if "MANIFEST" in files:
                    manifest_file_path = os.path.join(parent, "MANIFEST")
                    break

            self.assertTrue(manifest_file_path)
            with open(manifest_file_path, "rb") as fd:
                manifest = yaml.safe_load(fd.read())

                # assertDictContainsSubset is deprecated (removed in Python
                # 3.12); check each expected key explicitly instead.
                expected = {
                    "archived_files": 10,
                    "failed_files": 0,
                    "processed_files": 10,
                    "ignored_files": 0,
                }
                for key, value in expected.items():
                    self.assertEqual(manifest[key], value)
Example #6
0
    def testGeneratesTarGzArchive(self):
        """Flow files archive is produced as a valid tar.gz with a MANIFEST."""
        result = self.handler.Handle(flow_plugin.ApiGetFlowFilesArchiveArgs(
            client_id=self.client_id,
            flow_id=self.flow_id,
            archive_format="TAR_GZ"),
                                     token=self.token)

        with utils.TempDirectory() as temp_dir:
            # Stream the generated archive to disk chunk by chunk.
            tar_path = os.path.join(temp_dir, "archive.tar.gz")
            with open(tar_path, "wb") as out_fd:
                for piece in result.GenerateContent():
                    out_fd.write(piece)

            with tarfile.open(tar_path) as archive:
                archive.extractall(path=temp_dir)

            # Locate the MANIFEST file somewhere in the extracted tree.
            manifest_path = next(
                (os.path.join(root, "MANIFEST")
                 for root, _, names in os.walk(temp_dir)
                 if "MANIFEST" in names), None)

            self.assertTrue(manifest_path)
            with open(manifest_path, "rb") as manifest_fd:
                manifest = yaml.safe_load(manifest_fd.read())

            self.assertEqual(manifest["archived_files"], 1)
            self.assertEqual(manifest["failed_files"], 0)
            self.assertEqual(manifest["processed_files"], 1)
            self.assertEqual(manifest["ignored_files"], 0)
Example #7
0
    def setUp(self):
        """Set up a temp directory, a test client and a mock process tree."""
        super().setUp()

        stack = contextlib.ExitStack()
        self.addCleanup(stack.close)
        self._tmp_dir = stack.enter_context(utils.TempDirectory())

        self.client_id = self.SetupClient(0)
        # Describe the fake process tree once, then instantiate the mocks.
        # Entries without a "ppid" key deliberately omit that argument.
        proc_specs = [
            dict(pid=101, name="proc101.exe"),
            dict(pid=102, name="proc102.exe", ppid=101),
            dict(pid=103, name="proc103.exe", ppid=1),
            dict(pid=104, name="proc104.exe", ppid=103),
            dict(pid=105, name="proc105.exe", ppid=1),
            dict(pid=106, name="proc106.exe", ppid=104),
            dict(pid=108, name="proc108.exe"),
            dict(pid=109, name="proc109.exe"),
            dict(pid=110, name="proc110.exe"),
        ]
        self.procs = [
            client_test_lib.MockWindowsProcess(**spec) for spec in proc_specs
        ]
Example #8
0
    def setUp(self):
        """Create a temp working directory, removed automatically at teardown."""
        super(WindowsMsiTest, self).setUp()

        cleanup_stack = contextlib.ExitStack()
        self.addCleanup(cleanup_stack.close)
        self._tmp_dir = cleanup_stack.enter_context(utils.TempDirectory())
Example #9
0
 def testExportFile(self):
   """Check we can export a file without errors."""
   with utils.TempDirectory() as export_dir:
     export_utils.CopyAFF4ToLocal(
         self.out.Add("testfile1"), export_dir, overwrite=True,
         token=self.token)
     # The file is mirrored under its AFF4 path (leading "/" stripped).
     target_dir = os.path.join(export_dir, self.out.Path()[1:])
     self.assertIn("testfile1", os.listdir(target_dir))
Example #10
0
 def testRecursiveDownload(self):
   """Check we can export a file without errors."""
   with utils.TempDirectory() as download_root:
     export_utils.RecursiveDownload(
         aff4.FACTORY.Open(self.out, token=self.token), download_root,
         overwrite=True)
     # The top-level file lands under the mirrored AFF4 path.
     top_dir = os.path.join(download_root, self.out.Path()[1:])
     self.assertIn("testfile1", os.listdir(top_dir))
     # Nested directories are recursed into as well.
     nested_dir = os.path.join(top_dir, "testdir1", "testdir2")
     self.assertIn("testfile4", os.listdir(nested_dir))
Example #11
0
  def testConfigFileInclusion(self):
    one = r"""
Config.includes:
  - 2.yaml

Section1.int: 1
"""
    two = r"""
SecondaryFileIncluded: true
Section1.int: 2
Config.includes:
  - subdir/3.yaml
"""
    three = r"""
TertiaryFileIncluded: true
Section1.int: 3
"""

    with utils.TempDirectory() as temp_dir:
      configone = os.path.join(temp_dir, "1.yaml")
      configtwo = os.path.join(temp_dir, "2.yaml")
      subdir = os.path.join(temp_dir, "subdir")
      os.makedirs(subdir)
      configthree = os.path.join(subdir, "3.yaml")
      with open(configone, "wb") as fd:
        fd.write(one)

      with open(configtwo, "wb") as fd:
        fd.write(two)

      with open(configthree, "wb") as fd:
        fd.write(three)

      # Using filename
      conf = self._GetNewConf()
      conf.Initialize(parser=config_lib.YamlParser, filename=configone)
      self._CheckConf(conf)

      # Using fd with no fd.name should raise because there is no way to resolve
      # the relative path.
      conf = self._GetNewConf()
      fd = io.StringIO(one)
      self.assertRaises(
          config_lib.ConfigFileNotFound,
          conf.Initialize,
          parser=config_lib.YamlParser,
          fd=fd)

      # Using data
      conf = self._GetNewConf()
      self.assertRaises(
          config_lib.ConfigFileNotFound,
          conf.Initialize,
          parser=config_lib.YamlParser,
          data=one)
Example #12
0
    def testFileCasing(self):
        """Test our ability to read the correct casing from filesystem."""
        try:
            os.lstat(os.path.join(self.base_path, "nUmBeRs.txt"))
            os.lstat(os.path.join(self.base_path, "nuMbErs.txt"))
            # If we reached this point we are on a case insensitive file system
            # and the tests below do not make any sense.
            logging.warning(
                "Case insensitive file system detected. Skipping test.")
            return
        except (IOError, OSError):
            pass

        # Create 2 files with names that differ only in casing.
        with utils.TempDirectory() as temp_dir:
            path1 = os.path.join(temp_dir, "numbers.txt")
            shutil.copy(os.path.join(self.base_path, "numbers.txt"), path1)

            path2 = os.path.join(temp_dir, "numbers.TXT")
            shutil.copy(os.path.join(self.base_path, "numbers.txt.ver2"),
                        path2)

            # Opening by exact name must report the exact on-disk casing.
            fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=path1,
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            self.assertEqual(fd.pathspec.Basename(), "numbers.txt")

            fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=path2,
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            self.assertEqual(fd.pathspec.Basename(), "numbers.TXT")

            # "Numbers.txt" exists in neither casing on disk here, so the VFS
            # resolves it to one of the real files.
            path = os.path.join(self.base_path, "Numbers.txt")
            fd = vfs.VFSOpen(
                rdf_paths.PathSpec(path=path,
                                   pathtype=rdf_paths.PathSpec.PathType.OS))
            read_path = fd.pathspec.Basename()

            # The exact file now is non deterministic but should be either of the two:
            if read_path != "numbers.txt" and read_path != "numbers.TXT":
                raise RuntimeError("read path is %s" % read_path)

            # Ensure that the produced pathspec specified no case folding:
            s = fd.Stat()
            self.assertEqual(s.pathspec.path_options,
                             rdf_paths.PathSpec.Options.CASE_LITERAL)

            # Case folding will only occur when requested - this should raise because
            # we have the CASE_LITERAL option:
            pathspec = rdf_paths.PathSpec(
                path=path,
                pathtype=rdf_paths.PathSpec.PathType.OS,
                path_options=rdf_paths.PathSpec.Options.CASE_LITERAL)
            self.assertRaises(IOError, vfs.VFSOpen, pathspec)
Example #13
0
    def testRepackAll(self):
        """Test repacking all binaries."""
        self.executables_dir = package.ResourcePath("grr-response-core",
                                                    "executables")
        with utils.TempDirectory() as tmp_dir:
            new_dir = os.path.join(tmp_dir, "grr", "executables")
            os.makedirs(new_dir)

            # Copy unzipsfx so it can be used in repacking/
            shutil.copy(
                os.path.join(self.executables_dir,
                             "windows/templates/unzipsfx/unzipsfx-i386.exe"),
                new_dir)
            shutil.copy(
                os.path.join(self.executables_dir,
                             "windows/templates/unzipsfx/unzipsfx-amd64.exe"),
                new_dir)

            with test_lib.ConfigOverrider({
                    "ClientBuilder.executables_dir":
                    new_dir,
                    "ClientBuilder.unzipsfx_stub_dir":
                    new_dir
            }):
                repacking.TemplateRepacker().RepackAllTemplates()

            # Expected installer counts per platform/package type.
            self.assertEqual(
                len(glob.glob(os.path.join(new_dir, "installers/*.deb"))), 2)
            self.assertEqual(
                len(glob.glob(os.path.join(new_dir, "installers/*.rpm"))), 2)
            self.assertEqual(
                len(glob.glob(os.path.join(new_dir, "installers/*.exe"))), 4)
            self.assertEqual(
                len(glob.glob(os.path.join(new_dir, "installers/*.pkg"))), 1)

            # Validate the config appended to the OS X package.
            zf = zipfile.ZipFile(glob.glob(
                os.path.join(new_dir, "installers/*.pkg")).pop(),
                                 mode="r")
            fd = zf.open("config.yaml")

            # We can't load the included build.yaml because the package hasn't been
            # installed.
            loaded = yaml.safe_load(fd)
            loaded.pop("Config.includes")

            packaged_config = config.CONFIG.MakeNewConfig()
            # yaml.safe_dump already returns text (str); calling
            # .decode("utf-8") on it would raise AttributeError on Python 3.
            data = yaml.safe_dump(loaded)
            packaged_config.Initialize(parser=config_lib.YamlParser,
                                       data=data)
            packaged_config.Validate(
                sections=build.ClientRepacker.CONFIG_SECTIONS)
            repacker = build.ClientRepacker()
            repacker.ValidateEndConfig(packaged_config)
Example #14
0
    def setUp(self):
        """Create a temp dir and a fake fleetspeak service log file."""
        super().setUp()

        stack = contextlib.ExitStack()
        self.addCleanup(stack.close)

        self._tmp_dir = stack.enter_context(utils.TempDirectory())
        # This file can't be located in self._tmp_dir, since self._tmp_dir is
        # created with secure permissions and unaccessible to the fake fleetspeak
        # service.
        self._fake_fleetspeak_service_log_file = os.path.join(
            tempfile.gettempdir(), "fake_fleetspeak_service_log_file.txt")
        # Delete the log file at teardown via the same cleanup stack.
        stack.callback(os.unlink, self._fake_fleetspeak_service_log_file)
Example #15
0
    def testMergeDirectories(self):
        """MergeDirectories copies src into dst, preserving existing dst files."""
        stack = contextlib.ExitStack()
        self.addCleanup(stack.close)

        src_dir = stack.enter_context(utils.TempDirectory())
        dst_dir = stack.enter_context(utils.TempDirectory())

        # Small path and file-IO helpers local to this test.
        def SrcPath(*components):
            return os.path.join(src_dir, *components)

        def DstPath(*components):
            return os.path.join(dst_dir, *components)

        def WriteFile(path, contents):
            utils.EnsureDirExists(os.path.dirname(path))
            with open(path, "w") as f:
                f.write(contents)

        def ReadFile(path):
            with open(path) as f:
                return f.read()

        WriteFile(SrcPath("a", "b", "c", "file1.txt"), "file1")
        WriteFile(SrcPath("a", "b", "c", "file2.txt"), "file2")
        WriteFile(SrcPath("file3.txt"), "file3")

        WriteFile(DstPath("a", "file4.txt"), "file4")
        WriteFile(DstPath("file5.txt"), "file5")

        utils.MergeDirectories(src_dir, dst_dir)

        # Source files were merged in and pre-existing dst files survived.
        self.assertEqual(ReadFile(DstPath("a", "b", "c", "file1.txt")),
                         "file1")
        self.assertEqual(ReadFile(DstPath("a", "b", "c", "file2.txt")),
                         "file2")
        self.assertEqual(ReadFile(DstPath("file3.txt")), "file3")
        self.assertEqual(ReadFile(DstPath("a", "file4.txt")), "file4")
        self.assertEqual(ReadFile(DstPath("file5.txt")), "file5")
Example #16
0
 def testUploadOverlyLargeSignedBinary(self):
   """Uploading a binary above the size cap raises BinaryTooLargeError."""
   # Shrink the cap to 5 bytes so a 6-byte payload is over the limit.
   with mock.patch.object(config_updater_util, "_MAX_SIGNED_BINARY_BYTES", 5):
     with utils.TempDirectory() as tmp_path:
       binary_path = os.path.join(tmp_path, "foo.exe")
       with open(binary_path, "wb") as out:
         out.write(b"\xaa\xbb\xcc\xdd\xee\xff")
       expected_message = (
           "File [%s] is of size 6 (bytes), which exceeds the allowed maximum "
           "of 5 bytes." % binary_path)
       with self.assertRaisesWithLiteralMatch(
           config_updater_util.BinaryTooLargeError, expected_message):
         config_updater_util.UploadSignedBinary(
             binary_path, config_pb2.ApiGrrBinary.Type.EXECUTABLE,
             "windows")
Example #17
0
    def _GetTarGzManifest(self, result):
        """Extract the generated tar.gz archive and return its parsed MANIFEST."""
        with utils.TempDirectory() as temp_dir:
            # Stream the archive content to a local file.
            tar_path = os.path.join(temp_dir, "archive.tar.gz")
            with open(tar_path, "wb") as out_fd:
                for piece in result.GenerateContent():
                    out_fd.write(piece)

            with tarfile.open(tar_path) as archive:
                archive.extractall(path=temp_dir)

            # The MANIFEST may be nested; search the extracted tree for it.
            for root, _, names in os.walk(temp_dir):
                if "MANIFEST" in names:
                    with open(os.path.join(root, "MANIFEST"), "rb") as fd:
                        return yaml.safe_load(fd.read())

        raise RuntimeError("MANIFEST not found")
Example #18
0
  def testDownloadCollectionIgnoresArtifactResultsWithoutFiles(self):
    """Artifact results carrying no files produce no output directory."""
    # The collection's only entry references no downloadable files.
    fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
    with data_store.DB.GetMutationPool() as pool:
      fd.Add(collectors.ArtifactFilesDownloaderResult(), mutation_pool=pool)

    with utils.TempDirectory() as download_dir:
      export_utils.DownloadCollection(
          self.collection_urn,
          download_dir,
          overwrite=True,
          dump_client_info=True,
          token=self.token,
          max_threads=2)
      # Nothing was downloadable, so the client output tree was never made.
      expected_outdir = os.path.join(download_dir, self.out.Path()[1:])
      self.assertFalse(os.path.exists(expected_outdir))
Example #19
0
 def testUploadPythonHack(self):
     """Uploaded python-hack content round-trips through the binary store."""
     with utils.TempDirectory() as dir_path:
         python_hack_path = os.path.join(dir_path, "hello_world.py")
         with open(python_hack_path, "wb") as f:
             f.write(b"print('Hello, world!')")
         config_updater_util.UploadSignedBinary(
             python_hack_path,
             objects_pb2.SignedBinaryID.BinaryType.PYTHON_HACK,
             "linux",
             upload_subdirectory="test")
         # The binary is stored under a URN derived from the platform and
         # upload_subdirectory arguments.
         python_hack_urn = rdfvalue.RDFURN(
             "aff4:/config/python_hacks/linux/test/hello_world.py")
         blob_iterator, _ = signed_binary_utils.FetchBlobsForSignedBinaryByURN(
             python_hack_urn)
         # Reassemble the stored blobs and compare with the original bytes.
         uploaded_blobs = list(
             signed_binary_utils.StreamSignedBinaryContents(blob_iterator))
         uploaded_content = b"".join(uploaded_blobs)
         self.assertEqual(uploaded_content, b"print('Hello, world!')")
    def _VerifyDownload(self):
        """Download the collection and check the expected files were exported."""
        with utils.TempDirectory() as tmpdir:
            export_utils.DownloadCollection(self.collection_urn,
                                            tmpdir,
                                            overwrite=True,
                                            dump_client_info=True,
                                            token=self.token,
                                            max_threads=2)
            # Files are mirrored under the AFF4 path; strip the leading "/"
            # so os.path.join keeps tmpdir as the root.
            expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

            # Check we found both files. assertIn reports the actual listing
            # on failure, unlike assertTrue(x in y); list the directory once.
            listing = os.listdir(expected_outdir)
            self.assertIn("testfile1", listing)
            self.assertIn("testfile2", listing)
            self.assertIn("testfile5", listing)
            self.assertIn("testfile6", listing)

            # Check we dumped a YAML file to the root of the client.
            expected_rootdir = os.path.join(tmpdir, self.client_id.Basename())
            self.assertIn("client_info.yaml", os.listdir(expected_rootdir))
Example #21
0
 def testUploadExecutable(self):
     """Uploaded executable content round-trips through the binary store."""
     with utils.TempDirectory() as dir_path:
         executable_path = os.path.join(dir_path, "foo.exe")
         with open(executable_path, "wb") as f:
             f.write(b"\xaa\xbb\xcc\xdd")
         config_updater_util.UploadSignedBinary(
             executable_path,
             objects_pb2.SignedBinaryID.BinaryType.EXECUTABLE,
             "windows",
             upload_subdirectory="anti-malware/registry-tools")
         # The binary is stored under a URN derived from the platform and
         # upload_subdirectory arguments.
         executable_urn = rdfvalue.RDFURN(
             "aff4:/config/executables/windows/anti-malware/registry-tools/"
             "foo.exe")
         blob_iterator, _ = signed_binary_utils.FetchBlobsForSignedBinaryByURN(
             executable_urn)
         # Reassemble the stored blobs and compare with the original bytes.
         uploaded_blobs = list(
             signed_binary_utils.StreamSignedBinaryContents(blob_iterator))
         uploaded_content = b"".join(uploaded_blobs)
         self.assertEqual(uploaded_content, b"\xaa\xbb\xcc\xdd")
Example #22
0
    def testConfigFileIncludeAbsolutePaths(self):
        """Absolute includes always work; relative ones need a base filename."""
        one = r"""
Section1.int: 1
"""
        with utils.TempDirectory() as temp_dir:
            configone = os.path.join(temp_dir, "1.yaml")
            with io.open(configone, "w") as fd:
                fd.write(one)

            absolute_include = (r"""
Config.includes:
  - %s

Section1.int: 2
""" % configone)

            conf = self._GetNewConf()
            conf.Initialize(parser=config_parser.YamlConfigFileParser,
                            data=absolute_include)
            # The included file's value (1) wins over the inline value (2).
            self.assertEqual(conf["Section1.int"], 1)

            relative_include = r"""
Config.includes:
  - 1.yaml

Section1.int: 2
"""
            conf = self._GetNewConf()
            # Can not include a relative path from config without a filename.
            self.assertRaises(config_lib.ConfigFileNotFound,
                              conf.Initialize,
                              parser=config_parser.YamlConfigFileParser,
                              data=relative_include)

            # If we write it to a file it should work though.
            configtwo = os.path.join(temp_dir, "2.yaml")
            with io.open(configtwo, "w") as fd:
                fd.write(relative_include)

            conf.Initialize(parser=config_parser.YamlConfigFileParser,
                            filename=configtwo)
            self.assertEqual(conf["Section1.int"], 1)
Example #23
0
    def _MakeDeployableBinaryV2(self, template_path, output_path):
        """Build a deployable package from a template zip archive.

        Args:
          template_path: Path to the client template zip archive.
          output_path: Destination path for the generated package.

        Returns:
          output_path.
        """
        context = self.context + ["Client Context"]
        utils.EnsureDirExists(os.path.dirname(output_path))

        fleetspeak_enabled = config.CONFIG.Get("Client.fleetspeak_enabled",
                                               context=self.context)
        fleetspeak_bundled = config.CONFIG.Get(
            "ClientBuilder.fleetspeak_bundled", context=self.context)

        with contextlib.ExitStack() as stack:
            tmp_dir = stack.enter_context(utils.TempDirectory())
            shutil.unpack_archive(template_path, tmp_dir, format="zip")

            # Pick the template variant matching the fleetspeak configuration
            # (bundled takes precedence over merely enabled).
            if fleetspeak_bundled:
                variant = "fleetspeak-bundled"
            elif fleetspeak_enabled:
                variant = "fleetspeak-enabled"
            else:
                variant = "legacy"

            pkg_utils.JoinPkg(os.path.join(tmp_dir, variant),
                              os.path.join(tmp_dir, "blocks"), output_path)

            # Append build metadata and the generated client config to the
            # output archive.
            zf = stack.enter_context(zipfile.ZipFile(output_path, mode="a"))
            with open(os.path.join(tmp_dir, "build.yaml"),
                      "r") as build_yaml_file:
                zf.writestr("build.yaml", build_yaml_file.read())

            client_config_data = build_helpers.GetClientConfig(context)
            zf.writestr("config.yaml", client_config_data)

            if fleetspeak_bundled:
                fleetspeak_client_config = config.CONFIG.Get(
                    "ClientBuilder.fleetspeak_client_config",
                    context=self.context)
                with open(fleetspeak_client_config,
                          "r") as fleetspeak_client_config_file:
                    zf.writestr("client.config",
                                fleetspeak_client_config_file.read())

        return output_path
Example #24
0
  def testDownloadCollectionWithFlattenOption(self):
    """Check we can download files references in a collection."""
    # Create a collection with URNs to some files.
    fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
    with data_store.DB.GetMutationPool() as pool:
      fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1")), mutation_pool=pool)
      fd.Add(
          rdf_client_fs.StatEntry(
              pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
          mutation_pool=pool)
      fd.Add(
          rdf_file_finder.FileFinderResult(
              stat_entry=rdf_client_fs.StatEntry(
                  pathspec=rdf_paths.PathSpec(path="testfile5",
                                              pathtype="OS"))),
          mutation_pool=pool)

    with utils.TempDirectory() as tmpdir:
      export_utils.DownloadCollection(
          self.collection_urn,
          tmpdir,
          overwrite=True,
          dump_client_info=True,
          flatten=True,
          token=self.token,
          max_threads=2)

      # Check that "files" folder is filled with symlinks to downloaded files.
      symlinks = os.listdir(os.path.join(tmpdir, "files"))
      self.assertLen(symlinks, 3)
      self.assertListEqual(
          sorted(symlinks), [
              "C.1000000000000000_fs_os_testfile1",
              "C.1000000000000000_fs_os_testfile2",
              "C.1000000000000000_fs_os_testfile5"
          ])
      # Each symlink points at the real file in the client's mirrored tree.
      self.assertEqual(
          os.readlink(
              os.path.join(tmpdir, "files",
                           "C.1000000000000000_fs_os_testfile1")),
          os.path.join(tmpdir, "C.1000000000000000", "fs", "os", "testfile1"))
Example #25
0
 def _GenerateFleetspeakServiceConfig(self, zip_file):
   """Validate the Fleetspeak service config and add it to zip_file.

   Config paths ending in ".in" are interpolated against the build context
   first (with backslashes naively re-escaped afterwards).
   """
   orig_fs_config_path = config.CONFIG["ClientBuilder.fleetspeak_config_path"]
   final_fs_config_fname = config.CONFIG[
       "Client.fleetspeak_unsigned_config_fname"]
   if orig_fs_config_path.endswith(".in"):
     logging.info("Interpolating %s", orig_fs_config_path)
     logging.warning("Backslashes will be naively re-escaped after "
                     "interpolation. If this is not desired, use a Fleetspeak "
                     "config file without the '.in' extension.")
     with utils.TempDirectory() as temp_dir:
       temp_fs_config_path = os.path.join(temp_dir, final_fs_config_fname)
       with io.open(orig_fs_config_path, "r") as source:
         with io.open(temp_fs_config_path, "w") as dest:
           interpolated = config.CONFIG.InterpolateValue(
               source.read(), context=self.context)
           # Re-escape backslashes lost during interpolation (see warning).
           dest.write(re.sub(r"\\", r"\\\\", interpolated))
       self._ValidateFleetspeakServiceConfig(temp_fs_config_path)
       zip_file.write(temp_fs_config_path, final_fs_config_fname)
   else:
     self._ValidateFleetspeakServiceConfig(orig_fs_config_path)
     zip_file.write(orig_fs_config_path, final_fs_config_fname)
Example #26
0
    def testConfigFileInclusionWithContext(self):
        """Includes nested under a context apply only when that context is set."""
        one = r"""
Client Context:
  Config.includes:
    - 2.yaml

Section1.int: 1
"""
        two = r"""
Section1.int: 2
SecondaryFileIncluded: true
"""
        with utils.TempDirectory() as temp_dir:
            configone = os.path.join(temp_dir, "1.yaml")
            configtwo = os.path.join(temp_dir, "2.yaml")
            with io.open(configone, "w") as fd:
                fd.write(one)

            with io.open(configtwo, "w") as fd:
                fd.write(two)

            # Without specifying the context the includes are not processed.
            conf = self._GetNewConf()
            conf.Initialize(parser=config_parser.YamlConfigFileParser,
                            filename=configone)
            self.assertEqual(conf["Section1.int"], 1)

            # Only one config is loaded.
            self.assertEqual(conf.files, [configone])

            # Now we specify the context.
            conf = self._GetNewConf()
            conf.AddContext("Client Context")
            conf.Initialize(parser=config_parser.YamlConfigFileParser,
                            filename=configone)

            # Both config files were loaded. Note that load order is important and
            # well defined.
            self.assertEqual(conf.files, [configone, configtwo])
            self.assertEqual(conf["Section1.int"], 2)
Example #27
0
    def testZipFileWithSymlink(self):
        """Test that symlinks are preserved when unpacking generated zips."""

        compressions = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED]
        for compression in compressions:
            outfd = io.BytesIO()

            infd1 = io.BytesIO(b"this is a test string")
            infd2 = io.BytesIO(b"this is another test string")
            with utils.StreamingZipWriter(outfd,
                                          compression=compression) as writer:
                writer.WriteFromFD(infd1, "test1.txt")
                writer.WriteFromFD(infd2, "subdir/test2.txt")

                writer.WriteSymlink("test1.txt", "test1.txt.link")
                writer.WriteSymlink("subdir/test2.txt", "test2.txt.link")

            # The archive is inspected directly from the in-memory buffer; the
            # previous version also wrote it to a temp file that was never
            # read back, which has been removed as dead code.
            zip_fd = zipfile.ZipFile(outfd, "r")

            # A zip symlink entry carries the S_IFLNK type bit plus 0o644
            # permissions in the high 16 bits of external_attr and must be
            # marked as created on a Unix system (create_system == 3).
            link_info = zip_fd.getinfo("test1.txt.link")
            self.assertEqual(link_info.external_attr,
                             (0o644 | 0o120000) << 16)
            self.assertEqual(link_info.create_system, 3)

            # ZipFile.read() returns bytes, so the expected link target must
            # be a bytes literal (comparing against str always failed).
            link_contents = zip_fd.read("test1.txt.link")
            self.assertEqual(link_contents, b"test1.txt")

            link_info = zip_fd.getinfo("test2.txt.link")
            self.assertEqual(link_info.external_attr,
                             (0o644 | 0o120000) << 16)
            self.assertEqual(link_info.create_system, 3)

            link_contents = zip_fd.read("test2.txt.link")
            self.assertEqual(link_contents, b"subdir/test2.txt")
Example #28
0
    def __init__(self, path: str):
        """Opens the MSI file at `path` for in-place editing."""
        self._olefile = olefile.OleFileIO(path, write_mode=True)
        self._stack = contextlib.ExitStack()
        self._tmp_dir = self._stack.enter_context(utils.TempDirectory())

        def _ReadStream(stream_name):
            # Return the full contents of the named OLE stream.
            with self._olefile.openstream(stream_name) as stream:
                return stream.read(self._olefile.get_size(stream_name))

        # Parse the string pool first; the feature table references it.
        self._string_pool = StringPool(
            _ReadStream(STRING_POOL_STREAM_NAME),
            _ReadStream(STRING_DATA_STREAM_NAME))
        self._feature_table = FeatureTable(
            _ReadStream(FEATURE_STREAM_NAME), self._string_pool)

        # Extract the embedded CAB archive into the temp directory so its
        # files can be modified before repacking.
        cab_path = os.path.join(self._tmp_dir, "input.cab")
        extract_dir = os.path.join(self._tmp_dir, "cab_tmp_dir")
        with open(cab_path, "wb") as out:
            out.write(_ReadStream(GRR_CAB_STREAM_NAME))
        self._cab = cab_utils.Cab(cab_path, extract_dir)
        self._cab.ExtractFiles()
        self._cab.WriteFile("PaddingFile", b"")
Example #29
0
    def MakeDeployableBinary(self, template_path: str,
                             output_path: str) -> str:
        """Repacks an MSI client template into a deployable installer.

        Unpacks the zipped template, patches the contained `installer.msi`
        (product strings, enabled features, file/directory renames, embedded
        GRR and Fleetspeak configs) and moves the result to `output_path`.

        Args:
          template_path: Path to the zipped MSI template.
          output_path: Destination path for the repacked MSI.

        Returns:
          The path of the written installer (`output_path`).
        """
        context = self.context + ["Client Context"]
        utils.EnsureDirExists(os.path.dirname(output_path))

        def GetConfig(name: str) -> Any:
            return config.CONFIG.Get(name, context=self.context)

        fleetspeak_enabled = GetConfig("Client.fleetspeak_enabled")
        fleetspeak_bundled = GetConfig("ClientBuilder.fleetspeak_bundled")

        # "Legacy" mode means the client runs under the GRR nanny service
        # instead of (bundled or system) Fleetspeak.
        legacy = not (fleetspeak_enabled or fleetspeak_bundled)

        with contextlib.ExitStack() as stack:
            tmp_dir = stack.enter_context(utils.TempDirectory())
            shutil.unpack_archive(template_path, tmp_dir, format="zip")
            msi_file = MsiFile(os.path.join(tmp_dir, "installer.msi"))

            # Small wrappers so the patch operations below read declaratively.

            def EnableFeature(name: str) -> None:
                msi_file.EnableFeature(name.encode("utf-8"))

            def ReplaceString(src: str, dst: str) -> None:
                msi_file.ReplaceString(src.encode("utf-8"),
                                       dst.encode("utf-8"))

            def RenameFile(src: str, dst: str) -> None:
                msi_file.RenameFile(src.encode("utf-8"), dst.encode("utf-8"))

            def ReplaceStringConfig(src: str, dst: str) -> None:
                ReplaceString(src, GetConfig(dst))

            def RenameFileConfig(src: str, dst: str) -> None:
                RenameFile(src, GetConfig(dst))

            # Set product information

            ReplaceStringConfig("__ProductName", "Client.name")
            ReplaceStringConfig("__ProductManufacturer", "Client.company_name")

            # Enable features

            if GetConfig("ClientBuilder.console"):
                EnableFeature("DbgGrrExe")
            else:
                EnableFeature("GrrExe")

            if legacy:
                if GetConfig("ClientBuilder.console"):
                    EnableFeature("DbgNanny")
                else:
                    EnableFeature("Nanny")

            if fleetspeak_bundled:
                EnableFeature("FleetspeakClient")

            if fleetspeak_enabled or fleetspeak_bundled:
                EnableFeature("FleetspeakServiceRegistryEntry")

            # Rename directories

            RenameFileConfig("__GrrDirectory", "Client.name")
            RenameFileConfig("__GrrVersion", "Source.version_string")

            # Rename files

            if GetConfig("ClientBuilder.console"):
                RenameFileConfig("__dbg_grr-client.exe", "Client.binary_name")
                RenameFileConfig("__dbg_GRRService.exe",
                                 "Nanny.service_binary_name")
            else:
                RenameFileConfig("__grr-client.exe", "Client.binary_name")
                RenameFileConfig("__GRRService.exe",
                                 "Nanny.service_binary_name")

            # Write Configs

            if fleetspeak_bundled:
                with open(GetConfig("ClientBuilder.fleetspeak_client_config"),
                          "rb") as f:
                    msi_file.SetFleetspeakConfig(f.read())

            RenameFileConfig("grr-config.yaml",
                             "ClientBuilder.config_filename")
            msi_file.SetGrrConfig(
                build_helpers.GetClientConfig(context).encode("utf-8"))

            # Write Fleetspeak service registry data

            if fleetspeak_enabled or fleetspeak_bundled:
                key_name = GetConfig(
                    "Client.fleetspeak_unsigned_services_regkey")
                key_name = key_name.replace("HKEY_LOCAL_MACHINE\\", "")
                ReplaceString("__FleetspeakServiceRegistryKey", key_name)
                ReplaceStringConfig("__FleetspeakServiceRegistryName",
                                    "Client.name")
                ReplaceString(
                    "__FleetspeakServiceRegistryValue",
                    f"[INSTALLDIR]{GetConfig('Client.fleetspeak_unsigned_config_fname')}"
                )

            if fleetspeak_bundled:
                ReplaceStringConfig("FleetspeakClientService",
                                    "Client.fleetspeak_service_name")

            # Write Fleetspeak service config

            # If we don't need to re-write the file after installation, just run
            # a dummy command.
            gen_fleetspeak_service_file_cmd = "cmd.exe /c exit"

            if fleetspeak_enabled or fleetspeak_bundled:
                path = GetConfig("ClientBuilder.fleetspeak_config_path")
                with open(path, "rb") as f:
                    msi_file.SetFleetspeakServiceConfig(f.read())
                RenameFileConfig("fleetspeak-service-config.txt",
                                 "Client.fleetspeak_unsigned_config_fname")
                # A ".in" config is a template: the installed client must
                # interpolate it on the target machine after installation.
                if path.endswith(".in"):
                    args = [
                        "[INSTALLDIR]" + GetConfig("Client.binary_name"),
                        "--config",
                        "[INSTALLDIR]" +
                        GetConfig("ClientBuilder.config_filename"),
                        "-p",
                        "Client.install_path=[INSTALLDIR]",
                        "--install",
                        "--interpolate_fleetspeak_service_config",
                        "[INSTALLDIR]" +
                        GetConfig("Client.fleetspeak_unsigned_config_fname"),
                    ]
                    gen_fleetspeak_service_file_cmd = subprocess.list2cmdline(
                        args)

            ReplaceString("__GenFleetspeakServiceFileCmd",
                          gen_fleetspeak_service_file_cmd)

            # Configure nanny service

            if legacy:
                nanny_args = ["--service_key", GetConfig("Client.config_key")]
                ReplaceString("__NannyArguments",
                              subprocess.list2cmdline(nanny_args))
                ReplaceStringConfig("__NannyServiceDescription",
                                    "Nanny.service_description")
                if GetConfig("ClientBuilder.console"):
                    ReplaceStringConfig("__DbgNannyRegistryKey",
                                        "Client.config_key")
                    ReplaceStringConfig("__DbgNannyServiceName",
                                        "Nanny.service_name")

                else:
                    ReplaceStringConfig("__NannyRegistryKey",
                                        "Client.config_key")
                    ReplaceStringConfig("__NannyServiceName",
                                        "Nanny.service_name")
                grr_binary = GetConfig("Client.binary_name")
                grr_config = GetConfig("ClientBuilder.config_filename")
                ReplaceString("__NannyChildBinary",
                              f"[INSTALLDIR]{grr_binary}")
                child_args = [
                    f"[INSTALLDIR]{grr_binary}", "--config",
                    f"[INSTALLDIR]{grr_config}"
                ]
                ReplaceString("__NannyChildCommandLine",
                              subprocess.list2cmdline(child_args))

            msi_file.Write()
            msi_file.Close()

            # shutil.move cannot overwrite an existing destination file.
            if os.path.exists(output_path):
                os.remove(output_path)
            shutil.move(os.path.join(tmp_dir, "installer.msi"), output_path)

        return output_path
Example #30
0
    def MakeDeployableBinary(self, template_path, output_path):
        """This will add the config to the client template and create a .rpm.

        Args:
          template_path: Path to the zipped client template.
          output_path: Destination path for the built .rpm package.

        Returns:
          The path of the signed package (`output_path`), or None if
          rpmbuild is not installed.
        """

        rpmbuild_binary = "/usr/bin/rpmbuild"
        if not os.path.exists(rpmbuild_binary):
            logging.error("rpmbuild not found, unable to repack client.")
            return

        with utils.TempDirectory() as tmp_dir:
            template_dir = os.path.join(tmp_dir, "dist")
            utils.EnsureDirExists(template_dir)

            # Unpack the template zip into the working directory.
            zf = zipfile.ZipFile(template_path)
            for name in zf.namelist():
                dirname = os.path.dirname(name)
                utils.EnsureDirExists(os.path.join(template_dir, dirname))
                with open(os.path.join(template_dir, name), "wb") as fd:
                    fd.write(zf.read(name))

            # Set up a RPM building environment.

            rpm_root_dir = os.path.join(tmp_dir, "rpmbuild")

            rpm_build_dir = os.path.join(rpm_root_dir, "BUILD")
            utils.EnsureDirExists(rpm_build_dir)

            rpm_buildroot_dir = os.path.join(rpm_root_dir, "BUILDROOT")
            utils.EnsureDirExists(rpm_buildroot_dir)

            rpm_rpms_dir = os.path.join(rpm_root_dir, "RPMS")
            utils.EnsureDirExists(rpm_rpms_dir)

            rpm_specs_dir = os.path.join(rpm_root_dir, "SPECS")
            utils.EnsureDirExists(rpm_specs_dir)

            template_binary_dir = os.path.join(tmp_dir,
                                               "dist/rpmbuild/grr-client")

            target_binary_dir = "%s%s" % (rpm_build_dir,
                                          config.CONFIG.Get(
                                              "ClientBuilder.target_dir",
                                              context=self.context))

            utils.EnsureDirExists(os.path.dirname(target_binary_dir))
            # Remove any stale target dir so shutil.move places the template
            # at target_binary_dir itself rather than nesting inside it.
            try:
                shutil.rmtree(target_binary_dir)
            except OSError:
                pass
            shutil.move(template_binary_dir, target_binary_dir)
            client_name = config.CONFIG.Get("Client.name",
                                            context=self.context)
            client_binary_name = config.CONFIG.Get("Client.binary_name",
                                                   context=self.context)
            if client_binary_name != "grr-client":
                shutil.move(
                    os.path.join(target_binary_dir, "grr-client"),
                    os.path.join(target_binary_dir, client_binary_name))

            # Generate spec
            spec_filename = os.path.join(rpm_specs_dir,
                                         "%s.spec" % client_name)
            self.GenerateFile(
                os.path.join(tmp_dir, "dist/rpmbuild/grr.spec.in"),
                spec_filename)

            initd_target_filename = os.path.join(rpm_build_dir, "etc/init.d",
                                                 client_name)

            # Generate init.d
            utils.EnsureDirExists(os.path.dirname(initd_target_filename))
            self.GenerateFile(
                os.path.join(tmp_dir, "dist/rpmbuild/grr-client.initd.in"),
                initd_target_filename)

            # Generate systemd unit (only supported by sufficiently recent
            # templates).
            if config.CONFIG["Template.version_numeric"] >= 3125:
                systemd_target_filename = os.path.join(
                    rpm_build_dir, "usr/lib/systemd/system/",
                    "%s.service" % client_name)

                utils.EnsureDirExists(os.path.dirname(systemd_target_filename))
                self.GenerateFile(
                    os.path.join(tmp_dir,
                                 "dist/rpmbuild/grr-client.service.in"),
                    systemd_target_filename)

                # Generate prelinking blacklist file
                prelink_target_filename = os.path.join(rpm_build_dir,
                                                       "etc/prelink.conf.d",
                                                       "%s.conf" % client_name)

                utils.EnsureDirExists(os.path.dirname(prelink_target_filename))
                self.GenerateFile(
                    os.path.join(tmp_dir,
                                 "dist/rpmbuild/prelink_blacklist.conf.in"),
                    prelink_target_filename)

            # Create a client config.
            client_context = ["Client Context"] + self.context
            client_config_content = self.GetClientConfig(client_context)
            if isinstance(client_config_content, str):
                # GetClientConfig returns text; encode it before writing to a
                # file opened in binary mode (writing str to "wb" raises
                # TypeError on Python 3).
                client_config_content = client_config_content.encode("utf-8")

            with open(
                    os.path.join(
                        target_binary_dir,
                        config.CONFIG.Get("ClientBuilder.config_filename",
                                          context=self.context)), "wb") as fd:
                fd.write(client_config_content)

            # Set the daemon to executable.
            os.chmod(os.path.join(target_binary_dir, client_binary_name),
                     0o755)

            # rpmbuild uses "x86_64" where the template metadata says "amd64".
            client_arch = config.CONFIG.Get("Template.arch",
                                            context=self.context)
            if client_arch == "amd64":
                client_arch = "x86_64"

            command = [
                rpmbuild_binary, "--define", "_topdir " + rpm_root_dir,
                "--target", client_arch, "--buildroot", rpm_buildroot_dir,
                "-bb", spec_filename
            ]
            try:
                subprocess.check_output(command, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as e:
                logging.error("Error calling %s.", command)
                logging.error(e.output)
                raise

            client_version = config.CONFIG.Get("Template.version_string",
                                               context=self.context)
            rpm_filename = os.path.join(
                rpm_rpms_dir, client_arch,
                "%s-%s-1.%s.rpm" % (client_name, client_version, client_arch))

            utils.EnsureDirExists(os.path.dirname(output_path))
            shutil.move(rpm_filename, output_path)

            logging.info("Created package %s", output_path)
            self.Sign(output_path)
            return output_path