def testExportFile(self):
  """Check we can export a file without errors."""
  with utils.TempDirectory() as tmpdir:
    export_utils.CopyAFF4ToLocal(
        self.out.Add("testfile1"), tmpdir, overwrite=True, token=self.token)

    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertTrue("testfile1" in os.listdir(expected_outdir))

def testDownloadCollectionWithFoldersEntries(self):
  """Check we can download an RDFValueCollection that also references folders."""
  fd = aff4.FACTORY.Create("aff4:/testcoll", "RDFValueCollection",
                           token=self.token)
  fd.Add(file_finder.FileFinderResult(stat_entry=rdfvalue.StatEntry(
      aff4path=self.out.Add("testfile5"))))
  fd.Add(file_finder.FileFinderResult(stat_entry=rdfvalue.StatEntry(
      aff4path=self.out.Add("testdir1"), st_mode=stat.S_IFDIR)))
  fd.Close()

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection("aff4:/testcoll", tmpdir, overwrite=True,
                                    dump_client_info=True, token=self.token,
                                    max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

    # Check we found both entries (the file and the folder).
    self.assertTrue("testfile5" in os.listdir(expected_outdir))
    self.assertTrue("testdir1" in os.listdir(expected_outdir))

def testTarFileWithSymlink(self):
  outfd = StringIO.StringIO()

  infd1 = StringIO.StringIO("this is a test string")
  st1 = os.stat_result((0644, 0, 0, 0, 0, 0, len(infd1.getvalue()), 0, 0, 0))

  infd2 = StringIO.StringIO("this is another test string")
  st2 = os.stat_result((0644, 0, 0, 0, 0, 0, len(infd2.getvalue()), 0, 0, 0))

  # Write the tar archive into a file like object.
  with utils.StreamingTarWriter(outfd, mode="w:gz") as writer:
    writer.WriteFromFD(infd1, "test1.txt", st=st1)
    writer.WriteFromFD(infd2, "subdir/test2.txt", st=st2)
    writer.WriteSymlink("test1.txt", "test1.txt.link")
    writer.WriteSymlink("subdir/test2.txt", "test2.txt.link")

  with utils.TempDirectory() as temp_dir:
    tar_path = os.path.join(temp_dir, "archive.tar.gz")
    with open(tar_path, "w") as fd:
      fd.write(outfd.getvalue())

    # Extract the archive with the command line tool so that the symlinks
    # are restored as symlinks on disk.
    subprocess.check_call(["tar", "-xzf", tar_path, "-C", temp_dir])

    link_path = os.path.join(temp_dir, "test1.txt.link")
    self.assertTrue(os.path.islink(link_path))
    self.assertEqual(os.readlink(link_path), "test1.txt")

    link_path = os.path.join(temp_dir, "test2.txt.link")
    self.assertTrue(os.path.islink(link_path))
    self.assertEqual(os.readlink(link_path), "subdir/test2.txt")

def testDownloadCollection(self):
  """Check we can download files referenced in an RDFValueCollection."""
  # Create a collection with URNs to some files.
  fd = aff4.FACTORY.Create("aff4:/testcoll", "RDFValueCollection",
                           token=self.token)
  fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1")))
  fd.Add(rdfvalue.StatEntry(aff4path=self.out.Add("testfile2")))
  fd.Add(file_finder.FileFinderResult(stat_entry=rdfvalue.StatEntry(
      aff4path=self.out.Add("testfile5"))))
  fd.Close()

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection("aff4:/testcoll", tmpdir, overwrite=True,
                                    dump_client_info=True, token=self.token,
                                    max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

    # Check we found all three files.
    self.assertTrue("testfile1" in os.listdir(expected_outdir))
    self.assertTrue("testfile2" in os.listdir(expected_outdir))
    self.assertTrue("testfile5" in os.listdir(expected_outdir))

    # Check we dumped a YAML file to the root of the client.
    expected_rootdir = os.path.join(tmpdir, self.client_id.Basename())
    self.assertTrue("client_info.yaml" in os.listdir(expected_rootdir))

def SignGRRPyinstallerBinaries(self):
  cert_name = config.CONFIG.Get("ClientBuilder.signing_cert_name",
                                context=self.context)
  keychain_file = config.CONFIG.Get("ClientBuilder.signing_keychain_file",
                                    context=self.context)
  if not keychain_file:
    print("No keychain file specified in the config, skipping "
          "binary signing...")
    return

  print("Signing binaries with keychain: %s" % keychain_file)

  with utils.TempDirectory() as temp_dir:
    # codesign needs the directory name to adhere to a particular
    # naming format.
    bundle_dir = os.path.join(temp_dir,
                              "%s_%s" % (self.client_name, self.version))
    shutil.move(self.target_binary_dir, bundle_dir)

    temp_binary_path = os.path.join(
        bundle_dir,
        config.CONFIG.Get("Client.binary_name", context=self.context))
    subprocess.check_call([
        "codesign", "--verbose", "--deep", "--force", "--sign", cert_name,
        "--keychain", keychain_file, temp_binary_path
    ])

    shutil.move(bundle_dir, self.target_binary_dir)

def testCreatesTarContainingDeduplicatedCollectionFiles(self):
  self._RunFlow(archive_format="TAR_GZ")

  user_fd = aff4.FACTORY.Open(aff4.ROOT_URN.Add("users").Add("test"),
                              token=self.token)
  notifications = user_fd.Get(user_fd.Schema.PENDING_NOTIFICATIONS)
  self.assertEqual(len(notifications), 1)

  tar_fd = aff4.FACTORY.Open(notifications[0].subject,
                             aff4_type="AFF4Stream", token=self.token)
  tar_fd_contents = tar_fd.Read(len(tar_fd))

  with utils.TempDirectory() as temp_dir:
    archive_path = os.path.join(temp_dir, "archive.tar.gz")
    with open(archive_path, "w") as out_fd:
      out_fd.write(tar_fd_contents)

    subprocess.check_call(["tar", "-xf", archive_path, "-C", temp_dir])

    prefix = os.path.join(temp_dir, "prefix",
                          "C.0000000000000000/fs/os/foo/bar")
    self.assertTrue(os.path.islink(os.path.join(prefix, "hello1.txt")))
    self.assertTrue(os.path.islink(
        utils.SmartStr(os.path.join(prefix, u"中国新闻网新闻中.txt"))))

    with open(os.path.join(prefix, "hello1.txt"), "r") as fd:
      self.assertEqual(fd.read(), "hello1")
    with open(utils.SmartStr(
        os.path.join(prefix, u"中国新闻网新闻中.txt")), "r") as fd:
      self.assertEqual(fd.read(), "hello2")

def testDownloadCollectionWithFlattenOption(self):
  """Check that the flatten option symlinks downloaded files into "files"."""
  # Create a collection with URNs to some files.
  fd = aff4.FACTORY.Create("aff4:/testcoll", collects.RDFValueCollection,
                           token=self.token)
  fd.Add(rdfvalue.RDFURN(self.out.Add("testfile1")))
  fd.Add(rdf_client.StatEntry(aff4path=self.out.Add("testfile2")))
  fd.Add(file_finder.FileFinderResult(stat_entry=rdf_client.StatEntry(
      aff4path=self.out.Add("testfile5"))))
  fd.Close()

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection("aff4:/testcoll", tmpdir, overwrite=True,
                                    dump_client_info=True, flatten=True,
                                    token=self.token, max_threads=2)

    # Check that the "files" folder is filled with symlinks to the
    # downloaded files.
    symlinks = os.listdir(os.path.join(tmpdir, "files"))
    self.assertEqual(len(symlinks), 3)
    self.assertListEqual(sorted(symlinks),
                         ["C.1000000000000000_fs_os_testfile1",
                          "C.1000000000000000_fs_os_testfile2",
                          "C.1000000000000000_fs_os_testfile5"])
    self.assertEqual(
        os.readlink(os.path.join(tmpdir, "files",
                                 "C.1000000000000000_fs_os_testfile1")),
        os.path.join(tmpdir, "C.1000000000000000", "fs", "os", "testfile1"))

def testDownloadCollectionWithFoldersEntries(self):
  """Check we can download a collection that also references folders."""
  fd = sequential_collection.GeneralIndexedCollection(self.collection_urn,
                                                      token=self.token)
  with data_store.DB.GetMutationPool(token=self.token) as pool:
    fd.Add(
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testfile5",
                                            pathtype="OS"))),
        mutation_pool=pool)
    fd.Add(
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(path="testdir1", pathtype="OS"),
                st_mode=stat.S_IFDIR)),
        mutation_pool=pool)

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(self.collection_urn, tmpdir,
                                    overwrite=True, dump_client_info=True,
                                    token=self.token, max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

    # Check we found both entries (the file and the folder).
    self.assertTrue("testfile5" in os.listdir(expected_outdir))
    self.assertTrue("testdir1" in os.listdir(expected_outdir))

def Run(self, args):
  """Initializes the driver."""
  # This action might crash the box so we need to flush the transaction log.
  self.SyncTransactionLog()

  if not args.driver:
    raise IOError("No driver supplied.")

  pub_key = config_lib.CONFIG.Get("Client.driver_signing_public_key")
  if not args.driver.Verify(pub_key):
    raise OSError("Driver signature verification failed.")

  if args.force_reload:
    client_utils_osx.UninstallDriver(args.driver_name)

  # Wrap the tarball in a file like object for tarfile to handle it.
  driver_buf = StringIO.StringIO(args.driver.data)

  # Unpack it to a temporary directory.
  with utils.TempDirectory() as kext_tmp_dir:
    driver_archive = tarfile.open(fileobj=driver_buf, mode="r:gz")
    driver_archive.extractall(kext_tmp_dir)

    # Now load it.
    kext_path = self._FindKext(kext_tmp_dir)
    logging.debug("Loading kext {0}".format(kext_path))
    client_utils_osx.InstallDriver(kext_path)

def testGeneratesTarGzArchive(self):
  result = self.handler.Handle(hunt_plugin.ApiGetHuntFilesArchiveArgs(
      hunt_id=self.hunt.urn.Basename(), archive_format="TAR_GZ"),
                               token=self.token)

  with utils.TempDirectory() as temp_dir:
    tar_path = os.path.join(temp_dir, "archive.tar.gz")
    with open(tar_path, "wb") as fd:
      for chunk in result.GenerateContent():
        fd.write(chunk)

    with tarfile.open(tar_path) as tar_fd:
      tar_fd.extractall(path=temp_dir)

    manifest_file_path = None
    for parent, _, files in os.walk(temp_dir):
      if "MANIFEST" in files:
        manifest_file_path = os.path.join(parent, "MANIFEST")
        break

    self.assertTrue(manifest_file_path)
    with open(manifest_file_path, "rb") as fd:
      manifest = yaml.safe_load(fd.read())

    self.assertEqual(manifest["archived_files"], 10)
    self.assertEqual(manifest["failed_files"], 0)
    self.assertEqual(manifest["processed_files"], 10)
    self.assertEqual(manifest["ignored_files"], 0)

def testRepackAll(self):
  """Test repacking all binaries."""
  with utils.TempDirectory() as tmp_dir:
    new_dir = os.path.join(tmp_dir, "grr", "executables")
    os.makedirs(new_dir)

    # Copy unzipsfx so it can be used in repacking.
    shutil.copy(os.path.join(
        self.executables_dir,
        "windows/templates/unzipsfx/unzipsfx-i386.exe"), new_dir)
    shutil.copy(os.path.join(
        self.executables_dir,
        "windows/templates/unzipsfx/unzipsfx-amd64.exe"), new_dir)

    with test_lib.ConfigOverrider({"ClientBuilder.executables_dir": new_dir}):
      with test_lib.ConfigOverrider(
          {"ClientBuilder.unzipsfx_stub_dir": new_dir}):
        maintenance_utils.RepackAllBinaries()

    self.assertEqual(len(glob.glob(
        os.path.join(new_dir, "linux/installers/*.deb"))), 2)
    self.assertEqual(len(glob.glob(
        os.path.join(new_dir, "linux/installers/*.rpm"))), 2)
    self.assertEqual(len(glob.glob(
        os.path.join(new_dir, "windows/installers/*.exe"))), 2)
    self.assertEqual(len(glob.glob(
        os.path.join(new_dir, "darwin/installers/*.pkg"))), 1)

def testExportDir(self):
  """Check we can export a dir without errors."""
  self.CreateDir("testdir")
  self.CreateFile("testdir/testfile1")
  self.CreateFile("testdir/testfile2")
  self.CreateDir("testdir/testdir1")
  self.CreateFile("testdir/testdir1/testfile3")
  self.CreateDir("testdir/testdir1/testdir2")
  self.CreateFile("testdir/testdir1/testdir2/testfile4")

  plugin = file_plugin.FileExportPlugin()
  parser = argparse.ArgumentParser()
  plugin.ConfigureArgParser(parser)

  with utils.TempDirectory() as tmpdir:
    plugin.Run(parser.parse_args(args=[
        "--path", str(self.out.Add("testdir")), "--output", tmpdir
    ]))

    expected_outdir = os.path.join(tmpdir,
                                   self.out.Add("testdir").Path()[1:])
    self.assertTrue("testfile1" in os.listdir(expected_outdir))

    full_outdir = os.path.join(expected_outdir, "testdir1", "testdir2")
    self.assertTrue("testfile4" in os.listdir(full_outdir))

def testZipFileWithSymlink(self):
  """Test that symlinks are preserved when unpacking generated zips."""
  compressions = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED]
  for compression in compressions:
    outfd = StringIO.StringIO()

    infd1 = StringIO.StringIO("this is a test string")
    infd2 = StringIO.StringIO("this is another test string")

    with utils.StreamingZipWriter(outfd, compression=compression) as writer:
      writer.WriteFromFD(infd1, "test1.txt")
      writer.WriteFromFD(infd2, "subdir/test2.txt")
      writer.WriteSymlink("test1.txt", "test1.txt.link")
      writer.WriteSymlink("subdir/test2.txt", "test2.txt.link")

    with utils.TempDirectory() as temp_dir:
      zip_path = os.path.join(temp_dir, "archive.zip")
      with open(zip_path, "w") as fd:
        fd.write(outfd.getvalue())

      # The builtin Python ZipFile implementation doesn't support symlinks,
      # so we have to extract the files with the command line tool.
      subprocess.check_call(["unzip", "-x", zip_path, "-d", temp_dir])

      link_path = os.path.join(temp_dir, "test1.txt.link")
      self.assertTrue(os.path.islink(link_path))
      self.assertEqual(os.readlink(link_path), "test1.txt")

      link_path = os.path.join(temp_dir, "test2.txt.link")
      self.assertTrue(os.path.islink(link_path))
      self.assertEqual(os.readlink(link_path), "subdir/test2.txt")

def testCreatesZipContainingDeduplicatedCollectionFiles(self):
  self._RunFlow(archive_format="ZIP")

  user_fd = aff4.FACTORY.Open(aff4.ROOT_URN.Add("users").Add("test"),
                              token=self.token)
  notifications = user_fd.Get(user_fd.Schema.PENDING_NOTIFICATIONS)
  self.assertEqual(len(notifications), 1)

  zip_fd = aff4.FACTORY.Open(notifications[0].subject,
                             aff4_type="AFF4Stream", token=self.token)
  zip_fd_contents = zip_fd.Read(len(zip_fd))

  with utils.TempDirectory() as temp_dir:
    archive_path = os.path.join(temp_dir, "archive.zip")
    with open(archive_path, "w") as out_fd:
      out_fd.write(zip_fd_contents)

    # The builtin Python ZipFile implementation doesn't support symlinks,
    # so we have to extract the files with the command line tool.
    subprocess.check_call(["unzip", "-x", archive_path, "-d", temp_dir])

    prefix = os.path.join(temp_dir, "prefix",
                          "C.0000000000000000/fs/os/foo/bar")
    self.assertTrue(os.path.islink(os.path.join(prefix, "hello1.txt")))
    self.assertTrue(os.path.islink(utils.SmartStr(
        os.path.join(prefix, u"中国新闻网新闻中.txt"))))

    with open(os.path.join(prefix, "hello1.txt"), "r") as fd:
      self.assertEqual(fd.read(), "hello1")
    with open(utils.SmartStr(
        os.path.join(prefix, u"中国新闻网新闻中.txt")), "r") as fd:
      self.assertEqual(fd.read(), "hello2")

def GetClientConfig(self, context, validate=True, deploy_timestamp=True):
  """Generates the client config file for inclusion in deployable binaries."""
  with utils.TempDirectory() as tmp_dir:
    # Make sure we write the file in yaml format.
    filename = os.path.join(
        tmp_dir,
        config.CONFIG.Get("ClientBuilder.config_filename", context=context))

    new_config = config.CONFIG.MakeNewConfig()
    new_config.Initialize(reset=True, data="")
    new_config.SetWriteBack(filename)

    # Only copy certain sections to the client. We enumerate all
    # defined options and then resolve those from the config in the
    # client's context. The result is the raw option as if the
    # client read our config file.
    client_context = context[:]
    while contexts.CLIENT_BUILD_CONTEXT in client_context:
      client_context.remove(contexts.CLIENT_BUILD_CONTEXT)

    for descriptor in sorted(config.CONFIG.type_infos, key=lambda x: x.name):
      if descriptor.name in self.SKIP_OPTION_LIST:
        continue

      if descriptor.section in self.CONFIG_SECTIONS:
        value = config.CONFIG.GetRaw(
            descriptor.name, context=client_context, default=None)

        if value is not None:
          logging.debug("Copying config option to client: %s",
                        descriptor.name)
          new_config.SetRaw(descriptor.name, value)

    if config.CONFIG.Get("ClientBuilder.fleetspeak_enabled", context=context):
      new_config.Set("Client.fleetspeak_enabled", True)

    if deploy_timestamp:
      new_config.Set("Client.deploy_time", str(rdfvalue.RDFDatetime.Now()))

    new_config.Write()

    if validate:
      self.ValidateEndConfig(new_config)

    private_validator = config.CONFIG.Get(
        "ClientBuilder.private_config_validator_class", context=context)
    if private_validator:
      try:
        validator = config_validator_base.PrivateConfigValidator.classes[
            private_validator]()
      except KeyError:
        logging.error("Couldn't find config validator class %s, "
                      "you probably need to copy it into lib/local",
                      private_validator)
        raise
      validator.ValidateEndConfig(new_config, self.context)

    return open(filename, "rb").read()

def testRecursiveDownload(self):
  """Check we can recursively download a directory tree without errors."""
  with utils.TempDirectory() as tmpdir:
    export_utils.RecursiveDownload(
        aff4.FACTORY.Open(self.out, token=self.token), tmpdir,
        overwrite=True)

    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertTrue("testfile1" in os.listdir(expected_outdir))

    full_outdir = os.path.join(expected_outdir, "testdir1", "testdir2")
    self.assertTrue("testfile4" in os.listdir(full_outdir))

def testConfigFileInclusion(self):
  one = r"""
Config.includes:
  - 2.yaml
Section1.int: 1
"""
  two = r"""
SecondaryFileIncluded: true
Section1.int: 2
Config.includes:
  - subdir/3.yaml
"""
  three = r"""
TertiaryFileIncluded: true
Section1.int: 3
"""

  with utils.TempDirectory() as temp_dir:
    configone = os.path.join(temp_dir, "1.yaml")
    configtwo = os.path.join(temp_dir, "2.yaml")
    subdir = os.path.join(temp_dir, "subdir")
    os.makedirs(subdir)
    configthree = os.path.join(subdir, "3.yaml")

    with open(configone, "wb") as fd:
      fd.write(one)

    with open(configtwo, "wb") as fd:
      fd.write(two)

    with open(configthree, "wb") as fd:
      fd.write(three)

    # Using filename
    conf = self._GetNewConf()
    conf.Initialize(parser=config_lib.YamlParser, filename=configone)
    self._CheckConf(conf)

    # Using an fd with no fd.name should raise because there is no way to
    # resolve the relative path.
    conf = self._GetNewConf()
    fd = StringIO.StringIO(one)
    self.assertRaises(config_lib.ConfigFileNotFound, conf.Initialize,
                      parser=config_lib.YamlParser, fd=fd)

    # Using data should raise for the same reason.
    conf = self._GetNewConf()
    self.assertRaises(config_lib.ConfigFileNotFound, conf.Initialize,
                      parser=config_lib.YamlParser, data=one)

def testConfigFileInclusion(self):
  one = r"""
ConfigIncludes:
  - 2.yaml
Section1.int: 1
"""
  two = r"""
SecondaryFileIncluded: true
Section1.int: 2
ConfigIncludes:
  - subdir/3.yaml
"""
  three = r"""
TertiaryFileIncluded: true
Section1.int: 3
"""

  with utils.TempDirectory() as temp_dir:
    configone = os.path.join(temp_dir, "1.yaml")
    configtwo = os.path.join(temp_dir, "2.yaml")
    subdir = os.path.join(temp_dir, "subdir")
    os.makedirs(subdir)
    configthree = os.path.join(subdir, "3.yaml")

    with open(configone, "w") as fd:
      fd.write(one)

    with open(configtwo, "w") as fd:
      fd.write(two)

    with open(configthree, "w") as fd:
      fd.write(three)

    # Using filename
    conf = self._GetNewConf()
    conf.Initialize(parser=config_lib.YamlParser, filename=configone)
    self._CheckConf(conf)

    # If we don't get a filename or a handle with a .name we look in the cwd
    # for the specified path; check this works.
    olddir = os.getcwd()
    os.chdir(temp_dir)

    # Using fd with no fd.name
    conf = self._GetNewConf()
    fd = StringIO.StringIO(one)
    conf.Initialize(parser=config_lib.YamlParser, fd=fd)
    self._CheckConf(conf)

    # Using data
    conf = self._GetNewConf()
    conf.Initialize(parser=config_lib.YamlParser, data=one)
    self._CheckConf(conf)

    os.chdir(olddir)

def testFileCasing(self):
  """Test our ability to read the correct casing from filesystem."""
  try:
    os.lstat(os.path.join(self.base_path, "nUmBeRs.txt"))
    os.lstat(os.path.join(self.base_path, "nuMbErs.txt"))
    # If we reached this point we are on a case insensitive file system
    # and the tests below do not make any sense.
    logging.warning("Case insensitive file system detected. Skipping test.")
    return
  except (IOError, OSError):
    pass

  # Create 2 files with names that differ only in casing.
  with utils.TempDirectory() as temp_dir:
    path1 = os.path.join(temp_dir, "numbers.txt")
    shutil.copy(os.path.join(self.base_path, "numbers.txt"), path1)

    path2 = os.path.join(temp_dir, "numbers.TXT")
    shutil.copy(os.path.join(self.base_path, "numbers.txt.ver2"), path2)

    fd = vfs.VFSOpen(rdf_paths.PathSpec(
        path=path1, pathtype=rdf_paths.PathSpec.PathType.OS))
    self.assertEqual(fd.pathspec.Basename(), "numbers.txt")

    fd = vfs.VFSOpen(rdf_paths.PathSpec(
        path=path2, pathtype=rdf_paths.PathSpec.PathType.OS))
    self.assertEqual(fd.pathspec.Basename(), "numbers.TXT")

  path = os.path.join(self.base_path, "Numbers.txt")
  fd = vfs.VFSOpen(rdf_paths.PathSpec(
      path=path, pathtype=rdf_paths.PathSpec.PathType.OS))
  read_path = fd.pathspec.Basename()

  # The exact file now is non deterministic but should be either of the two:
  if read_path != "numbers.txt" and read_path != "numbers.TXT":
    raise RuntimeError("read path is %s" % read_path)

  # Ensure that the produced pathspec specified no case folding:
  s = fd.Stat()
  self.assertEqual(s.pathspec.path_options,
                   rdf_paths.PathSpec.Options.CASE_LITERAL)

  # Case folding will only occur when requested - this should raise because
  # we have the CASE_LITERAL option:
  pathspec = rdf_paths.PathSpec(
      path=path,
      pathtype=rdf_paths.PathSpec.PathType.OS,
      path_options=rdf_paths.PathSpec.Options.CASE_LITERAL)
  self.assertRaises(IOError, vfs.VFSOpen, pathspec)

def GetClientConfig(self, context, validate=True):
  """Generates the client config file for inclusion in deployable binaries."""
  with utils.TempDirectory() as tmp_dir:
    # Make sure we write the file in yaml format.
    filename = os.path.join(
        tmp_dir,
        config_lib.CONFIG.Get("ClientBuilder.config_filename",
                              context=context))

    new_config = config_lib.CONFIG.MakeNewConfig()
    new_config.Initialize(reset=True, data="")
    new_config.SetWriteBack(filename)

    # Only copy certain sections to the client. We enumerate all
    # defined options and then resolve those from the config in the
    # client's context. The result is the raw option as if the
    # client read our config file.
    for descriptor in sorted(config_lib.CONFIG.type_infos,
                             key=lambda x: x.name):
      if descriptor.name in self.SKIP_OPTION_LIST:
        continue

      if descriptor.section in self.CONFIG_SECTIONS:
        value = config_lib.CONFIG.GetRaw(descriptor.name, context=context,
                                         default=None)

        if value is not None:
          logging.debug("Copying config option to client: %s",
                        descriptor.name)
          new_config.SetRaw(descriptor.name, value)

    new_config.Set("Client.build_time", str(rdfvalue.RDFDatetime().Now()))

    # Mark the client with the current build environment.
    new_config.Set("Client.build_environment",
                   rdf_client.Uname.FromCurrentSystem().signature())

    # Update the plugins list in the configuration file. Note that by
    # stripping away directory information, the client will load these from
    # its own install path.
    plugins = []
    for plugin in config_lib.CONFIG["Client.plugins"]:
      plugins.append(os.path.basename(plugin))

    new_config.SetRaw("Client.plugins", plugins)

    new_config.Write()

    if validate:
      self.ValidateEndConfig(new_config)

    return open(filename, "rb").read()

def testRepackAll(self):
  """Test repacking all binaries."""
  self.executables_dir = config_lib.Resource().Filter("executables")
  with utils.TempDirectory() as tmp_dir:
    new_dir = os.path.join(tmp_dir, "grr", "executables")
    os.makedirs(new_dir)

    # Copy unzipsfx so it can be used in repacking.
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-i386.exe"), new_dir)
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-amd64.exe"),
        new_dir)

    with test_lib.ConfigOverrider({
        "ClientBuilder.executables_dir": new_dir,
        "ClientBuilder.unzipsfx_stub_dir": new_dir
    }):
      repacking.TemplateRepacker().RepackAllTemplates()

    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.deb"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.rpm"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.exe"))), 4)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.pkg"))), 1)

    # Validate the config appended to the OS X package.
    zf = zipfile.ZipFile(
        glob.glob(os.path.join(new_dir, "installers/*.pkg")).pop(), mode="r")
    fd = zf.open("config.yaml")

    # We can't load the included build.yaml because the package hasn't been
    # installed.
    loaded = yaml.safe_load(fd)
    loaded.pop("Config.includes")

    packaged_config = config.CONFIG.MakeNewConfig()
    packaged_config.Initialize(parser=config_lib.YamlParser,
                               data=yaml.safe_dump(loaded))
    packaged_config.Validate(sections=build.ClientRepacker.CONFIG_SECTIONS)
    repacker = build.ClientRepacker()
    repacker.ValidateEndConfig(packaged_config)

def testCreatesZipContainingDeduplicatedHuntResultsFiles(self):
  with utils.Stubber(email_alerts.EMAIL_ALERTER, "SendEmail",
                     self.SendEmailMock):
    self.email_messages = []

    for _ in test_lib.TestFlowHelper("ExportHuntResultFilesAsArchive", None,
                                     hunt_urn=self.hunt_urn, format="ZIP",
                                     token=self.token):
      pass

  self._CheckEmailMessage(self.email_messages)

  user_fd = aff4.FACTORY.Open(aff4.ROOT_URN.Add("users").Add("test"),
                              token=self.token)
  notifications = user_fd.Get(user_fd.Schema.PENDING_NOTIFICATIONS)
  self.assertEqual(len(notifications), 1)

  zip_fd = aff4.FACTORY.Open(notifications[0].subject,
                             aff4_type="AFF4Stream", token=self.token)
  zip_fd_contents = zip_fd.Read(len(zip_fd))

  with utils.TempDirectory() as temp_dir:
    archive_path = os.path.join(temp_dir, "archive.zip")
    with open(archive_path, "w") as out_fd:
      out_fd.write(zip_fd_contents)

    # The builtin Python ZipFile implementation doesn't support symlinks,
    # so we have to extract the files with the command line tool.
    subprocess.check_call(["unzip", "-x", archive_path, "-d", temp_dir])

    friendly_hunt_name = self.hunt_urn.Basename().replace(":", "_")
    prefix = os.path.join(temp_dir, friendly_hunt_name,
                          "C.0000000000000000/fs/os/foo/bar")
    self.assertTrue(os.path.islink(os.path.join(prefix, "hello1.txt")))
    self.assertTrue(os.path.islink(
        utils.SmartStr(os.path.join(prefix, u"中国新闻网新闻中.txt"))))

    with open(os.path.join(prefix, "hello1.txt"), "r") as fd:
      self.assertEqual(fd.read(), "hello1")
    with open(utils.SmartStr(
        os.path.join(prefix, u"中国新闻网新闻中.txt")), "r") as fd:
      self.assertEqual(fd.read(), "hello2")

def testDownloadCollectionIgnoresArtifactResultsWithoutFiles(self):
  # Create a collection with a result that references no files.
  fd = sequential_collection.GeneralIndexedCollection(self.collection_urn,
                                                      token=self.token)
  fd.Add(collectors.ArtifactFilesDownloaderResult())

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(self.collection_urn, tmpdir,
                                    overwrite=True, dump_client_info=True,
                                    token=self.token, max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertFalse(os.path.exists(expected_outdir))

def testRepackAll(self):
  """Test repacking all binaries."""
  with utils.TempDirectory() as tmp_dir:
    new_dir = os.path.join(tmp_dir, "grr", "executables")

    # Copy templates and ensure our resulting directory is writable.
    shutil.copytree(self.executables_dir, new_dir)
    for root, dirs, _ in os.walk(new_dir):
      for this_dir in dirs:
        os.chmod(os.path.join(root, this_dir),
                 stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)

    with test_lib.ConfigOverrider({"ClientBuilder.source": tmp_dir}):
      # If this doesn't raise, it means that there were either no templates,
      # or all of them were repacked successfully.
      maintenance_utils.RepackAllBinaries()

def testDownloadCollectionIgnoresArtifactResultsWithoutFiles(self):
  # Create a collection with a result that references no files.
  fd = aff4.FACTORY.Create("aff4:/testcoll", collects.RDFValueCollection,
                           token=self.token)
  fd.Add(collectors.ArtifactFilesDownloaderResult())
  fd.Close()

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection("aff4:/testcoll", tmpdir, overwrite=True,
                                    dump_client_info=True, token=self.token,
                                    max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertFalse(os.path.exists(expected_outdir))

def testExportFile(self):
  """Check we can export a file without errors."""
  self.CreateFile("testfile1")

  plugin = file_plugin.FileExportPlugin()
  parser = argparse.ArgumentParser()
  plugin.ConfigureArgParser(parser)

  with utils.TempDirectory() as tmpdir:
    plugin.Run(parser.parse_args(args=[
        "--no_legacy_warning_pause", "--path",
        str(self.out.Add("testfile1")), "--output", tmpdir
    ]))

    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertTrue("testfile1" in os.listdir(expected_outdir))

def testCreatesTarContainingDeduplicatedHuntResultsFiles(self):
  with utils.Stubber(email_alerts, "SendEmail", self.SendEmailMock):
    self.email_messages = []

    for _ in test_lib.TestFlowHelper("ExportHuntResultFilesAsArchive", None,
                                     hunt_urn=self.hunt_urn,
                                     format="TAR_GZIP", token=self.token):
      pass

  self._CheckEmailMessage(self.email_messages)

  user_fd = aff4.FACTORY.Open(aff4.ROOT_URN.Add("users").Add("test"),
                              token=self.token)
  notifications = user_fd.Get(user_fd.Schema.PENDING_NOTIFICATIONS)
  self.assertEqual(len(notifications), 1)

  tar_fd = aff4.FACTORY.Open(notifications[0].subject,
                             aff4_type="AFF4Stream", token=self.token)
  tar_fd_contents = tar_fd.Read(len(tar_fd))

  with utils.TempDirectory() as temp_dir:
    archive_path = os.path.join(temp_dir, "archive.tar.gz")
    with open(archive_path, "w") as out_fd:
      out_fd.write(tar_fd_contents)

    subprocess.check_call(["tar", "-xf", archive_path, "-C", temp_dir])

    friendly_hunt_name = self.hunt_urn.Basename().replace(":", "_")
    prefix = os.path.join(temp_dir, friendly_hunt_name,
                          "C.0000000000000000/fs/os/foo/bar")
    self.assertTrue(os.path.islink(os.path.join(prefix, "hello1.txt")))
    self.assertTrue(os.path.islink(
        utils.SmartStr(os.path.join(prefix, u"中国新闻网新闻中.txt"))))

    with open(os.path.join(prefix, "hello1.txt"), "r") as fd:
      self.assertEqual(fd.read(), "hello1")
    with open(utils.SmartStr(
        os.path.join(prefix, u"中国新闻网新闻中.txt")), "r") as fd:
      self.assertEqual(fd.read(), "hello2")

def testRepackAll(self):
  """Test repacking all binaries."""
  with utils.TempDirectory() as tmp_dir:
    new_dir = os.path.join(tmp_dir, "grr", "executables")
    os.makedirs(new_dir)

    # Copy unzipsfx so it can be used in repacking.
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-i386.exe"), new_dir)
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-amd64.exe"),
        new_dir)

    # Since we want to be able to increase the client version in the repo
    # without immediately making a client template release, just check we can
    # repack whatever templates we have installed.
    version = pkg_resources.get_distribution(
        "grr-response-templates").version.replace("post", "")
    major, minor, revision, release = version.split(".")

    with test_lib.ConfigOverrider({
        "ClientBuilder.executables_dir": new_dir,
        "ClientBuilder.unzipsfx_stub_dir": new_dir,
        "Client.version_major": major,
        "Client.version_minor": minor,
        "Client.version_revision": revision,
        "Client.version_release": release,
    }):
      maintenance_utils.RepackAllBinaries()

    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "linux/installers/*.deb"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "linux/installers/*.rpm"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "windows/installers/*.exe"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "darwin/installers/*.pkg"))), 1)

def _VerifyDownload(self):
  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(self.collection_urn, tmpdir,
                                    overwrite=True, dump_client_info=True,
                                    token=self.token, max_threads=2)
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])

    # Check we found all four files.
    self.assertTrue("testfile1" in os.listdir(expected_outdir))
    self.assertTrue("testfile2" in os.listdir(expected_outdir))
    self.assertTrue("testfile5" in os.listdir(expected_outdir))
    self.assertTrue("testfile6" in os.listdir(expected_outdir))

    # Check we dumped a YAML file to the root of the client.
    expected_rootdir = os.path.join(tmpdir, self.client_id.Basename())
    self.assertTrue("client_info.yaml" in os.listdir(expected_rootdir))

def testConfigFileIncludeAbsolutePaths(self):
  one = r"""
Section1.int: 1
"""
  with utils.TempDirectory() as temp_dir:
    configone = os.path.join(temp_dir, "1.yaml")
    with open(configone, "wb") as fd:
      fd.write(one)

    absolute_include = r"""
Config.includes:
  - %s
Section1.int: 2
""" % configone

    conf = self._GetNewConf()
    conf.Initialize(parser=config_lib.YamlParser, data=absolute_include)
    self.assertEqual(conf["Section1.int"], 1)

    relative_include = r"""
Config.includes:
  - 1.yaml
Section1.int: 2
"""
    conf = self._GetNewConf()
    # Can not include a relative path from config without a filename.
    self.assertRaises(config_lib.ConfigFileNotFound, conf.Initialize,
                      parser=config_lib.YamlParser, data=relative_include)

    # If we write it to a file it should work though.
    configtwo = os.path.join(temp_dir, "2.yaml")
    with open(configtwo, "wb") as fd:
      fd.write(relative_include)

    conf.Initialize(parser=config_lib.YamlParser, filename=configtwo)
    self.assertEqual(conf["Section1.int"], 1)