def testDownloadCollectionWithFoldersEntries(self):
  """Check we can download a collection that also references folders."""
  fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
  with data_store.DB.GetMutationPool() as pool:
    # One plain file entry and one directory entry (st_mode marks it a dir).
    file_entry = rdf_file_finder.FileFinderResult(
        stat_entry=rdf_client.StatEntry(
            pathspec=rdf_paths.PathSpec(path="testfile5", pathtype="OS")))
    dir_entry = rdf_file_finder.FileFinderResult(
        stat_entry=rdf_client.StatEntry(
            pathspec=rdf_paths.PathSpec(path="testdir1", pathtype="OS"),
            st_mode=stat.S_IFDIR))
    for entry in (file_entry, dir_entry):
      fd.Add(entry, mutation_pool=pool)

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(
        self.collection_urn,
        tmpdir,
        overwrite=True,
        dump_client_info=True,
        token=self.token,
        max_threads=2)

    # Check we found both files.
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    downloaded = os.listdir(expected_outdir)
    self.assertTrue("testfile5" in downloaded)
    self.assertTrue("testdir1" in downloaded)
def testGeneratesTarGzArchive(self):
  """Checks a hunt files archive is generated correctly in tar.gz format."""
  result = self.handler.Handle(
      hunt_plugin.ApiGetHuntFilesArchiveArgs(
          hunt_id=self.hunt.urn.Basename(), archive_format="TAR_GZ"),
      token=self.token)

  with utils.TempDirectory() as temp_dir:
    # Stream the generated archive to disk, then unpack it in place.
    tar_path = os.path.join(temp_dir, "archive.tar.gz")
    with open(tar_path, "wb") as fd:
      for chunk in result.GenerateContent():
        fd.write(chunk)

    with tarfile.open(tar_path) as tar_fd:
      tar_fd.extractall(path=temp_dir)

    # The MANIFEST file may be nested anywhere inside the unpacked tree.
    manifest_file_path = next(
        (os.path.join(parent, "MANIFEST")
         for parent, _, files in os.walk(temp_dir) if "MANIFEST" in files),
        None)
    self.assertTrue(manifest_file_path)

    with open(manifest_file_path, "rb") as fd:
      manifest = yaml.safe_load(fd.read())

    # All archive bookkeeping counters must match the hunt's 10 files.
    for key, count in [("archived_files", 10), ("failed_files", 0),
                       ("processed_files", 10), ("ignored_files", 0)]:
      self.assertEqual(manifest[key], count)
def SignGRRPyinstallerBinaries(self):
  """Signs the client binary in the target directory using codesign.

  Reads the signing certificate name and keychain file from the
  ClientBuilder.* config options. If no keychain file is configured, prints
  a notice and returns without signing. codesign is run against the binary
  inside a temporary, specially-named bundle directory, which is then moved
  back into place.
  """
  cert_name = config.CONFIG.Get(
      "ClientBuilder.signing_cert_name", context=self.context)
  keychain_file = config.CONFIG.Get(
      "ClientBuilder.signing_keychain_file", context=self.context)
  if not keychain_file:
    print("No keychain file specified in the config, skipping "
          "binaries signing...")
    return

  # BUG FIX: this was a Python-2-only `print` statement while the message
  # above already used the call form; use the call form consistently (it is
  # valid on both Python 2 and Python 3 for a single argument).
  print("Signing binaries with keychain: %s" % keychain_file)

  with utils.TempDirectory() as temp_dir:
    # codesign needs the directory name to adhere to a particular
    # naming format.
    bundle_dir = os.path.join(temp_dir,
                              "%s_%s" % (self.client_name, self.version))
    shutil.move(self.target_binary_dir, bundle_dir)
    temp_binary_path = os.path.join(
        bundle_dir, config.CONFIG.Get("Client.binary_name",
                                      context=self.context))
    subprocess.check_call([
        "codesign", "--verbose", "--deep", "--force", "--sign", cert_name,
        "--keychain", keychain_file, temp_binary_path
    ])
    shutil.move(bundle_dir, self.target_binary_dir)
def testExportFile(self):
  """Check we can export a file without errors."""
  with utils.TempDirectory() as tmpdir:
    export_utils.CopyAFF4ToLocal(
        self.out.Add("testfile1"), tmpdir, overwrite=True, token=self.token)
    # The file is exported under the client's VFS path inside tmpdir.
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertTrue("testfile1" in os.listdir(expected_outdir))
def testFileCasing(self):
  """Test our ability to read the correct casing from filesystem."""
  try:
    # Probe two wrong-cased variants; both succeeding means the filesystem
    # is case insensitive and the rest of the test is meaningless.
    os.lstat(os.path.join(self.base_path, "nUmBeRs.txt"))
    os.lstat(os.path.join(self.base_path, "nuMbErs.txt"))
    # If we reached this point we are on a case insensitive file system
    # and the tests below do not make any sense.
    logging.warning("Case insensitive file system detected. Skipping test.")
    return
  except (IOError, OSError):
    pass

  # Create 2 files with names that differ only in casing.
  with utils.TempDirectory() as temp_dir:
    path1 = os.path.join(temp_dir, "numbers.txt")
    shutil.copy(os.path.join(self.base_path, "numbers.txt"), path1)

    path2 = os.path.join(temp_dir, "numbers.TXT")
    shutil.copy(os.path.join(self.base_path, "numbers.txt.ver2"), path2)

    # Opening each file must report its exact on-disk casing.
    fd = vfs.VFSOpen(
        rdf_paths.PathSpec(
            path=path1, pathtype=rdf_paths.PathSpec.PathType.OS))
    self.assertEqual(fd.pathspec.Basename(), "numbers.txt")

    fd = vfs.VFSOpen(
        rdf_paths.PathSpec(
            path=path2, pathtype=rdf_paths.PathSpec.PathType.OS))
    self.assertEqual(fd.pathspec.Basename(), "numbers.TXT")

    # Open with a casing that matches neither file exactly.
    path = os.path.join(self.base_path, "Numbers.txt")
    fd = vfs.VFSOpen(
        rdf_paths.PathSpec(
            path=path, pathtype=rdf_paths.PathSpec.PathType.OS))
    read_path = fd.pathspec.Basename()

    # The exact file now is non deterministic but should be either of the two:
    if read_path != "numbers.txt" and read_path != "numbers.TXT":
      raise RuntimeError("read path is %s" % read_path)

    # Ensure that the produced pathspec specified no case folding:
    s = fd.Stat()
    self.assertEqual(s.pathspec.path_options,
                     rdf_paths.PathSpec.Options.CASE_LITERAL)

    # Case folding will only occur when requested - this should raise because
    # we have the CASE_LITERAL option:
    pathspec = rdf_paths.PathSpec(
        path=path,
        pathtype=rdf_paths.PathSpec.PathType.OS,
        path_options=rdf_paths.PathSpec.Options.CASE_LITERAL)
    self.assertRaises(IOError, vfs.VFSOpen, pathspec)
def testRecursiveDownload(self):
  """Check we can export a file without errors."""
  with utils.TempDirectory() as tmpdir:
    root = aff4.FACTORY.Open(self.out, token=self.token)
    export_utils.RecursiveDownload(root, tmpdir, overwrite=True)

    # The top-level file must have been downloaded...
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertTrue("testfile1" in os.listdir(expected_outdir))

    # ...as well as a file nested two directories deep.
    full_outdir = os.path.join(expected_outdir, "testdir1", "testdir2")
    self.assertTrue("testfile4" in os.listdir(full_outdir))
def testConfigFileInclusion(self):
  """Tests Config.includes resolution, including nested relative includes."""
  # NOTE(review): the YAML bodies below were reconstructed from a
  # whitespace-mangled source; line breaks are assumed — confirm against VCS.
  one = r"""
Config.includes:
  - 2.yaml

Section1.int: 1
"""
  two = r"""
SecondaryFileIncluded: true
Section1.int: 2
Config.includes:
  - subdir/3.yaml
"""
  three = r"""
TertiaryFileIncluded: true
Section1.int: 3
"""

  with utils.TempDirectory() as temp_dir:
    configone = os.path.join(temp_dir, "1.yaml")
    configtwo = os.path.join(temp_dir, "2.yaml")
    subdir = os.path.join(temp_dir, "subdir")
    os.makedirs(subdir)
    configthree = os.path.join(subdir, "3.yaml")
    with open(configone, "wb") as fd:
      fd.write(one)

    with open(configtwo, "wb") as fd:
      fd.write(two)

    with open(configthree, "wb") as fd:
      fd.write(three)

    # Using filename
    conf = self._GetNewConf()
    conf.Initialize(parser=config_lib.YamlParser, filename=configone)
    self._CheckConf(conf)

    # Using fd with no fd.name should raise because there is no way to resolve
    # the relative path.
    conf = self._GetNewConf()
    fd = StringIO.StringIO(one)
    self.assertRaises(
        config_lib.ConfigFileNotFound,
        conf.Initialize,
        parser=config_lib.YamlParser,
        fd=fd)

    # Using data
    conf = self._GetNewConf()
    self.assertRaises(
        config_lib.ConfigFileNotFound,
        conf.Initialize,
        parser=config_lib.YamlParser,
        data=one)
def testRepackAll(self):
  """Test repacking all binaries."""
  self.executables_dir = config_lib.Resource().Filter("executables")
  with utils.TempDirectory() as tmp_dir:
    new_dir = os.path.join(tmp_dir, "grr", "executables")
    os.makedirs(new_dir)

    # Copy unzipsfx so it can be used in repacking/
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-i386.exe"), new_dir)
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-amd64.exe"), new_dir)

    with test_lib.ConfigOverrider({
        "ClientBuilder.executables_dir": new_dir,
        "ClientBuilder.unzipsfx_stub_dir": new_dir
    }):
      repacking.TemplateRepacker().RepackAllTemplates()

    # Expected installer counts per platform/arch after a full repack.
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.deb"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.rpm"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.exe"))), 4)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.pkg"))), 1)

    # Validate the config appended to the OS X package.
    zf = zipfile.ZipFile(
        glob.glob(os.path.join(new_dir, "installers/*.pkg")).pop(), mode="r")
    fd = zf.open("config.yaml")

    # We can't load the included build.yaml because the package hasn't been
    # installed.
    loaded = yaml.safe_load(fd)
    loaded.pop("Config.includes")

    packaged_config = config.CONFIG.MakeNewConfig()
    packaged_config.Initialize(
        parser=config_lib.YamlParser, data=yaml.safe_dump(loaded))
    packaged_config.Validate(sections=build.ClientRepacker.CONFIG_SECTIONS)
    repacker = build.ClientRepacker()
    repacker.ValidateEndConfig(packaged_config)
def testDownloadCollectionIgnoresArtifactResultsWithoutFiles(self):
  """Entries that carry no files must not create an output directory."""
  # Create a collection with URNs to some files.
  fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
  with data_store.DB.GetMutationPool() as pool:
    fd.Add(collectors.ArtifactFilesDownloaderResult(), mutation_pool=pool)

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(
        self.collection_urn,
        tmpdir,
        overwrite=True,
        dump_client_info=True,
        token=self.token,
        max_threads=2)

    # With nothing downloadable, the client output dir is never created.
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    self.assertFalse(os.path.exists(expected_outdir))
def _VerifyDownload(self):
  """Downloads the collection and verifies files plus dumped client info."""
  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(
        self.collection_urn,
        tmpdir,
        overwrite=True,
        dump_client_info=True,
        token=self.token,
        max_threads=2)

    # Check we found both files.
    expected_outdir = os.path.join(tmpdir, self.out.Path()[1:])
    downloaded = os.listdir(expected_outdir)
    for name in ["testfile1", "testfile2", "testfile5", "testfile6"]:
      self.assertTrue(name in downloaded)

    # Check we dumped a YAML file to the root of the client.
    expected_rootdir = os.path.join(tmpdir, self.client_id.Basename())
    self.assertTrue("client_info.yaml" in os.listdir(expected_rootdir))
def testConfigFileIncludeAbsolutePaths(self):
  """Tests that absolute includes work while bare relative includes fail."""
  # NOTE(review): the YAML bodies below were reconstructed from a
  # whitespace-mangled source; line breaks are assumed — confirm against VCS.
  one = r"""
Section1.int: 1
"""
  with utils.TempDirectory() as temp_dir:
    configone = os.path.join(temp_dir, "1.yaml")
    with open(configone, "wb") as fd:
      fd.write(one)

    absolute_include = r"""
Config.includes:
  - %s

Section1.int: 2
""" % configone

    conf = self._GetNewConf()
    conf.Initialize(parser=config_lib.YamlParser, data=absolute_include)
    # The included file is loaded after the literal data and wins.
    self.assertEqual(conf["Section1.int"], 1)

    relative_include = r"""
Config.includes:
  - 1.yaml

Section1.int: 2
"""
    conf = self._GetNewConf()
    # Can not include a relative path from config without a filename.
    self.assertRaises(
        config_lib.ConfigFileNotFound,
        conf.Initialize,
        parser=config_lib.YamlParser,
        data=relative_include)

    # If we write it to a file it should work though.
    configtwo = os.path.join(temp_dir, "2.yaml")
    with open(configtwo, "wb") as fd:
      fd.write(relative_include)

    conf.Initialize(parser=config_lib.YamlParser, filename=configtwo)
    self.assertEqual(conf["Section1.int"], 1)
def testDownloadCollectionWithFlattenOption(self):
  """Check we can download files references in a collection."""
  # Create a collection with URNs to some files.
  fd = sequential_collection.GeneralIndexedCollection(self.collection_urn)
  with data_store.DB.GetMutationPool() as pool:
    # Three different entry flavors: a raw URN, a StatEntry and a
    # FileFinderResult.
    entries = [
        rdfvalue.RDFURN(self.out.Add("testfile1")),
        rdf_client.StatEntry(
            pathspec=rdf_paths.PathSpec(path="testfile2", pathtype="OS")),
        rdf_file_finder.FileFinderResult(
            stat_entry=rdf_client.StatEntry(
                pathspec=rdf_paths.PathSpec(
                    path="testfile5", pathtype="OS"))),
    ]
    for entry in entries:
      fd.Add(entry, mutation_pool=pool)

  with utils.TempDirectory() as tmpdir:
    export_utils.DownloadCollection(
        self.collection_urn,
        tmpdir,
        overwrite=True,
        dump_client_info=True,
        flatten=True,
        token=self.token,
        max_threads=2)

    # Check that "files" folder is filled with symlinks to downloaded files.
    symlinks = sorted(os.listdir(os.path.join(tmpdir, "files")))
    self.assertEqual(len(symlinks), 3)
    self.assertListEqual(symlinks, [
        "C.1000000000000000_fs_os_testfile1",
        "C.1000000000000000_fs_os_testfile2",
        "C.1000000000000000_fs_os_testfile5"
    ])
    link_path = os.path.join(tmpdir, "files",
                             "C.1000000000000000_fs_os_testfile1")
    self.assertEqual(
        os.readlink(link_path),
        os.path.join(tmpdir, "C.1000000000000000", "fs", "os", "testfile1"))
def testZipFileWithSymlink(self):
  """Test that symlinks are preserved when unpacking generated zips."""
  compressions = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED]
  for compression in compressions:
    outfd = StringIO.StringIO()

    infd1 = StringIO.StringIO("this is a test string")
    infd2 = StringIO.StringIO("this is another test string")
    with utils.StreamingZipWriter(outfd, compression=compression) as writer:
      writer.WriteFromFD(infd1, "test1.txt")
      writer.WriteFromFD(infd2, "subdir/test2.txt")
      writer.WriteSymlink("test1.txt", "test1.txt.link")
      writer.WriteSymlink("subdir/test2.txt", "test2.txt.link")

    with utils.TempDirectory() as temp_dir:
      # NOTE: the archive is also written to disk here; the assertions below
      # read it back from the in-memory buffer.
      zip_path = os.path.join(temp_dir, "archive.zip")
      with open(zip_path, "wb") as fd:
        fd.write(outfd.getvalue())

      # BUG FIX: the ZipFile handle was never closed (leaked once per
      # compression mode); it is now closed in a finally block. The mode
      # bits also used Python-2-only octal literals (0644, 0120000) —
      # rewritten as 0o644 / 0o120000, the same values in a spelling that
      # is valid on Python 2.6+ and Python 3.
      zip_fd = zipfile.ZipFile(outfd, "r")
      try:
        # external_attr carries the POSIX mode in the high 16 bits;
        # 0o120000 is the S_IFLNK file-type bit. create_system == 3
        # marks a Unix-created entry.
        link_info = zip_fd.getinfo("test1.txt.link")
        self.assertEqual(link_info.external_attr, (0o644 | 0o120000) << 16)
        self.assertEqual(link_info.create_system, 3)

        link_contents = zip_fd.read("test1.txt.link")
        self.assertEqual(link_contents, "test1.txt")

        link_info = zip_fd.getinfo("test2.txt.link")
        self.assertEqual(link_info.external_attr, (0o644 | 0o120000) << 16)
        self.assertEqual(link_info.create_system, 3)

        link_contents = zip_fd.read("test2.txt.link")
        self.assertEqual(link_contents, "subdir/test2.txt")
      finally:
        zip_fd.close()
def testConfigFileInclusionWithContext(self):
  """Tests that includes nested in a context apply only in that context."""
  # NOTE(review): the YAML bodies below were reconstructed from a
  # whitespace-mangled source; line breaks are assumed — confirm against VCS.
  one = r"""
Client Context:
  Config.includes:
    - 2.yaml

Section1.int: 1
"""
  two = r"""
Section1.int: 2
SecondaryFileIncluded: true
"""
  with utils.TempDirectory() as temp_dir:
    configone = os.path.join(temp_dir, "1.yaml")
    configtwo = os.path.join(temp_dir, "2.yaml")
    with open(configone, "wb") as fd:
      fd.write(one)

    with open(configtwo, "wb") as fd:
      fd.write(two)

    # Without specifying the context the includes are not processed.
    conf = self._GetNewConf()
    conf.Initialize(parser=config_lib.YamlParser, filename=configone)
    self.assertEqual(conf["Section1.int"], 1)

    # Only one config is loaded.
    self.assertEqual(conf.files, [configone])

    # Now we specify the context.
    conf = self._GetNewConf()
    conf.AddContext("Client Context")
    conf.Initialize(parser=config_lib.YamlParser, filename=configone)

    # Both config files were loaded. Note that load order is important and
    # well defined.
    self.assertEqual(conf.files, [configone, configtwo])
    self.assertEqual(conf["Section1.int"], 2)
def _GenerateFleetspeakServiceConfig(self, zip_file):
  """Validates the Fleetspeak service config and adds it to zip_file.

  Configs whose path ends in '.in' are interpolated through the GRR config
  system first (with backslashes naively re-escaped); others are shipped
  verbatim.
  """
  orig_fs_config_path = config.CONFIG["ClientBuilder.fleetspeak_config_path"]
  final_fs_config_fname = config.CONFIG[
      "Client.fleetspeak_unsigned_config_fname"]

  if not orig_fs_config_path.endswith(".in"):
    # Plain config: validate and package it as-is.
    self._ValidateFleetspeakServiceConfig(orig_fs_config_path)
    zip_file.write(orig_fs_config_path, final_fs_config_fname)
    return

  logging.info("Interpolating %s", orig_fs_config_path)
  logging.warning("Backslashes will be naively re-escaped after "
                  "interpolation. If this is not desired, use a Fleetspeak "
                  "config file without the '.in' extension.")
  with utils.TempDirectory() as temp_dir:
    temp_fs_config_path = os.path.join(temp_dir, final_fs_config_fname)
    with open(orig_fs_config_path, "rb") as source:
      interpolated = config.CONFIG.InterpolateValue(
          source.read(), context=self.context)
    with open(temp_fs_config_path, "wb") as dest:
      dest.write(re.sub(r"\\", r"\\\\", interpolated))
    self._ValidateFleetspeakServiceConfig(temp_fs_config_path)
    zip_file.write(temp_fs_config_path, final_fs_config_fname)
def GetClientConfig(self, context, validate=True, deploy_timestamp=True):
  """Generates the client config file for inclusion in deployable binaries.

  Args:
    context: Config context list used to resolve option values.
    validate: If True, run ValidateEndConfig (and any configured private
      validator) on the generated config before returning it.
    deploy_timestamp: If True, stamp Client.deploy_time with the current
      time.

  Returns:
    The serialized client config file contents as a string.
  """
  with utils.TempDirectory() as tmp_dir:
    # Make sure we write the file in yaml format.
    filename = os.path.join(
        tmp_dir,
        config.CONFIG.Get("ClientBuilder.config_filename", context=context))

    new_config = config.CONFIG.MakeNewConfig()
    new_config.Initialize(reset=True, data="")
    new_config.SetWriteBack(filename)

    # Only copy certain sections to the client. We enumerate all
    # defined options and then resolve those from the config in the
    # client's context. The result is the raw option as if the
    # client read our config file.
    client_context = context[:]
    while contexts.CLIENT_BUILD_CONTEXT in client_context:
      client_context.remove(contexts.CLIENT_BUILD_CONTEXT)
    for descriptor in sorted(config.CONFIG.type_infos, key=lambda x: x.name):
      if descriptor.name in self.SKIP_OPTION_LIST:
        continue

      if descriptor.section in self.CONFIG_SECTIONS:
        value = config.CONFIG.GetRaw(
            descriptor.name, context=client_context, default=None)

        if value is not None:
          logging.debug("Copying config option to client: %s",
                        descriptor.name)

          new_config.SetRaw(descriptor.name, value)

    if config.CONFIG.Get("ClientBuilder.fleetspeak_enabled", context=context):
      new_config.Set("Client.fleetspeak_enabled", True)

    if deploy_timestamp:
      new_config.Set("Client.deploy_time", str(rdfvalue.RDFDatetime.Now()))
    new_config.Write()

    if validate:
      self.ValidateEndConfig(new_config)

    # A deployment may ship a private validator class; failing to find a
    # configured one is an error.
    private_validator = config.CONFIG.Get(
        "ClientBuilder.private_config_validator_class", context=context)
    if private_validator:
      try:
        validator = config_validator_base.PrivateConfigValidator.classes[
            private_validator]()
      except KeyError:
        logging.error(
            "Couldn't find config validator class %s, "
            "you probably need to copy it into lib/local", private_validator)
        raise
      validator.ValidateEndConfig(new_config, self.context)

    return open(filename, "rb").read()
def MakeDeployableBinary(self, template_path, output_path):
  """This will add the config to the client template and create a .deb.

  Args:
    template_path: Path to the zipped client template.
    output_path: Destination path for the built .deb package.

  Returns:
    output_path on success, or None when dpkg-buildpackage is unavailable.
  """
  buildpackage_binary = "/usr/bin/dpkg-buildpackage"
  if not os.path.exists(buildpackage_binary):
    logging.error("dpkg-buildpackage not found, unable to repack client.")
    return

  with utils.TempDirectory() as tmp_dir:
    template_dir = os.path.join(tmp_dir, "dist")
    utils.EnsureDirExists(template_dir)

    # Unpack the client template into the working tree.
    zf = zipfile.ZipFile(template_path)
    for name in zf.namelist():
      dirname = os.path.dirname(name)
      utils.EnsureDirExists(os.path.join(template_dir, dirname))
      with open(os.path.join(template_dir, name), "wb") as fd:
        fd.write(zf.read(name))

    # Generate the dpkg files.
    self.GenerateDPKGFiles(tmp_dir)

    # Create a client config.
    client_context = ["Client Context"] + self.context
    client_config_content = self.GetClientConfig(client_context)

    # We need to strip leading /'s or .join will ignore everything that comes
    # before it.
    target_dir = config.CONFIG.Get(
        "ClientBuilder.target_dir", context=self.context).lstrip("/")
    agent_dir = os.path.join(
        template_dir, "debian",
        config.CONFIG.Get("ClientBuilder.package_name", context=self.context),
        target_dir)

    with open(
        os.path.join(
            agent_dir,
            config.CONFIG.Get(
                "ClientBuilder.config_filename", context=self.context)),
        "wb") as fd:
      fd.write(client_config_content)

    # Set the daemon to executable.
    # BUG FIX: 0755 was a Python-2-only octal literal; 0o755 is the same
    # value in a spelling valid on both Python 2.6+ and Python 3.
    os.chmod(
        os.path.join(
            agent_dir,
            config.CONFIG.Get("Client.binary_name", context=self.context)),
        0o755)

    arch = config.CONFIG.Get("Template.arch", context=self.context)

    # getcwd can fail if the current directory was deleted under us.
    try:
      old_working_dir = os.getcwd()
    except OSError:
      old_working_dir = os.environ.get("HOME", "/tmp")

    try:
      os.chdir(template_dir)
      command = [buildpackage_binary, "-uc", "-d", "-b", "-a%s" % arch]

      try:
        subprocess.check_output(command, stderr=subprocess.STDOUT)
      except subprocess.CalledProcessError as e:
        # Unsigned builds are acceptable; anything else is fatal.
        if "Failed to sign" not in e.output:
          logging.error("Error calling %s.", command)
          logging.error(e.output)
          raise

      filename_base = config.CONFIG.Get(
          "ClientBuilder.debian_package_base", context=self.context)
      output_base = config.CONFIG.Get(
          "ClientRepacker.output_basename", context=self.context)
    finally:
      try:
        os.chdir(old_working_dir)
      except OSError:
        pass

    utils.EnsureDirExists(os.path.dirname(output_path))

    # Move the built .deb and its .changes file to the output location.
    for extension in [
        ".changes",
        config.CONFIG.Get(
            "ClientBuilder.output_extension", context=self.context)
    ]:
      input_name = "%s%s" % (filename_base, extension)
      output_name = "%s%s" % (output_base, extension)
      shutil.move(
          os.path.join(tmp_dir, input_name),
          os.path.join(os.path.dirname(output_path), output_name))

    logging.info("Created package %s", output_path)
    return output_path
def MakeDeployableBinary(self, template_path, output_path):
  """This will add the config to the client template and create a .rpm.

  Args:
    template_path: Path to the zipped client template.
    output_path: Destination path for the built .rpm package.

  Returns:
    output_path on success, or None when rpmbuild is unavailable.
  """
  rpmbuild_binary = "/usr/bin/rpmbuild"
  if not os.path.exists(rpmbuild_binary):
    logging.error("rpmbuild not found, unable to repack client.")
    return

  with utils.TempDirectory() as tmp_dir:
    template_dir = os.path.join(tmp_dir, "dist")
    utils.EnsureDirExists(template_dir)

    # Unpack the client template into the working tree.
    zf = zipfile.ZipFile(template_path)
    for name in zf.namelist():
      dirname = os.path.dirname(name)
      utils.EnsureDirExists(os.path.join(template_dir, dirname))
      with open(os.path.join(template_dir, name), "wb") as fd:
        fd.write(zf.read(name))

    # Set up a RPM building environment.
    rpm_root_dir = os.path.join(tmp_dir, "rpmbuild")

    rpm_build_dir = os.path.join(rpm_root_dir, "BUILD")
    utils.EnsureDirExists(rpm_build_dir)

    rpm_buildroot_dir = os.path.join(rpm_root_dir, "BUILDROOT")
    utils.EnsureDirExists(rpm_buildroot_dir)

    rpm_rpms_dir = os.path.join(rpm_root_dir, "RPMS")
    utils.EnsureDirExists(rpm_rpms_dir)

    rpm_specs_dir = os.path.join(rpm_root_dir, "SPECS")
    utils.EnsureDirExists(rpm_specs_dir)

    template_binary_dir = os.path.join(tmp_dir, "dist/rpmbuild/grr-client")
    target_binary_dir = "%s%s" % (rpm_build_dir, config.CONFIG.Get(
        "ClientBuilder.target_dir", context=self.context))

    utils.EnsureDirExists(os.path.dirname(target_binary_dir))
    try:
      shutil.rmtree(target_binary_dir)
    except OSError:
      pass
    shutil.move(template_binary_dir, target_binary_dir)

    client_name = config.CONFIG.Get("Client.name", context=self.context)
    client_binary_name = config.CONFIG.Get(
        "Client.binary_name", context=self.context)
    if client_binary_name != "grr-client":
      shutil.move(
          os.path.join(target_binary_dir, "grr-client"),
          os.path.join(target_binary_dir, client_binary_name))

    # Generate spec
    spec_filename = os.path.join(rpm_specs_dir, "%s.spec" % client_name)
    self.GenerateFile(
        os.path.join(tmp_dir, "dist/rpmbuild/grr.spec.in"), spec_filename)

    # Generate init.d
    initd_target_filename = os.path.join(rpm_build_dir, "etc/init.d",
                                         client_name)
    utils.EnsureDirExists(os.path.dirname(initd_target_filename))
    self.GenerateFile(
        os.path.join(tmp_dir, "dist/rpmbuild/grr-client.initd.in"),
        initd_target_filename)

    # Generate systemd unit
    if config.CONFIG["Template.version_numeric"] >= 3125:
      systemd_target_filename = os.path.join(rpm_build_dir,
                                             "usr/lib/systemd/system/",
                                             "%s.service" % client_name)
      utils.EnsureDirExists(os.path.dirname(systemd_target_filename))
      self.GenerateFile(
          os.path.join(tmp_dir, "dist/rpmbuild/grr-client.service.in"),
          systemd_target_filename)

    # Generate prelinking blacklist file
    prelink_target_filename = os.path.join(
        rpm_build_dir, "etc/prelink.conf.d", "%s.conf" % client_name)
    utils.EnsureDirExists(os.path.dirname(prelink_target_filename))
    self.GenerateFile(
        os.path.join(tmp_dir, "dist/rpmbuild/prelink_blacklist.conf.in"),
        prelink_target_filename)

    # Create a client config.
    client_context = ["Client Context"] + self.context
    client_config_content = self.GetClientConfig(client_context)

    with open(
        os.path.join(
            target_binary_dir,
            config.CONFIG.Get(
                "ClientBuilder.config_filename", context=self.context)),
        "wb") as fd:
      fd.write(client_config_content)

    # Set the daemon to executable.
    # BUG FIX: 0755 was a Python-2-only octal literal; 0o755 is the same
    # value in a spelling valid on both Python 2.6+ and Python 3.
    os.chmod(os.path.join(target_binary_dir, client_binary_name), 0o755)

    client_arch = config.CONFIG.Get("Template.arch", context=self.context)
    if client_arch == "amd64":
      client_arch = "x86_64"

    command = [
        rpmbuild_binary, "--define", "_topdir " + rpm_root_dir, "--target",
        client_arch, "--buildroot", rpm_buildroot_dir, "-bb", spec_filename
    ]
    try:
      subprocess.check_output(command, stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
      logging.error("Error calling %s.", command)
      logging.error(e.output)
      raise

    client_version = config.CONFIG.Get(
        "Template.version_string", context=self.context)
    rpm_filename = os.path.join(
        rpm_rpms_dir, client_arch,
        "%s-%s-1.%s.rpm" % (client_name, client_version, client_arch))

    utils.EnsureDirExists(os.path.dirname(output_path))
    shutil.move(rpm_filename, output_path)

    logging.info("Created package %s", output_path)
    self.Sign(output_path)
    return output_path