def ManageBinaries(config=None, token=None):
  """Repack templates into installers."""
  print("\nStep 4: Repackaging clients with new configuration.")

  redownload_templates = False
  repack_templates = False
  if flags.FLAGS.noprompt:
    redownload_templates = flags.FLAGS.redownload_templates
    repack_templates = not flags.FLAGS.norepack_templates
  else:
    redownload_templates = RetryBoolQuestion(
        "Server debs include client templates. Re-download templates?", False)
    repack_templates = RetryBoolQuestion("Repack client templates?", True)

  if redownload_templates:
    InstallTemplatePackage()

  # Build debug binaries, then build release binaries.
  if repack_templates:
    repacking.TemplateRepacker().RepackAllTemplates(upload=True, token=token)

  print("\nInitialization complete, writing configuration.")
  config.Write()
  print("Please restart the service for it to take effect.\n\n")
def testRepackAll(self):
  """Test repacking all binaries."""
  self.executables_dir = config_lib.Resource().Filter("executables")
  with utils.TempDirectory() as tmp_dir:
    new_dir = os.path.join(tmp_dir, "grr", "executables")
    os.makedirs(new_dir)

    # Copy the unzipsfx stubs so they can be used in repacking.
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-i386.exe"), new_dir)
    shutil.copy(
        os.path.join(self.executables_dir,
                     "windows/templates/unzipsfx/unzipsfx-amd64.exe"), new_dir)

    with test_lib.ConfigOverrider({
        "ClientBuilder.executables_dir": new_dir,
        "ClientBuilder.unzipsfx_stub_dir": new_dir
    }):
      repacking.TemplateRepacker().RepackAllTemplates()

    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.deb"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.rpm"))), 2)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.exe"))), 4)
    self.assertEqual(
        len(glob.glob(os.path.join(new_dir, "installers/*.pkg"))), 1)

    # Validate the config appended to the OS X package.
    zf = zipfile.ZipFile(
        glob.glob(os.path.join(new_dir, "installers/*.pkg")).pop(), mode="r")
    fd = zf.open("config.yaml")

    # We can't load the included build.yaml because the package hasn't been
    # installed.
    loaded = yaml.safe_load(fd)
    loaded.pop("Config.includes")

    packaged_config = config.CONFIG.MakeNewConfig()
    packaged_config.Initialize(
        parser=config_lib.YamlParser, data=yaml.safe_dump(loaded))
    packaged_config.Validate(sections=build.ClientRepacker.CONFIG_SECTIONS)
    repacker = build.ClientRepacker()
    repacker.ValidateEndConfig(packaged_config)
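# Note (interpretation, not from the test itself): the expected counts above
# presumably correspond to one .deb and one .rpm per Linux architecture, four
# Windows .exe installers (release and debug builds for each architecture,
# cf. the debug/release repacking noted in ManageBinaries above), and a single
# OS X .pkg.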
def main(_):
  """Launch the appropriate builder."""
  grr_config.CONFIG.AddContext(contexts.CLIENT_BUILD_CONTEXT)

  args = flags.FLAGS
  if args.subparser_name == "generate_client_config":
    # We don't need a full init to just build a config.
    GetClientConfig(args.client_config_output)
    return

  # We deliberately use args.context because client_startup.py pollutes
  # grr_config.CONFIG.context with the running system context.
  context = args.context
  context.append(contexts.CLIENT_BUILD_CONTEXT)
  client_startup.ClientInit()

  # Use basic console output logging so we can see what is happening.
  logger = logging.getLogger()
  handler = logging.StreamHandler()
  handler.setLevel(logging.INFO)
  logger.handlers = [handler]

  if args.subparser_name == "build":
    if grr_config.CONFIG.Get("ClientBuilder.fleetspeak_enabled"):
      if "Target:Darwin" in context:
        if not args.fleetspeak_service_config:
          raise RuntimeError("--fleetspeak_service_config must be provided.")
        if not grr_config.CONFIG.Get("ClientBuilder.install_dir"):
          raise RuntimeError("ClientBuilder.install_dir must be set.")
        if not grr_config.CONFIG.Get("ClientBuilder.fleetspeak_plist_path"):
          raise RuntimeError("ClientBuilder.fleetspeak_plist_path must be set.")
      grr_config.CONFIG.Set("ClientBuilder.client_path",
                            "grr_response_client.grr_fs_client")
    TemplateBuilder().BuildTemplate(context=context, output=args.output)
  elif args.subparser_name == "repack":
    if args.debug_build:
      context.append("DebugClientBuild Context")
    result_path = repacking.TemplateRepacker().RepackTemplate(
        args.template,
        args.output_dir,
        context=context,
        sign=args.sign,
        signed_template=args.signed_template)
    if not result_path:
      raise ErrorDuringRepacking(" ".join(sys.argv[:]))
  elif args.subparser_name == "repack_multiple":
    # Resolve globs manually on Windows.
    templates = []
    for template in args.templates:
      if "*" in template:
        templates.extend(glob.glob(template))
      else:
        # This could go through glob but then we'd swallow errors for
        # non existing files.
        templates.append(template)

    repack_configs = []
    for repack_config in args.repack_configs:
      if "*" in repack_config:
        repack_configs.extend(glob.glob(repack_config))
      else:
        # This could go through glob but then we'd swallow errors for
        # non existing files.
        repack_configs.append(repack_config)

    MultiTemplateRepacker().RepackTemplates(
        repack_configs,
        templates,
        args.output_dir,
        config=args.config,
        sign=args.sign,
        signed_template=args.signed_template)
  elif args.subparser_name == "sign_template":
    repacking.TemplateRepacker().SignTemplate(
        args.template, args.output_file, context=context)
    if not os.path.exists(args.output_file):
      raise RuntimeError("Signing failed: output not written")
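# The two glob-expansion loops in the "repack_multiple" branch above are
# identical. A helper like the following (hypothetical sketch only, not part
# of this module) would capture that shared logic:
def _ExpandGlobs(paths):
  """Hypothetical sketch: expand '*' patterns, keep literal paths as-is."""
  expanded = []
  for path in paths:
    if "*" in path:
      expanded.extend(glob.glob(path))
    else:
      # Literal paths are kept untouched so missing files still raise later
      # instead of being silently dropped by glob.
      expanded.append(path)
  return expanded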
def RepackTemplates(self,
                    repack_configs,
                    templates,
                    output_dir,
                    config=None,
                    sign=False,
                    signed_template=False):
  """Call repacker in a subprocess."""
  pool = multiprocessing.Pool(processes=10)
  results = []
  bulk_sign_installers = False
  for repack_config in repack_configs:
    for template in templates:
      repack_args = ["grr_client_build"]
      if config:
        repack_args.extend(["--config", config])
      repack_args.extend([
          "--secondary_configs", repack_config, "repack", "--template",
          template, "--output_dir",
          self.GetOutputDir(output_dir, repack_config)
      ])

      # We only sign exes and rpms at the moment. The others will raise if we
      # try to ask for signing.
      passwd = None
      if sign:
        if template.endswith(".exe.zip"):
          # This is for osslsigncode only.
          if platform.system() != "Windows":
            passwd = self.GetWindowsPassphrase()
            repack_args.append("--sign")
          else:
            bulk_sign_installers = True
          if signed_template:
            repack_args.append("--signed_template")
        elif template.endswith(".rpm.zip"):
          bulk_sign_installers = True

      print("Calling %s" % " ".join(repack_args))
      results.append(
          pool.apply_async(SpawnProcess, (repack_args,), dict(passwd=passwd)))

      # Also build debug if it's windows.
      if template.endswith(".exe.zip"):
        debug_args = []
        debug_args.extend(repack_args)
        debug_args.append("--debug_build")
        print("Calling %s" % " ".join(debug_args))
        results.append(
            pool.apply_async(SpawnProcess, (debug_args,), dict(passwd=passwd)))

  try:
    pool.close()
    # Workaround to handle keyboard kills
    # http://stackoverflow.com/questions/1408356/keyboard-interrupts-with-pythons-multiprocessing-pool
    # get will raise if the child raises.
    for result_obj in results:
      result_obj.get(9999)
    pool.join()
  except KeyboardInterrupt:
    print("parent received control-c")
    pool.terminate()
  except ErrorDuringRepacking:
    pool.terminate()
    raise

  if bulk_sign_installers:
    to_sign = {}
    for root, _, files in os.walk(output_dir):
      for f in files:
        if f.endswith(".exe"):
          to_sign.setdefault("windows", []).append(os.path.join(root, f))
        elif f.endswith(".rpm"):
          to_sign.setdefault("rpm", []).append(os.path.join(root, f))

    if to_sign.get("windows"):
      signer = repacking.TemplateRepacker().GetSigner([
          "ClientBuilder Context",
          "Platform:%s" % platform.system(), "Target:Windows"
      ])
      signer.SignFiles(to_sign.get("windows"))

    if to_sign.get("rpm"):
      signer = repacking.TemplateRepacker().GetSigner([
          "ClientBuilder Context",
          "Platform:%s" % platform.system(), "Target:Linux", "Target:LinuxRpm"
      ])
      signer.AddSignatureToRPMs(to_sign.get("rpm"))
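# Illustrative example only (file names and paths are hypothetical): for
# repack_config="win.yaml" and template="GRR_amd64.exe.zip" with sign=True on
# a non-Windows host, the loop above spawns roughly:
#
#   grr_client_build --config <config> --secondary_configs win.yaml repack \
#       --template GRR_amd64.exe.zip --output_dir <GetOutputDir(...)> --sign
#
# plus a second invocation with --debug_build appended, since Windows
# templates are also repacked as debug builds.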
def main(argv):
  """Main."""
  del argv  # Unused.
  token = GetToken()
  grr_config.CONFIG.AddContext(contexts.COMMAND_LINE_CONTEXT)
  grr_config.CONFIG.AddContext(contexts.CONFIG_UPDATER_CONTEXT)

  if flags.FLAGS.subparser_name == "initialize":
    config_lib.ParseConfigCommandLine()
    if flags.FLAGS.noprompt:
      InitializeNoPrompt(grr_config.CONFIG, token=token)
    else:
      Initialize(grr_config.CONFIG, token=token)
    return

  server_startup.Init()

  try:
    print("Using configuration %s" % grr_config.CONFIG)
  except AttributeError:
    raise RuntimeError("No valid config specified.")

  if flags.FLAGS.subparser_name == "generate_keys":
    try:
      GenerateKeys(grr_config.CONFIG, overwrite_keys=flags.FLAGS.overwrite_keys)
    except RuntimeError as e:
      # GenerateKeys will raise if keys exist and overwrite_keys is not set.
      print("ERROR: %s" % e)
      sys.exit(1)
    grr_config.CONFIG.Write()

  elif flags.FLAGS.subparser_name == "repack_clients":
    upload = not flags.FLAGS.noupload
    repacking.TemplateRepacker().RepackAllTemplates(upload=upload, token=token)

  elif flags.FLAGS.subparser_name == "show_user":
    maintenance_utils.ShowUser(flags.FLAGS.username, token=token)

  elif flags.FLAGS.subparser_name == "update_user":
    try:
      maintenance_utils.UpdateUser(
          flags.FLAGS.username,
          flags.FLAGS.password,
          flags.FLAGS.add_labels,
          flags.FLAGS.delete_labels,
          token=token)
    except maintenance_utils.UserError as e:
      print(e)

  elif flags.FLAGS.subparser_name == "delete_user":
    maintenance_utils.DeleteUser(flags.FLAGS.username, token=token)

  elif flags.FLAGS.subparser_name == "add_user":
    labels = []
    if not flags.FLAGS.noadmin:
      labels.append("admin")
    if flags.FLAGS.labels:
      labels.extend(flags.FLAGS.labels)
    try:
      maintenance_utils.AddUser(
          flags.FLAGS.username, flags.FLAGS.password, labels, token=token)
    except maintenance_utils.UserError as e:
      print(e)

  elif flags.FLAGS.subparser_name == "upload_python":
    python_hack_root_urn = grr_config.CONFIG.Get("Config.python_hack_root")
    content = open(flags.FLAGS.file, "rb").read(1024 * 1024 * 30)
    aff4_path = flags.FLAGS.dest_path
    platform = flags.FLAGS.platform
    if not aff4_path:
      aff4_path = python_hack_root_urn.Add(platform.lower()).Add(
          os.path.basename(flags.FLAGS.file))
    if not str(aff4_path).startswith(str(python_hack_root_urn)):
      raise ValueError("AFF4 path must start with %s." % python_hack_root_urn)
    context = ["Platform:%s" % platform.title(), "Client Context"]
    maintenance_utils.UploadSignedConfigBlob(
        content, aff4_path=aff4_path, client_context=context, token=token)

  elif flags.FLAGS.subparser_name == "upload_exe":
    content = open(flags.FLAGS.file, "rb").read(1024 * 1024 * 30)
    context = ["Platform:%s" % flags.FLAGS.platform.title(), "Client Context"]

    if flags.FLAGS.dest_path:
      dest_path = rdfvalue.RDFURN(flags.FLAGS.dest_path)
    else:
      dest_path = grr_config.CONFIG.Get(
          "Executables.aff4_path", context=context).Add(
              os.path.basename(flags.FLAGS.file))

    # Now upload to the destination.
    maintenance_utils.UploadSignedConfigBlob(
        content, aff4_path=dest_path, client_context=context, token=token)
    print("Uploaded to %s" % dest_path)

  elif flags.FLAGS.subparser_name == "set_var":
    config = grr_config.CONFIG
    print("Setting %s to %s" % (flags.FLAGS.var, flags.FLAGS.val))
    if flags.FLAGS.val.startswith("["):  # Allow setting of basic lists.
flags.FLAGS.val = flags.FLAGS.val[1:-1].split(",") config.Set(flags.FLAGS.var, flags.FLAGS.val) config.Write() elif flags.FLAGS.subparser_name == "upload_raw": if not flags.FLAGS.dest_path: flags.FLAGS.dest_path = aff4.ROOT_URN.Add("config").Add("raw") uploaded = UploadRaw(flags.FLAGS.file, flags.FLAGS.dest_path, token=token) print("Uploaded to %s" % uploaded) elif flags.FLAGS.subparser_name == "upload_artifact": yaml.load(open(flags.FLAGS.file, "rb")) # Check it will parse. try: artifact.UploadArtifactYamlFile( open(flags.FLAGS.file, "rb").read(), overwrite=flags.FLAGS.overwrite_artifact) except rdf_artifacts.ArtifactDefinitionError as e: print("Error %s. You may need to set --overwrite_artifact." % e) elif flags.FLAGS.subparser_name == "delete_artifacts": artifact_list = flags.FLAGS.artifact if not artifact_list: raise ValueError("No artifact to delete given.") artifact_registry.DeleteArtifactsFromDatastore(artifact_list, token=token) print("Artifacts %s deleted." % artifact_list) elif flags.FLAGS.subparser_name == "download_missing_rekall_profiles": print("Downloading missing Rekall profiles.") s = rekall_profile_server.GRRRekallProfileServer() s.GetMissingProfiles() elif flags.FLAGS.subparser_name == "set_global_notification": notification = aff4_users.GlobalNotification( type=flags.FLAGS.type, header=flags.FLAGS.header, content=flags.FLAGS.content, link=flags.FLAGS.link) if flags.FLAGS.show_from: notification.show_from = rdfvalue.RDFDatetime( ).ParseFromHumanReadable(flags.FLAGS.show_from) if flags.FLAGS.duration: notification.duration = rdfvalue.Duration().ParseFromHumanReadable( flags.FLAGS.duration) print("Setting global notification.") print(notification) with aff4.FACTORY.Create( aff4_users.GlobalNotificationStorage.DEFAULT_PATH, aff4_type=aff4_users.GlobalNotificationStorage, mode="rw", token=token) as storage: storage.AddNotification(notification) elif flags.FLAGS.subparser_name == "rotate_server_key": print(""" You are about to rotate the server key. Note that: - Clients might experience intermittent connection problems after the server keys rotated. - It's not possible to go back to an earlier key. Clients that see a new certificate will remember the cert's serial number and refuse to accept any certificate with a smaller serial number from that point on. """) if raw_input("Continue? [yN]: ").upper() == "Y": if flags.FLAGS.keylength: keylength = int(flags.FLAGS.keylength) else: keylength = grr_config.CONFIG["Server.rsa_key_length"] maintenance_utils.RotateServerKey(cn=flags.FLAGS.common_name, keylength=keylength) elif flags.FLAGS.subparser_name == "migrate_data": data_migration.Migrate()