def SetUpBinaries(self):
    """Uploads one signed executable and one signed python hack fixture.

    The two uploads happen at distinct fake timestamps (42 and 43) so tests
    can distinguish the blobs by their recorded upload time.
    """
    with test_lib.FakeTime(42):
      executable_urn = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
          "windows/test.exe")
      maintenance_utils.UploadSignedConfigBlob(
          "I am a binary file".encode("utf-8"), aff4_path=executable_urn)
    with test_lib.FakeTime(43):
      hack_urn = signed_binary_utils.GetAFF4PythonHackRoot().Add("test")
      maintenance_utils.UploadSignedConfigBlob(
          "I'm a python hack".encode("utf-8"), aff4_path=hack_urn)
def SetUpBinaries(self):
    """Uploads signed test fixtures (an executable and a python hack).

    Uses distinct fake timestamps (42 and 43) so tests can tell the two
    uploads apart by their recorded upload time.
    """
    with test_lib.FakeTime(42):
      code = "I am a binary file"
      upload_path = config.CONFIG.Get("Config.aff4_root").Add(
          "executables/windows/test.exe")
      # UploadSignedConfigBlob stores raw bytes; encode the fixture payload
      # instead of passing a text string (consistent with the other setup
      # helpers in this file).
      maintenance_utils.UploadSignedConfigBlob(
          code.encode("utf-8"), aff4_path=upload_path, token=self.token)
    with test_lib.FakeTime(43):
      code = "I'm a python hack"
      upload_path = config.CONFIG.Get("Config.python_hack_root").Add("test")
      maintenance_utils.UploadSignedConfigBlob(
          code.encode("utf-8"), aff4_path=upload_path, token=self.token)
def testExecutePythonHack(self):
    """Checks that an uploaded python hack is executed on the client."""
    client_id = self.SetupClient(0)
    mock_client = action_mocks.ActionMock(standard.ExecutePython)

    # The hack flips a sentinel attribute on sys; the mocked client runs
    # in-process, so the side effect is directly observable here.
    sys.test_code_ran_here = False
    hack_source = """
import sys
sys.test_code_ran_here = True
"""
    maintenance_utils.UploadSignedConfigBlob(
        hack_source.encode("utf-8"),
        aff4_path="aff4:/config/python_hacks/test",
        token=self.token)

    flow_test_lib.TestFlowHelper(
        administrative.ExecutePythonHack.__name__,
        mock_client,
        client_id=client_id,
        hack_name="test",
        token=self.token)

    self.assertTrue(sys.test_code_ran_here)
def testExecuteBinaryWeirdOutput(self):
    """LaunchBinary must tolerate non-ASCII stdout and non-UTF8 stderr."""
    binary_path = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
        "foo.exe")
    maintenance_utils.UploadSignedConfigBlob(
        b"foobarbaz", aff4_path=binary_path)
    client_id = self.SetupClient(0)

    def FakeRun(self, args):
      del args  # Unused.
      # Unicode stdout plus stderr bytes that are not valid UTF-8.
      self.SendReply(
          rdf_client_action.ExecuteBinaryResponse(
              stdout="żółć %s gęślą {} jaźń # ⛷".encode("utf-8"),
              stderr=b"\x00\xff\x00\xff\x00",
              exit_status=0,
              time_used=0))

    with mock.patch.object(standard.ExecuteBinaryCommand, "Run", new=FakeRun):
      # The flow must complete without raising despite the odd output.
      flow_test_lib.TestFlowHelper(
          administrative.LaunchBinary.__name__,
          action_mocks.ActionMock(standard.ExecuteBinaryCommand),
          binary=binary_path,
          client_id=client_id,
          command_line="--bar --baz",
          token=self.token)
def testExecutePythonHackWithResult(self):
    """The hack's magic_return_str must surface as the flow's result."""
    client_id = db_test_utils.InitializeClient(data_store.REL_DB)

    # The hack echoes one of its py_args back via magic_return_str.
    hack_source = """
magic_return_str = str(py_args["foobar"])
"""
    maintenance_utils.UploadSignedConfigBlob(
        content=hack_source.encode("utf-8"),
        aff4_path="aff4:/config/python_hacks/quux")

    flow_id = flow_test_lib.TestFlowHelper(
        administrative.ExecutePythonHack.__name__,
        client_mock=action_mocks.ActionMock(standard.ExecutePython),
        client_id=client_id,
        hack_name="quux",
        py_args={"foobar": 42},
        token=self.token)
    flow_test_lib.FinishAllFlowsOnClient(client_id=client_id)

    results = flow_test_lib.GetFlowResults(client_id=client_id, flow_id=flow_id)
    self.assertLen(results, 1)
    self.assertIsInstance(results[0], administrative.ExecutePythonHackResult)
    self.assertEqual(results[0].result_string, "42")
def _UploadBinary(self, bin_name, server_path):
    """Uploads a binary from the GRR installation dir to the datastore."""
    # TODO(user): Upload binaries via the GRR API.
    logging.info("Uploading %s binary to server.", server_path)
    package_dir = package.ResourcePath("grr-response-test",
                                       "grr_response_test")
    local_path = os.path.join(package_dir, "test_data", bin_name)
    with open(local_path, "rb") as binary_file:
      blob_data = binary_file.read()
    maintenance_utils.UploadSignedConfigBlob(
        blob_data, "aff4:/config/executables/%s" % server_path)
def testScheduleLaunchExecutePythonHackFlow(self):
    """Schedules an ExecutePythonHack flow through the v2 UI flow form."""
    self._SetUpAdminUser()

    # Two hacks are uploaded so the form has to resolve 'test' to the right
    # one rather than simply picking the only available entry.
    maintenance_utils.UploadSignedConfigBlob(
        b'foo',
        aff4_path=signed_binary_utils.GetAFF4PythonHackRoot().Add(
            'windows/a.py'))
    maintenance_utils.UploadSignedConfigBlob(
        b'foo',
        aff4_path=signed_binary_utils.GetAFF4PythonHackRoot().Add(
            'windows/test.py'))

    self.Open(f'/v2/clients/{self.client_id}')

    # Without an approval the flow can only be scheduled, not started.
    self.WaitUntilContains('No access', self.GetText, 'css=client-overview')

    self.Type(
        'css=flow-form input[name=flowSearchBox]',
        'python',
        end_with_enter=True)
    self.Type(
        'css=flow-args-form input[name=hackName]', 'test', end_with_enter=True)

    # Add one key/value argument that should end up in py_args.
    self.Click('css=flow-args-form button:contains("Add argument")')
    self.Type('css=flow-args-form .key-input input', 'fookey')
    self.Type('css=flow-args-form .value-input input', 'foovalue')

    self.Click('css=flow-form button:contains("Schedule")')

    def GetFirstScheduledFlow():
      # Returns the single scheduled flow, or None until exactly one exists.
      scheduled_flows = _ListScheduledFlows(self.client_id, self.test_username)
      return scheduled_flows[0] if len(scheduled_flows) == 1 else None

    scheduled_flow = self.WaitUntil(GetFirstScheduledFlow)

    self.assertEqual(scheduled_flow.flow_name,
                     administrative.ExecutePythonHack.__name__)
    self.assertEqual(scheduled_flow.flow_args.hack_name, 'windows/test.py')
    self.assertEqual(scheduled_flow.flow_args.py_args['fookey'], 'foovalue')
def testExecuteLargeBinaries(self):
    """Runs LaunchBinary on a binary split into many signed parts."""
    client_mock = action_mocks.ActionMock(standard.ExecuteBinaryCommand)

    # Use a bytes literal: UploadSignedConfigBlob stores raw binary content
    # and the payload is later compared against the bytes the client executed
    # (the updated version of this test does the same).
    code = b"I am a large binary file" * 100
    upload_path = config.CONFIG["Executables.aff4_path"].Add("test.exe")
    # limit=100 forces the blob to be chunked into many signed parts.
    maintenance_utils.UploadSignedConfigBlob(
        code, aff4_path=upload_path, limit=100, token=self.token)

    # Ensure the aff4 collection has many items.
    fd = aff4.FACTORY.Open(upload_path, token=self.token)

    # Total size is 2400.
    self.assertEqual(len(fd), 2400)

    # There should be 24 parts to this binary.
    self.assertEqual(len(fd.collection), 24)

    # This flow has an acl, the user needs to be admin.
    user = aff4.FACTORY.Create(
        "aff4:/users/%s" % self.token.username,
        mode="rw",
        aff4_type=users.GRRUser,
        token=self.token)
    user.SetLabel("admin", owner="GRRTest")
    user.Close()

    with utils.Stubber(subprocess, "Popen", client_test_lib.Popen):
      flow_test_lib.TestFlowHelper(
          administrative.LaunchBinary.__name__,
          client_mock,
          client_id=test_lib.TEST_CLIENT_ID,
          binary=upload_path,
          command_line="--value 356",
          token=self.token)

      # Check that the executable file contains the code string.
      self.assertEqual(client_test_lib.Popen.binary, code)

      # At this point, the actual binary should have been cleaned up by the
      # client action so it should not exist.
      self.assertRaises(IOError, open, client_test_lib.Popen.running_args[0])

      # Check the binary was run with the correct command line.
      self.assertEqual(client_test_lib.Popen.running_args[1], "--value")
      self.assertEqual(client_test_lib.Popen.running_args[2], "356")

      # Check the command was in the tmp file.
      self.assertTrue(client_test_lib.Popen.running_args[0].startswith(
          config.CONFIG["Client.tempdir_roots"][0]))
def testExecuteLargeBinaries(self):
    """Runs LaunchBinary on a binary stored as many signed blobs."""
    client_mock = action_mocks.ActionMock(standard.ExecuteBinaryCommand)

    payload = b"I am a large binary file" * 100
    upload_path = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
        config.CONFIG["Client.platform"]).Add("test.exe")
    # A 100-byte limit per blob forces the payload into many parts.
    maintenance_utils.UploadSignedConfigBlob(
        payload, aff4_path=upload_path, limit=100, token=self.token)

    binary_urn = rdfvalue.RDFURN(upload_path)
    binary_size = signed_binary_utils.FetchSizeOfSignedBinary(
        binary_urn, token=self.token)
    blob_iterator, _ = signed_binary_utils.FetchBlobsForSignedBinary(
        binary_urn, token=self.token)

    # Total size is 2400.
    self.assertEqual(binary_size, 2400)

    # There should be 24 parts to this binary.
    self.assertLen(list(blob_iterator), 24)

    # This flow has an acl, the user needs to be admin.
    acl_test_lib.CreateAdminUser(self.token.username)

    with utils.Stubber(subprocess, "Popen", client_test_lib.Popen):
      flow_test_lib.TestFlowHelper(
          compatibility.GetName(administrative.LaunchBinary),
          client_mock,
          client_id=self.SetupClient(0),
          binary=upload_path,
          command_line="--value 356",
          token=self.token)

      # The client must have executed exactly the uploaded payload.
      self.assertEqual(client_test_lib.Popen.binary, payload)

      # The dropped binary is deleted by the client action after execution.
      self.assertRaises(IOError, open, client_test_lib.Popen.running_args[0])

      # The command line is tokenized into separate argv entries.
      self.assertEqual(client_test_lib.Popen.running_args[1], "--value")
      self.assertEqual(client_test_lib.Popen.running_args[2], "356")

      # The temporary binary was dropped under the client's tempdir root.
      self.assertStartsWith(client_test_lib.Popen.running_args[0],
                            config.CONFIG["Client.tempdir_roots"][0])
def testScheduleLaunchBinaryFlow(self):
    """Schedules a LaunchBinary flow through the v2 UI flow form."""
    self._SetUpAdminUser()

    # Two executables are uploaded so the form has to resolve 'test' to the
    # right binary rather than simply picking the only available entry.
    maintenance_utils.UploadSignedConfigBlob(
        b'foo',
        aff4_path=signed_binary_utils.GetAFF4ExecutablesRoot().Add(
            'windows/a.exe'))
    maintenance_utils.UploadSignedConfigBlob(
        b'foo',
        aff4_path=signed_binary_utils.GetAFF4ExecutablesRoot().Add(
            'windows/test.exe'))

    self.Open(f'/v2/clients/{self.client_id}')

    # Without an approval the flow can only be scheduled, not started.
    self.WaitUntilContains('No access', self.GetText, 'css=client-overview')

    self.Type(
        'css=flow-form input[name=flowSearchBox]',
        'binary',
        end_with_enter=True)
    self.Type(
        'css=flow-args-form input[name=binary]', 'test', end_with_enter=True)
    self.Type('css=flow-args-form input[name=commandLine]', '--foo --bar')

    self.Click('css=flow-form button:contains("Schedule")')

    def GetFirstScheduledFlow():
      # Returns the single scheduled flow, or None until exactly one exists.
      scheduled_flows = _ListScheduledFlows(self.client_id, self.test_username)
      return scheduled_flows[0] if len(scheduled_flows) == 1 else None

    scheduled_flow = self.WaitUntil(GetFirstScheduledFlow)

    self.assertEqual(scheduled_flow.flow_name,
                     administrative.LaunchBinary.__name__)
    self.assertEqual(scheduled_flow.flow_args.binary,
                     'aff4:/config/executables/windows/test.exe')
    self.assertEqual(scheduled_flow.flow_args.command_line, '--foo --bar')
def testUpdateClient(self):
    """UpdateClient must deliver the uploaded installer to the client."""
    client_mock = action_mocks.UpdateAgentClientMock()

    installer_blob = b"FakeGRRDebInstaller" * 20
    upload_path = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
        config.CONFIG["Client.platform"]).Add("test.deb")
    # limit=100 splits the installer into several signed parts.
    maintenance_utils.UploadSignedConfigBlob(
        installer_blob, aff4_path=upload_path, limit=100, token=self.token)

    # UpdateClient is ACL-protected; the requesting user must be an admin.
    acl_test_lib.CreateAdminUser(self.token.username)

    flow_test_lib.TestFlowHelper(
        administrative.UpdateClient.__name__,
        client_mock,
        client_id=self.SetupClient(0, system=""),
        blob_path=upload_path,
        token=self.token)
    self.assertEqual(client_mock.GetDownloadedFileContents(), installer_blob)
def testExecutePythonHackWithArgs(self):
    """Checks that py_args values are passed through to the executed hack."""
    client_mock = action_mocks.ActionMock(standard.ExecutePython)
    # Sentinel value: the hack overwrites it with py_args['value'].
    sys.test_code_ran_here = 1234
    code = """
import sys
sys.test_code_ran_here = py_args['value']
"""
    # UploadSignedConfigBlob stores raw bytes; encode the hack source instead
    # of passing a text string (consistent with testExecutePythonHack).
    maintenance_utils.UploadSignedConfigBlob(
        code.encode("utf-8"),
        aff4_path="aff4:/config/python_hacks/test",
        token=self.token)
    flow_test_lib.TestFlowHelper(
        administrative.ExecutePythonHack.__name__,
        client_mock,
        client_id=test_lib.TEST_CLIENT_ID,
        hack_name="test",
        py_args=dict(value=5678),
        token=self.token)
    self.assertEqual(sys.test_code_ran_here, 5678)
def testExecutePythonHackWithArgs(self):
    """Arguments in py_args must reach the executed hack code."""
    mock_client = action_mocks.ActionMock(standard.ExecutePython)
    client_id = self.SetupClient(0)

    # Sentinel value: the hack overwrites it with py_args['value'].
    sys.test_code_ran_here = 1234
    hack_source = "import sys\nsys.test_code_ran_here = py_args['value']\n"
    maintenance_utils.UploadSignedConfigBlob(
        hack_source.encode("utf-8"),
        aff4_path="aff4:/config/python_hacks/test")

    flow_test_lib.TestFlowHelper(
        administrative.ExecutePythonHack.__name__,
        mock_client,
        client_id=client_id,
        hack_name="test",
        py_args=dict(value=5678),
        creator=self.test_username)

    self.assertEqual(sys.test_code_ran_here, 5678)
def UploadSignedBinary(source_path,
                       binary_type,
                       platform,
                       upload_subdirectory="",
                       token=None):
  """Signs a binary and uploads it to the datastore.

  Args:
    source_path: Path to the binary to upload.
    binary_type: Type of the binary, e.g python-hack or executable.
    platform: Client platform where the binary is intended to be run.
    upload_subdirectory: Path of a subdirectory to upload the binary to,
      relative to the canonical path for binaries of the given type and
      platform.
    token: ACL token to use for uploading.

  Raises:
    BinaryTooLargeError: If the binary to upload is too large.
    ValueError: If the binary type is unknown.
  """
  # Pick the canonical root URN for the given binary type.
  if binary_type == rdf_objects.SignedBinaryID.BinaryType.PYTHON_HACK:
    root_urn = signed_binary_utils.GetAFF4PythonHackRoot()
  elif binary_type == rdf_objects.SignedBinaryID.BinaryType.EXECUTABLE:
    root_urn = signed_binary_utils.GetAFF4ExecutablesRoot()
  else:
    raise ValueError("Unknown binary type %s." % binary_type)

  # Reject oversized binaries before reading them into memory.
  file_size = os.path.getsize(source_path)
  if file_size > _MAX_SIGNED_BINARY_BYTES:
    raise BinaryTooLargeError(
        "File [%s] is of size %d (bytes), which exceeds the allowed maximum "
        "of %d bytes." % (source_path, file_size, _MAX_SIGNED_BINARY_BYTES))

  binary_urn = root_urn.Add(platform.lower()).Add(upload_subdirectory).Add(
      os.path.basename(source_path))
  client_context = ["Platform:%s" % platform.title(), "Client Context"]

  with open(source_path, "rb") as binary_file:
    binary_data = binary_file.read()
  maintenance_utils.UploadSignedConfigBlob(
      binary_data,
      aff4_path=binary_urn,
      client_context=client_context,
      token=token)
  print("Uploaded to %s" % binary_urn)
def testExecuteBinariesWithArgs(self):
    """LaunchBinary runs an uploaded binary with the given command line."""
    client_mock = action_mocks.ActionMock(standard.ExecuteBinaryCommand)

    payload = b"I am a binary file"
    upload_path = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
        config.CONFIG["Client.platform"]).Add("test.exe")
    maintenance_utils.UploadSignedConfigBlob(
        payload, aff4_path=upload_path, token=self.token)

    # This flow has an acl, the user needs to be admin.
    acl_test_lib.CreateAdminUser(self.token.username)

    with utils.Stubber(subprocess, "Popen", client_test_lib.Popen):
      flow_test_lib.TestFlowHelper(
          administrative.LaunchBinary.__name__,
          client_mock,
          client_id=self.SetupClient(0),
          binary=upload_path,
          command_line="--value 356",
          token=self.token)

      # The executed file must contain exactly the uploaded payload.
      self.assertEqual(client_test_lib.Popen.binary, payload)

      # The client action removes the dropped binary after running it.
      self.assertRaises(IOError, open, client_test_lib.Popen.running_args[0])

      # The command line is split into separate argv entries.
      self.assertEqual(client_test_lib.Popen.running_args[1], "--value")
      self.assertEqual(client_test_lib.Popen.running_args[2], "356")

      # The binary was dropped under the client's tempdir root.
      self.assertTrue(client_test_lib.Popen.running_args[0].startswith(
          config.CONFIG["Client.tempdir_roots"][0]))
def testUpdateClientSingleBlob(self):
    """With a large enough limit the installer is stored as a single blob."""
    client_mock = action_mocks.UpdateAgentClientMock()

    installer_blob = b"FakeGRRDebInstaller" * 20
    upload_path = signed_binary_utils.GetAFF4ExecutablesRoot().Add(
        config.CONFIG["Client.platform"]).Add("test.deb")
    # limit=1000 exceeds the payload size, so exactly one blob is written.
    maintenance_utils.UploadSignedConfigBlob(
        installer_blob, aff4_path=upload_path, limit=1000)

    blob_list, _ = signed_binary_utils.FetchBlobsForSignedBinaryByURN(
        upload_path)
    self.assertLen(list(blob_list), 1)

    # UpdateClient is ACL-protected; the requesting user must be an admin.
    acl_test_lib.CreateAdminUser(self.token.username)

    flow_test_lib.TestFlowHelper(
        compatibility.GetName(administrative.UpdateClient),
        client_mock,
        client_id=self.SetupClient(0, system=""),
        binary_path=os.path.join(config.CONFIG["Client.platform"], "test.deb"),
        token=self.token)
    self.assertEqual(client_mock.GetDownloadedFileContents(), installer_blob)
def RepackTemplate(self,
                   template_path,
                   output_dir,
                   upload=False,
                   token=None,
                   sign=False,
                   context=None,
                   signed_template=False):
  """Repack binaries based on the configuration.

  We repack all templates in the templates directory. We expect to find
  only functioning templates, all other files should be removed. Each
  template contains a build.yaml that specifies how it was built and how
  it should be repacked.

  Args:
    template_path: template path string
    output_dir: Output files will be put in this directory.
    upload: If specified we also upload the repacked binary into the
      datastore.
    token: Token to use when uploading to the datastore.
    sign: If true, we want to digitally sign the installer.
    context: Array of context strings
    signed_template: If true, the libraries in the template are already
      signed. This is only used for windows when repacking the template
      multiple times.

  Returns:
    A list of output installers generated.
  """
  orig_config = config.CONFIG
  repack_config = RepackConfig()
  print("Repacking template: %s" % template_path)
  # Temporarily swap in the template's own config; restored in `finally`.
  config.CONFIG = repack_config.GetConfigFromTemplate(template_path)

  result_path = None
  try:
    repack_context = config.CONFIG["Template.build_context"]
    if context:
      repack_context.extend(context)

    output_path = os.path.join(
        output_dir,
        config.CONFIG.Get(
            "ClientRepacker.output_filename", context=repack_context))

    print("Using context: %s and labels: %s" %
          (repack_context,
           config.CONFIG.Get("Client.labels", context=repack_context)))

    try:
      signer = None
      if sign:
        signer = self.GetSigner(repack_context)
      builder_obj = self.GetRepacker(context=repack_context, signer=signer)
      builder_obj.signed_template = signed_template
      result_path = builder_obj.MakeDeployableBinary(template_path,
                                                     output_path)
    except Exception:  # pylint: disable=broad-except
      # Repacking failures are reported but do not abort other templates.
      logging.exception("Repacking template %s failed:", template_path)

    if result_path:
      print("Repacked into %s" % result_path)
      if upload:
        # We delay import here so we don't have to import the entire server
        # codebase and do full server init if we're just building and
        # repacking clients. This codepath is used by config_updater
        # initialize
        # pylint: disable=g-import-not-at-top
        from grr_response_server import maintenance_utils
        # pylint: enable=g-import-not-at-top

        dest = config.CONFIG.Get(
            "Executables.installer", context=repack_context)
        # Use a context manager so the installer file handle is closed
        # instead of leaking (the read is capped at 100 MiB).
        with open(result_path, "rb") as installer_file:
          installer_data = installer_file.read(100 * 1024 * 1024)
        maintenance_utils.UploadSignedConfigBlob(
            installer_data,
            dest,
            client_context=repack_context,
            token=token)
    else:
      print("Failed to repack %s." % template_path)
  finally:
    config.CONFIG = orig_config

  return result_path
def main(argv):
  """Dispatches the config_updater subcommand selected on the command line."""
  del argv  # Unused.

  if flags.FLAGS.subparser_name == "version":
    version = config_server.VERSION["packageversion"]
    print("GRR configuration updater {}".format(version))
    return

  token = config_updater_util.GetToken()
  grr_config.CONFIG.AddContext(contexts.COMMAND_LINE_CONTEXT)
  grr_config.CONFIG.AddContext(contexts.CONFIG_UPDATER_CONTEXT)

  if flags.FLAGS.subparser_name == "initialize":
    config_lib.ParseConfigCommandLine()
    if flags.FLAGS.noprompt:
      config_updater_util.InitializeNoPrompt(grr_config.CONFIG, token=token)
    else:
      config_updater_util.Initialize(grr_config.CONFIG, token=token)
    return

  server_startup.Init()

  try:
    print("Using configuration %s" % grr_config.CONFIG)
  except AttributeError:
    raise RuntimeError("No valid config specified.")

  if flags.FLAGS.subparser_name == "generate_keys":
    try:
      config_updater_util.GenerateKeys(
          grr_config.CONFIG, overwrite_keys=flags.FLAGS.overwrite_keys)
    except RuntimeError as e:
      # GenerateKeys will raise if keys exist and overwrite_keys is not set.
      print("ERROR: %s" % e)
      sys.exit(1)
    grr_config.CONFIG.Write()

  elif flags.FLAGS.subparser_name == "repack_clients":
    upload = not flags.FLAGS.noupload
    repacking.TemplateRepacker().RepackAllTemplates(upload=upload, token=token)

  elif flags.FLAGS.subparser_name == "show_user":
    maintenance_utils.ShowUser(flags.FLAGS.username, token=token)

  elif flags.FLAGS.subparser_name == "update_user":
    try:
      maintenance_utils.UpdateUser(
          flags.FLAGS.username,
          flags.FLAGS.password,
          flags.FLAGS.add_labels,
          flags.FLAGS.delete_labels,
          token=token)
    except maintenance_utils.UserError as e:
      print(e)

  elif flags.FLAGS.subparser_name == "delete_user":
    maintenance_utils.DeleteUser(flags.FLAGS.username, token=token)

  elif flags.FLAGS.subparser_name == "add_user":
    labels = []
    if not flags.FLAGS.noadmin:
      labels.append("admin")
    if flags.FLAGS.labels:
      labels.extend(flags.FLAGS.labels)
    try:
      maintenance_utils.AddUser(
          flags.FLAGS.username, flags.FLAGS.password, labels, token=token)
    except maintenance_utils.UserError as e:
      print(e)

  elif flags.FLAGS.subparser_name == "upload_python":
    python_hack_root_urn = grr_config.CONFIG.Get("Config.python_hack_root")
    # Read (capped at 30 MiB) inside a context manager so the file handle is
    # closed instead of leaking.
    with open(flags.FLAGS.file, "rb") as f:
      content = f.read(1024 * 1024 * 30)
    aff4_path = flags.FLAGS.dest_path
    platform = flags.FLAGS.platform
    if not aff4_path:
      aff4_path = python_hack_root_urn.Add(platform.lower()).Add(
          os.path.basename(flags.FLAGS.file))
    if not str(aff4_path).startswith(str(python_hack_root_urn)):
      raise ValueError("AFF4 path must start with %s." % python_hack_root_urn)
    context = ["Platform:%s" % platform.title(), "Client Context"]
    maintenance_utils.UploadSignedConfigBlob(
        content, aff4_path=aff4_path, client_context=context, token=token)

  elif flags.FLAGS.subparser_name == "upload_exe":
    # Read (capped at 30 MiB) inside a context manager so the file handle is
    # closed instead of leaking.
    with open(flags.FLAGS.file, "rb") as f:
      content = f.read(1024 * 1024 * 30)
    context = [
        "Platform:%s" % flags.FLAGS.platform.title(), "Client Context"
    ]

    if flags.FLAGS.dest_path:
      dest_path = rdfvalue.RDFURN(flags.FLAGS.dest_path)
    else:
      dest_path = grr_config.CONFIG.Get(
          "Executables.aff4_path",
          context=context).Add(os.path.basename(flags.FLAGS.file))

    # Now upload to the destination.
    maintenance_utils.UploadSignedConfigBlob(
        content, aff4_path=dest_path, client_context=context, token=token)
    print("Uploaded to %s" % dest_path)

  elif flags.FLAGS.subparser_name == "set_var":
    config = grr_config.CONFIG
    print("Setting %s to %s" % (flags.FLAGS.var, flags.FLAGS.val))
    if flags.FLAGS.val.startswith("["):  # Allow setting of basic lists.
      flags.FLAGS.val = flags.FLAGS.val[1:-1].split(",")
    config.Set(flags.FLAGS.var, flags.FLAGS.val)
    config.Write()

  elif flags.FLAGS.subparser_name == "upload_raw":
    if not flags.FLAGS.dest_path:
      flags.FLAGS.dest_path = aff4.ROOT_URN.Add("config").Add("raw")
    uploaded = config_updater_util.UploadRaw(
        flags.FLAGS.file, flags.FLAGS.dest_path, token=token)
    print("Uploaded to %s" % uploaded)

  elif flags.FLAGS.subparser_name == "upload_artifact":
    # NOTE(review): yaml.load on an operator-supplied file; consider
    # yaml.safe_load unless custom YAML tags are actually required here.
    with open(flags.FLAGS.file, "rb") as f:
      yaml.load(f)  # Check it will parse.
    try:
      with open(flags.FLAGS.file, "rb") as f:
        artifact.UploadArtifactYamlFile(
            f.read(), overwrite=flags.FLAGS.overwrite_artifact)
    except rdf_artifacts.ArtifactDefinitionError as e:
      print("Error %s. You may need to set --overwrite_artifact." % e)

  elif flags.FLAGS.subparser_name == "delete_artifacts":
    artifact_list = flags.FLAGS.artifact
    if not artifact_list:
      raise ValueError("No artifact to delete given.")
    artifact_registry.DeleteArtifactsFromDatastore(artifact_list, token=token)
    print("Artifacts %s deleted." % artifact_list)

  elif flags.FLAGS.subparser_name == "download_missing_rekall_profiles":
    print("Downloading missing Rekall profiles.")
    s = rekall_profile_server.GRRRekallProfileServer()
    s.GetMissingProfiles()

  elif flags.FLAGS.subparser_name == "set_global_notification":
    notification = aff4_users.GlobalNotification(
        type=flags.FLAGS.type,
        header=flags.FLAGS.header,
        content=flags.FLAGS.content,
        link=flags.FLAGS.link)
    if flags.FLAGS.show_from:
      notification.show_from = rdfvalue.RDFDatetime().ParseFromHumanReadable(
          flags.FLAGS.show_from)
    if flags.FLAGS.duration:
      notification.duration = rdfvalue.Duration().ParseFromHumanReadable(
          flags.FLAGS.duration)

    print("Setting global notification.")
    print(notification)

    with aff4.FACTORY.Create(
        aff4_users.GlobalNotificationStorage.DEFAULT_PATH,
        aff4_type=aff4_users.GlobalNotificationStorage,
        mode="rw",
        token=token) as storage:
      storage.AddNotification(notification)

  elif flags.FLAGS.subparser_name == "rotate_server_key":
    print("""
You are about to rotate the server key. Note that:

  - Clients might experience intermittent connection problems after the server
    keys rotated.

  - It's not possible to go back to an earlier key. Clients that see a new
    certificate will remember the cert's serial number and refuse to accept any
    certificate with a smaller serial number from that point on.
    """)
    if builtins.input("Continue? [yN]: ").upper() == "Y":
      if flags.FLAGS.keylength:
        keylength = int(flags.FLAGS.keylength)
      else:
        keylength = grr_config.CONFIG["Server.rsa_key_length"]

      maintenance_utils.RotateServerKey(
          cn=flags.FLAGS.common_name, keylength=keylength)