def run_windows_cmd(self, remote_auth_transport, ps_extensions):
    """Execute the configured command on a remote Windows host over WinRM.

    Uses PowerShell when the script's file extension is one of the
    configured PowerShell extensions, otherwise plain cmd. The exit code
    and raw stdout/stderr are stored on the instance.

    :param remote_auth_transport: WinRM transport name (e.g. ntlm)
    :param ps_extensions: extensions that should run via run_ps
    """
    command_template = self.shell_command
    # Extension of the target script (command text is wrapped, so the
    # first/last characters are stripped before splitting on '.')
    file_ext = command_template[1:-1].strip().split('.')[-1]
    session = winrm.Session(self.remote_server,
                            auth=(self.remote_user, self.remote_password),
                            transport=remote_auth_transport,
                            server_cert_validation="ignore")
    self.commandline = render(command_template, self.rendered_shell_params)
    LOG.debug("Windows cmd: %s", self.commandline)
    if file_ext in ps_extensions:
        # PowerShell script: append each parameter placeholder, or $null
        # when that parameter was not supplied
        for param_name in ("shell_param1", "shell_param2", "shell_param3"):
            if self.rendered_shell_params[param_name]:
                command_template += ' "{{%s}}"' % param_name
            else:
                command_template += ' $null'
        self.commandline = render(command_template, self.rendered_shell_params)
        run_result = session.run_ps(self.commandline)
    else:
        self.commandline = render(command_template, self.rendered_shell_params)
        run_result = session.run_cmd(self.commandline)
    self.retcode = run_result.status_code
    self.stdoutdata = run_result.std_out
    self.stderrdata = run_result.std_err
def _do_incident_mapping(query_definition, event_message, metadata, response,
                         res_client, context_token, additional_map_data=None):
    """
    Update incident with query results as defined in mapping.

    :param query_definition: definition providing `vars`, `result_container`
        and `incident_mapping` (incident field -> value template)
    :param event_message: triggering event; expected to contain "incident"
    :param metadata: query result metadata merged into the render context
    :param response: query result rows (or a dict of result data when
        `result_container` is falsy)
    :param res_client: REST client used to GET/PUT the incident
    :param context_token: co3 context token passed through on the PUT
    :param additional_map_data: optional extra values for the render context
    :raises SimpleHTTPException: re-raised after logging if the update fails
    """
    incident = event_message.get("incident", {})
    incident_id = incident.get("id")
    # Map for rendering starts with the event (incident, etc).
    # Deep-copied so the merges below don't mutate the caller's event.
    mapdata = copy.deepcopy(event_message)
    if additional_map_data:
        mapdata.update(additional_map_data)
    # Add in any rendered vars
    mapdata.update(query_definition.vars)
    # Add in any query result metadata
    mapdata.update(metadata)
    # Add in the result data
    if query_definition.result_container:
        # We only use the first row returned to do incident updates.
        data = {"result": response[0]}
    else:
        data = response
    if data is not None:
        mapdata.update(data)
    # Render the mapping rules with the query results
    mapping = {}
    for key, value_template in query_definition.incident_mapping.items():
        # Note: each rendered value is passed through '_json' so that we can
        # produce text, lists, etc.
        # Values should generally be escaped with the '|json' filter.
        # If this doesn't work, just use the unescaped version.
        if isinstance(value_template, list):
            mapping[key] = [
                _json(template_functions.render(t, mapdata))
                for t in value_template
            ]
        else:
            mapping[key] = _json(
                template_functions.render(value_template, mapdata))
    # Update the incident (with awareness of field datatypes, if type coercion is needed)
    incident_fields = _get_incident_fields(res_client)
    try:
        # get_put re-fetches the incident and applies the update callback
        res_client.get_put("/incidents/{0}".format(incident_id),
                           lambda incident: _update_incident(
                               incident, mapping, incident_fields),
                           co3_context_token=context_token)
    except SimpleHTTPException:
        LOG.error("Failed to update incident with fields: %s",
                  json.dumps(mapping, indent=2))
        raise
def _do_iterate_per_result(query_definition, event_message, metadata, response,
                           datatable_locks, res_client, context_token,
                           additional_map_data=None):
    """Call additional mappings n times per result row.

    For every row, the configured count template is rendered against the
    row/metadata context to decide how many repetitions to run; each
    repetition calls `update_with_results` with the loop index bound to 'i'.
    """
    iterate_def = query_definition.iterate_per_result
    for row in response:
        # Build the render context: the row itself, then metadata and
        # any extra values layered on top
        render_context = {"result": row}
        render_context.update(metadata)
        if additional_map_data:
            render_context.update(additional_map_data)
        repetitions = int(
            template_functions.render(iterate_def.count_template,
                                      render_context))
        for index in range(repetitions):
            update_with_results(res_client, iterate_def, event_message,
                                [row], datatable_locks, context_token,
                                additional_map_data={'i': index})
def map_values(template_file, message_dict):
    """Render the jinja template at `template_file` with `message_dict`.

    :param template_file: path to the template file
    :param message_dict: values used to render the template
    :return: the rendered incident data
    """
    with open(template_file, 'r') as template_handle:
        log.debug("Message in dict form: {}".format(message_dict))
        template_text = template_handle.read()
    return template_functions.render(template_text, message_dict)
def map_values(self, template_file, message_dict):
    """Map a forwarded ICDx event onto new incident data.

    Imports the Jinja template at `template_file` and renders it with the
    event in `message_dict`, producing incident data (including artifact
    data).
    """
    log.debug("Attempting to map message to an IncidentDTO. Message provided : {}".format(message_dict))
    with open(template_file, 'r') as template_handle:
        template_text = template_handle.read()
    rendered_incident = template_functions.render(template_text, message_dict)
    log.debug(rendered_incident)
    return rendered_incident
def _map_values(template_file, message_dict):
    """
    Map values from jinja template.

    :param template_file: path to the jinja template file
    :param message_dict: values used to render the template
    :return: output_data: the rendered template content
    """
    # Fix: the original rebound the open file handle ('template') to the
    # template text, shadowing the context-manager variable.  Use distinct
    # names for the handle and the text.
    with open(template_file, 'r') as template_fh:
        log.debug("Message in dict format: %s", message_dict)
        template_text = template_fh.read()
    output_data = template_functions.render(template_text, message_dict)
    return output_data
def _shell_action(self, event, *args, **kwargs):
    """The @handler() annotation without an event name makes this
    a default handler - for all events on this component's queue.
    This will be called with some "internal" events from Circuits,
    so you must declare the method with the generic parameters
    (event, *args, **kwargs), and ignore any messages that are not
    from the Actions module.

    Looks up the commandline template and result disposition for the
    action, runs the command in a circuits task, and dispatches the
    result (a string) through the disposition.  Yields status strings.
    """
    if not isinstance(event, ActionMessage):
        # Some event we are not interested in
        return
    # Based on the action name,
    # find the commandline template for this action
    action_name = event.name
    action_template = self.options.get(action_name,
                                       self.options.get("command"))
    # Disposition can vary too; per-action setting wins over the
    # global 'result_disposition', defaulting to 'new_attachment'
    disposition_args = self.options.get(
        action_name + "_result_disposition",
        self.options.get("result_disposition", "new_attachment"))
    # The disposition arguments can use template features
    # Add a few convenience properties
    event.message["properties"] = event.message.get("properties") or {}
    event.message["action_name"] = action_name
    event.message["properties"]["_message_headers"] = event.hdr()
    disposition_args = template_functions.render(disposition_args,
                                                 event.message)
    # Construct the disposition
    result_disposition = Disposition(self.rest_client(), disposition_args)
    # Run the action based on the template and data;
    # the result is returned as a string.
    evt = task(_shell_run, action_template, event.message)
    LOG.info("shell: %s", action_name)
    ret = yield self.call(evt)
    result = ret.value
    if isinstance(result, list):
        # results from circuits tasks come back wrapped in a list
        result = result[0]
    if isinstance(result, Exception):
        # _shell_run returns (not raises) exceptions; re-raise here
        raise result
    if result is None:
        LOG.debug("No result.")
        yield "No result."
    else:
        # Process the result according to the chosen disposition
        LOG.debug("Result: %s", result)
        result_disposition.call(event, result)
        yield "Found result"
def render_file_mapping(file_mapping_dict, data, source_dir, target_dir):
    """
    Walk each value in the "rendered" file-mapping dictionary, and create the target files.

    Nesting in the 'target' dictionary represents the target directory structure.
    Source values are the full path to a source file.
    Each source file is treated as a JINJA2 template, and rendered using the data provided.

    :param file_mapping_dict: {"target": "source"...}
    :param data: the data for JINJA rendering of each source file
    :param source_dir: path to the root of the source files
    :param target_dir: path where the target files and directories should be written
    """
    for (key, value) in sorted(file_mapping_dict.items()):
        if not key:
            LOG.error(u"Cannot render empty target for %s", value)
            continue
        # The key is a directory-name or filename,
        # optionally followed by a '@xxx' where 'xxx' is a variable that the
        # template needs, such as a loop-variable.  Split this out if present.
        loopvar = None
        if "@" in key:
            split = key.split("@", 1)
            key = split[0]
            loopvar = split[1]
            # NOTE(review): 'data' is mutated in place and 'loopvar' is only
            # written when the key contains '@' -- a value set for one key
            # persists into later keys without '@'.  Confirm this is intended.
            data["loopvar"] = loopvar
        #
        if isinstance(value, dict):
            # This is a subdirectory
            subdir = os.path.join(target_dir, key)
            try:
                os.mkdir(subdir)
            except OSError as exc:
                # Directory may already exist; log and continue
                LOG.warn(exc)
            render_file_mapping(value, data, source_dir, subdir)
        else:
            target_file = os.path.join(target_dir, key)
            source_file = os.path.join(source_dir, value)
            if os.path.exists(target_file):
                # Never overwrite an existing target file
                LOG.error(u"Not writing %s: file exists.", target_file)
                continue
            # Render the source file as a JINJA template
            LOG.debug(u"Writing %s from template %s", target_file, source_file)
            LOG.info(u"Writing %s", target_file)
            with io.open(source_file, 'r', encoding="utf-8") as source:
                source_template = source.read()
                source_rendered = template_functions.render(
                    source_template, data)
            with io.open(target_file, mode="w", encoding="utf-8") as outfile:
                outfile.write(source_rendered)
def run_remote_linux(self):
    """Run the rendered shell command on a remote Linux host over SSH.

    On success, stores the exit code and decoded stdout/stderr on the
    instance.  On failure the error is logged and the output attributes
    are left unset.
    """
    self.commandline = render(self.shell_command, self.rendered_shell_params)
    LOG.debug("Remote cmd: %s", self.commandline)
    # initialize the SSH client
    client = paramiko.SSHClient()
    # add to known hosts
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(hostname=self.remote_server,
                       username=self.remote_user,
                       password=self.remote_password)
        stdin, stdout, stderr = client.exec_command(self.commandline)
        self.stdoutdata = stdout.read().decode()
        self.stderrdata = stderr.read().decode()
        # Fix: the exit status lives on the channel, not on SSHClient.
        # The original called client.recv_exit_status(), which raised
        # AttributeError and made every run report failure.
        self.retcode = stdout.channel.recv_exit_status()
    except Exception:
        # Fix: narrowed from a bare 'except:' (which also swallowed
        # KeyboardInterrupt/SystemExit), and the log arguments were
        # swapped relative to the format string.
        LOG.error("Unable to run cmd: %s on remote server: %s",
                  self.commandline, self.remote_server)
    finally:
        # Fix: always release the SSH connection
        client.close()
def build_incident_dto(alert, custom_temp_file=None):
    """Render the incident-mapping template for the given alert.

    Uses `custom_temp_file` when provided, otherwise falls back to the
    packaged default template.  Returns the rendered text, or None when
    the template has a syntax error (which is logged).
    """
    here = os.path.dirname(os.path.realpath(__file__))
    template_file = custom_temp_file or join(
        here, pardir, "data/templates/msg_incident_mapping.jinja")
    try:
        with open(template_file, 'r') as template_handle:
            log.debug("Reading template file")
            return template_functions.render(template_handle.read(), alert)
    except jinja2.exceptions.TemplateSyntaxError:
        log.info("'incident_template' is not set correctly in config file.")
def mkdescription(self, data, kind, threat_id, classification):
    """Build the Incident description as a text-format value.

    Mutates `data` in place with kind/id/classification before rendering
    the configured threat template.  Template syntax errors are logged
    and re-raised.
    """
    data.update({
        u'kind': kind,
        u'id': threat_id,
        u'classification': classification,
    })
    try:
        content = template_functions.render(self.threat_template, data)
    except jinja2.exceptions.TemplateSyntaxError as err:
        log.info(u'threat template is not set correctly in config file {}'.
                 format(err))
        raise err
    return {'format': 'text', 'content': content}
def _shell_run(action_template, action_data):
    """Render and run the `action_template` command.

    :param action_template: JINJA template of the command line to execute
    :param action_data: dict used to render the template; also written as
        JSON to a temp file whose path is exported as $EVENTDATA
    :return: decoded stdout on success, or the Exception on failure
        (exceptions are returned, not raised, so the circuits task
        wrapper can hand them back to the caller)
    """
    try:
        # Resolve the commandline by rendering the commandline template
        commandline = template_functions.render(action_template, action_data)
        # Write a temporary file containing the action_data
        with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_file:
            temp_filename = temp_file.name
            temp_file.write(json.dumps(action_data, indent=2, sort_keys=True))
        env = os.environ.copy()
        env["EVENTDATA"] = temp_filename
        # Execute the command line process (NOT in its own shell)
        commandline = os.path.expandvars(commandline)
        LOG.info("Run: %s (%s)", commandline, temp_filename)
        cmd = shlex.split(commandline, posix=True)
        call = subprocess.Popen(cmd,
                                shell=False,
                                stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                env=env)
        stdoutdata, stderrdata = call.communicate()
        retcode = call.returncode
        output = stderrdata.decode()
        # Nonzero exit code indicates error; leave the temp file in place
        # to ease debugging the script.
        # Fix: use '!= 0' instead of 'is not 0' -- identity comparison with
        # an int literal relies on CPython small-int caching and emits a
        # SyntaxWarning on Python 3.8+.
        if retcode != 0:
            raise OSError("'{}' failed: {}".format(cmd[0], output))
        result = stdoutdata.decode()
        # Clean up the temporary file
        os.remove(temp_filename)
    except Exception as exc:
        # Return (don't raise)
        return exc
    # Return the results
    LOG.debug("Run result: %s", result)
    return result
def run_local_cmd(self):
    """Run the rendered shell command locally (NOT in its own shell).

    Stores the exit code, raw stdout/stderr bytes, and the decoded
    stdout text (`self.result`) on the instance.
    """
    rendered = render(self.shell_command, self.rendered_shell_params)
    self.commandline = os.path.expandvars(rendered)
    LOG.debug("local cmd: %s", self.commandline)
    # Child inherits a copy of the current environment
    child_env = os.environ.copy()
    argv = shlex.split(self.commandline, posix=True)
    proc = subprocess.Popen(argv,
                            shell=False,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            env=child_env)
    self.stdoutdata, self.stderrdata = proc.communicate()
    self.retcode = proc.returncode
    # Decode stdout using the detected encoding, defaulting to UTF-8
    detected_encoding = chardet.detect(self.stdoutdata)["encoding"] or "utf-8"
    self.result = self.stdoutdata.decode(detected_encoding)
def build_incident_dto(self, rc, headers, case_id):
    """Render the ESM incident-mapping template with the case details.

    The template path comes from the 'incident_template' option, falling
    back to the packaged default.  Returns the rendered text, or None
    when the template has a syntax error (which is logged).
    """
    here = os.path.dirname(os.path.realpath(__file__))
    fallback = join(here, pardir, "data/templates/esm_incident_mapping.jinja")
    template_file = self.options.get("incident_template", fallback)
    try:
        with open(template_file, 'r') as template_handle:
            log.debug("Reading template file")
            case_details = case_get_case_detail(rc, self.options, headers,
                                                case_id)
            log.debug("Case details in dict form: {}".format(case_details))
            return template_functions.render(template_handle.read(),
                                             case_details)
    except jinja2.exceptions.TemplateSyntaxError:
        log.info(
            "'incident_template' is not set correctly in config file.")
def render_template(self, template, mapdata):
    """Render a JINJA template (with our custom filters) against mapdata."""
    rendered = template_functions.render(template, mapdata)
    return rendered
def _shell_command_function(self, event, *args, **kwargs):
    """Function: Runs a shell command.

    Looks up the command template in self.options by name, renders it
    with the escaped parameters, runs it locally via subprocess, and
    yields a FunctionResult with timing, exit code and stdout/stderr
    (plus JSON-decoded variants when the output parses as JSON).
    """
    try:
        # Get the function parameters:
        shell_command = self.get_select_param(
            kwargs.get("shell_command")
        )  # select, values: "cmdscan", "netscan", "sockscan", "malfind"
        shell_param1 = kwargs.get("shell_param1")  # text
        shell_param2 = kwargs.get("shell_param2")  # text
        shell_param3 = kwargs.get("shell_param3")  # text

        log = logging.getLogger(__name__)
        log.info("shell_command: %s", shell_command)
        log.info("shell_param1: %s", shell_param1)
        log.info("shell_param2: %s", shell_param2)
        log.info("shell_param3: %s", shell_param3)

        # Escape the input parameters (filter name comes from config,
        # defaulting to shell-style escaping)
        escaping = self.options.get("shell_escaping", "sh")
        escaped_args = {
            "shell_param1": render(u"{{shell_param1|%s}}" % escaping, kwargs),
            "shell_param2": render(u"{{shell_param2|%s}}" % escaping, kwargs),
            "shell_param3": render(u"{{shell_param3|%s}}" % escaping, kwargs)
        }

        # Substitute parameters into the shell command
        if shell_command not in self.options:
            yield FunctionError(
                u"Command is not configured: '{}'".format(shell_command))
            return
        shell_command_base = self.options[shell_command]
        commandline = render(shell_command_base, escaped_args)
        commandline = os.path.expandvars(commandline)
        yield StatusMessage(u"Running: {}".format(commandline))

        # Set up the environment
        env = os.environ.copy()
        # Execute the command line process (NOT in its own shell)
        cmd = shlex.split(commandline, posix=True)
        tstart = time.time()
        call = subprocess.Popen(cmd,
                                shell=False,
                                stderr=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                env=env)
        stdoutdata, stderrdata = call.communicate()
        retcode = call.returncode
        tend = time.time()
        # Detect the output encoding, defaulting to UTF-8
        encoding = chardet.detect(stdoutdata)["encoding"] or "utf-8"

        result = stdoutdata.decode(encoding)
        result_json = None
        try:
            # Let's see if the output can be decoded as JSON
            result_json = json.loads(result)
        except:
            pass

        output = stderrdata.decode(encoding)
        output_json = None
        try:
            # Let's see if the output can be decoded as JSON
            output_json = json.loads(output)
        except:
            pass

        results = {
            "commandline": commandline,
            "start": int(tstart * 1000.0),
            "end": int(tend * 1000.0),
            "elapsed": int((tend - tstart) * 1000.0),
            "exitcode": retcode,  # Nonzero exit code indicates error
            "stdout": result,
            "stderr": output,
            "stdout_json": result_json,  # May be null
            "stderr_json": output_json  # May be null
        }
        # Produce a FunctionResult with the results
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()
def _docker_run_docker_container_function(self, event, *args, **kwargs):
    """Function: Create a Docker Container from an image, feed an input to
    the container and then return the results.

    The input is either artifact text (docker_input) or an attachment,
    which is downloaded and written to the image's bind volume.  After the
    container finishes, its logs/stats are gathered and uploaded back as a
    new attachment.  Yields StatusMessage progress updates and finally a
    FunctionResult.
    """
    try:
        # Get the function parameters:
        artifact_id = kwargs.get("artifact_id")  # number
        attachment_id = kwargs.get("attachment_id")  # number
        incident_id = kwargs.get("incident_id")  # number
        task_id = kwargs.get("task_id")  # number
        docker_image = self.get_select_param(
            kwargs.get("docker_image")
        )  # select, values: "volatility", "nsrl", "plaso", "bloodhound"
        docker_input = kwargs.get("docker_input")  # text
        docker_operation = kwargs.get("docker_operation")  # text
        docker_artifact_type = kwargs.get("docker_artifact_type")
        attachment_name = None  # Initialise attachment name as none
        payload = ResultPayload("fn_docker", **kwargs)

        log = logging.getLogger(__name__)
        log.info("artifact_id: %s", artifact_id)
        log.info("attachment_id: %s", attachment_id)
        log.info("incident_id: %s", incident_id)
        log.info("task_id: %s", task_id)
        log.info("docker_image: %s", docker_image)
        log.info("docker_input: %s", docker_input)
        log.info("docker_artifact_type: %s", docker_artifact_type)
        log.info("docker_operation: %s", docker_operation)

        helper = ResDockerHelper(self.options)
        # An app.config image setting takes priority over the function input
        image_to_use = helper.get_config_option(
            self.options, "docker_image", True) or docker_image

        # Prepare the args which will be rendered into the app.config cmd
        escaped_args = {
            "docker_input": render(u"{{docker_input|%s}}" % "sh", kwargs),
        }
        attachment_file_name = None  # Initialise filename as None to avoid reference errors

        # Check whether we are dealing with an attachment or artifact
        if (artifact_id or attachment_id or task_id) and docker_input is None:
            log.debug(
                "Input appears to be an attachment, attempting to parse.")
            yield StatusMessage("Downloading Attachment from REST API")
            # Get the files data
            attachment_input = resilient_lib.get_file_attachment(
                incident_id=incident_id, artifact_id=artifact_id,
                attachment_id=attachment_id, task_id=task_id,
                res_client=self.rest_client())
            # Get the files name
            attachment_name = resilient_lib.get_file_attachment_name(
                incident_id=incident_id, artifact_id=artifact_id,
                attachment_id=attachment_id, task_id=task_id,
                res_client=self.rest_client())
            # Get the external directory in which to save the file
            output_vol = helper.get_config_option(
                options=self.all_options.get('{}{}'.format(
                    CONFIGSECTIONPREFIX, image_to_use)),
                option_name="primary_source_dir",
                optional=True)
            log.debug("Writing attachment to bind folder")
            if os.path.isdir(output_vol):
                # Convert to named temp file
                with tempfile.NamedTemporaryFile(
                        delete=False,
                        prefix=DOCKERATTACHMENTPREFIX,
                        dir=output_vol) as temp_file:
                    try:
                        temp_file.write(attachment_input)
                        os.chmod(temp_file.name, 0o666)
                        temp_file.close()
                    finally:
                        attachment_file_name = os.path.split(
                            temp_file.name)[1]
                        log.debug("Saving file to %s", temp_file.name)
                # Add a attachment_input arg to be rendered into the cmd command
                escaped_args.update({
                    "attachment_input": render(
                        "{{attachment_input|%s}}" % "sh",
                        {u"attachment_input": attachment_file_name}),
                })
                yield StatusMessage(
                    u"Added this as an Attachment Input: {}".format(
                        attachment_name))
            else:
                errMsg = u"""Could not write file to directory, does the directory {0} exist? If not create it with mkdir {0}""".format(output_vol)
                raise FunctionError(errMsg)
        else:
            # We are not dealing with an attachment
            log.debug("Working with an artifact")

        docker_interface = DockerUtils()
        # Decide whether to use local connection or remote
        docker_interface.setup_docker_connection(options=self.options)

        # Ensure the specified image is an approved one
        if image_to_use not in helper.get_config_option(
                self.options, "docker_approved_images").split(","):
            raise ValueError(
                "Image is not in list of approved images. Review your app.config")

        # Gather the command to send to the image and format
        # docker_extra_kwargs for any image specific volumes
        command, docker_extra_kwargs, image_fullname, docker_operation = \
            docker_interface.gather_image_args_and_volumes(
                helper, image_to_use, self.all_options, escaped_args,
                docker_operation)

        # Fix: both values were previously packed into a single tuple
        # argument, so the logging module could not format this record
        # ("not enough arguments for format string").
        log.info("Command: %s \n Volume Bind: %s",
                 command, docker_extra_kwargs.get('volumes', "No Volumes"))
        # Now Get the Image
        docker_interface.get_image(image_fullname)
        # Get the Client
        docker_client = docker_interface.get_client()

        yield StatusMessage("Now starting container with input")
        try:
            # Run container using client
            container = docker_client.containers.run(
                image=image_fullname,
                command=render(command, escaped_args),
                detach=True,  # Detach from container
                remove=False,  # Removed manually after gathering info
                **docker_extra_kwargs)
            container_stats = docker_interface.gather_container_stats(
                container_id=container.id)
            container_id = container.id
            # Gather the logs as they happen, until the container finishes.
            container_logs = container.logs(follow=True)
            yield StatusMessage("Container has finished and logs gathered")
            # Attempt to remove the container now we have finished.  Will
            # throw an exception if the container has already been removed.
            container_status = container.wait()
            container.remove()
        except requests.exceptions.HTTPError as request_exception:
            yield StatusMessage(
                u"""Encountered issue when trying to remove container: {} \n {}""".format(
                    request_exception,
                    u"""If you supplied an extra app.config value to remove the container this is expected."""))

        timestamp_epoch = int(time.time() * 1000)
        # Setup tempfile to write back the attachment
        with tempfile.NamedTemporaryFile(mode="w+t",
                                         delete=False) as temp_upload_file:
            try:
                new_attachment_name = helper.format_result_attachment_name(
                    image_to_use, container_id)
                # Write and close tempfile
                temp_upload_file.write(
                    helper.format_output_attachment_body(
                        container_id, docker_operation, attachment_file_name,
                        docker_artifact_type, docker_input,
                        container_logs.decode('utf-8'), timestamp_epoch))
                temp_upload_file.close()
                # Access Resilient API
                client = self.rest_client()
                # Create POST uri
                # ..for a task, if task_id is defined
                if task_id:
                    attachment_uri = '/tasks/{}/attachments'.format(task_id)
                # ...else for an attachment
                else:
                    attachment_uri = '/incidents/{}/attachments'.format(
                        incident_id)
                # POST the new attachment
                new_attachment = client.post_attachment(
                    attachment_uri,
                    temp_upload_file.name,
                    filename=new_attachment_name,
                    mimetype='text/plain')
            except Exception as upload_exception:
                err_msg = u"""Encountered issue when saving results to a file and uploading via REST API. Exception: {0}""".format(upload_exception)
                raise FunctionError(err_msg)
            finally:
                os.unlink(temp_upload_file.name)

        results = payload.done(
            # If container had no errors, 0 will be returned.
            # Use a falsey check to ensure we get 0 else False
            success=True if not container_status.get("StatusCode", 1) else False,
            content={
                "logs": container_logs.decode('utf-8'),
                "container_exit_status": container_status,
                "container_stats": container_stats,
                "container_id": container_id,
                "res_links": {
                    "res_object": helper.prepare_res_link(
                        host=self.host_config[0],
                        incident_id=incident_id,
                        task_id=task_id)
                },
                "attachment_name": attachment_name or None,
            })
        results["metrics"]["timestamp_epoch"] = timestamp_epoch
        # Update the docker_operation input to ensure we have captured the operation done, if any
        results["inputs"]["docker_operation"] = docker_operation
        # Produce a FunctionResult with the results using the FunctionPayload
        yield FunctionResult(results)
        log.debug("RESULTS: %s", results)
        log.info("Complete")
    except Exception:
        yield FunctionError()
    finally:
        # Best-effort cleanup of the attachment temp file written to the
        # bind volume; NameError means no temp file was ever created.
        try:
            os.unlink(temp_file.name)
        except NameError:
            log.debug(
                "Error when trying to unlink file, appears file does not exist."
            )
        else:
            log.debug("Successfully cleaned up file")
def gather_image_args_and_volumes(self, helper, image_to_use, all_options,
                                  escaped_args, docker_operation=None):
    """
    A helper function used to gather the command to be run aswell as format
    the kwargs to used on run.
    Depending on the image, there may exist the need for a specific type of
    volume binding.
    You have options in how you can set this value. You can either set it in
    fn_docker app.config section.
    Or if you intend to run multiple images, which each require their own
    separate volume binding you can instead configure an app.config section
    for each image where you can specify the internal and external volume
    primary volume bind.

    :param helper: ResDockerHelper used to read per-image config options
    :param image_to_use: short name of the image (selects the config section)
    :param all_options: all app.config sections
    :param escaped_args: dict of shell-escaped template args; mutated in
        place with 'internal_vol' and 'operation'
    :param docker_operation: optional operation from the function input;
        takes priority over the config-file operation
    :return: (rendered command, docker run kwargs, full image name, operation)
    :raises ValueError: if the operation is not in the approved list
    """
    LOG.debug(
        all_options.get('{}{}'.format(CONFIGSECTIONPREFIX, image_to_use)))
    # The command template the container will run
    command = helper.get_config_option(options=all_options.get(
        '{}{}'.format(CONFIGSECTIONPREFIX, image_to_use)),
        option_name="cmd")
    # External (host) side of the primary volume bind
    output_vol = helper.get_config_option(options=all_options.get(
        '{}{}'.format(CONFIGSECTIONPREFIX, image_to_use)),
        option_name="primary_source_dir",
        optional=True)
    # Internal (container) side of the primary volume bind
    internal_vol = helper.get_config_option(options=all_options.get(
        '{}{}'.format(CONFIGSECTIONPREFIX, image_to_use)),
        option_name="primary_dest_dir",
        optional=True)
    docker_config_operation = helper.get_config_option(
        options=all_options.get('{}{}'.format(CONFIGSECTIONPREFIX,
                                              image_to_use)),
        option_name="cmd_operation",
        optional=True)
    approved_operations = helper.get_config_option(
        options=all_options.get('{}{}'.format(CONFIGSECTIONPREFIX,
                                              image_to_use)),
        option_name="{}_approved_operations".format(image_to_use),
        optional=True)
    image_fullname = helper.get_config_option(options=all_options.get(
        '{}{}'.format(CONFIGSECTIONPREFIX, image_to_use)),
        option_name="docker_image",
        optional=True)
    docker_extra_kwargs = self.parse_extra_kwargs(options=all_options.get(
        '{}{}'.format(CONFIGSECTIONPREFIX, image_to_use)))

    # Primary bind only when both sides are configured
    container_volume_bind = {
        output_vol: {
            'bind': internal_vol,
            'mode': 'rw'
        }
    } if output_vol and internal_vol else dict()

    if docker_extra_kwargs.get('volumes', False):
        LOG.debug(
            "Found a Volume in Extra Kwargs. Appending to existing volume definition"
        )
        # Split the volumes string by commas to get each volume binding
        for volume in docker_extra_kwargs.get('volumes').split(','):
            # Split the volume into each of its params
            volume_params = volume.split(':')
            # First extra volume doubles as the internal volume when no
            # primary_dest_dir was configured
            if volume == docker_extra_kwargs.get('volumes').split(
                    ',')[0] and not internal_vol:
                internal_vol = volume_params[1]
            # Format the volume data into a dict and update.
            container_volume_bind.update({
                volume_params[0]: {
                    'bind': volume_params[1],
                    'mode': volume_params[2]
                }
            })
        # After we finish looping
        del docker_extra_kwargs['volumes']  # Remove the volume
    docker_extra_kwargs['volumes'] = container_volume_bind

    # If config is not set at all for approved operations, all operations are approved
    # Ensure the operation that will be done is an approved one
    # An operation set as a function field will take priority over one set as
    # a app.config to enable multiple workflows with different operations
    operation = docker_operation or docker_config_operation
    # NOTE(review): approved_operations is the raw option string, so this is
    # a substring test, not a membership test against a list -- confirm.
    if approved_operations and operation not in approved_operations:
        raise ValueError(
            u"Operation is not found in the list of approved operations. Review your app.config and add {} to the approved_operations for the {} image to fix this. "
            .format(operation, image_to_use))
    escaped_args.update({
        "internal_vol": render("{{internal_vol|%s}}" % "sh",
                               {"internal_vol": internal_vol}),
        "operation": render("{{operation|%s}}" % "sh",
                            {"operation": operation})
    })
    return render(
        command,
        escaped_args), docker_extra_kwargs, image_fullname, operation
def _shell_command_function(self, event, *args, **kwargs):
    """Function: Runs a shell command.

    Dispatches to RunCmd for local execution, or for remote execution on
    Windows (config path wrapped in []) or Linux (wrapped in ()), based
    on the shell_remote flag and the command's configuration.
    """
    try:
        # Get the function parameters:
        shell_command = kwargs.get('shell_command')  # text
        shell_remote = kwargs.get("shell_remote")  # boolean
        shell_param1 = kwargs.get("shell_param1")  # text
        shell_param2 = kwargs.get("shell_param2")  # text
        shell_param3 = kwargs.get("shell_param3")  # text

        LOG.info("shell_command: %s", shell_command)
        LOG.info("shell_remote: %s", shell_remote)
        LOG.info("shell_param1: %s", shell_param1)
        LOG.info("shell_param2: %s", shell_param2)
        LOG.info("shell_param3: %s", shell_param3)

        # Options keys are lowercase, so the shell command name needs to be lowercase
        if shell_command:
            shell_command = shell_command.lower()

        # Escape the input parameters
        escaping = self.options.get("shell_escaping", "sh")
        rendered_shell_params = {
            "shell_param1": render(u"{{shell_param1|%s}}" % escaping, kwargs),
            "shell_param2": render(u"{{shell_param2|%s}}" % escaping, kwargs),
            "shell_param3": render(u"{{shell_param3|%s}}" % escaping, kwargs)
        }

        # If running a remote script, get the remote computer and the remote command
        if shell_remote:
            colon_split = shell_command.split(':')
            if len(colon_split) != 2:
                raise ValueError("Remote commands must be of the format remote_command_name:remote_computer_name, "
                                 "'%s' was specified" % shell_command)
            else:
                shell_command = colon_split[0].strip()
                if self.options.get(colon_split[1]) is None:
                    raise ValueError('The remote computer %s is not configured' % colon_split[1])
                else:
                    remote = self.options.get(colon_split[1]).strip()
                    # Remote computer configs are wrapped in parentheses
                    if remote.startswith('(') and remote.endswith(')'):
                        remote = remote[1:-1]
                    else:
                        raise ValueError('Remote computer configurations must be wrapped in parentheses (), '
                                         "%s was specified" % remote)

        # Check if command is configured
        if shell_command not in self.options:
            if ':' in shell_command:
                raise ValueError("Syntax for a remote command '%s' was used but remote_shell was set to False" % shell_command)
            raise ValueError('%s command not configured' % shell_command)

        shell_command_base = self.options[shell_command].strip()

        # Remote commands must wrap a path with []
        if shell_remote:
            # Windows remote cmd (path wrapped in [])
            if shell_command_base.startswith('[') and shell_command_base.endswith(']'):
                run_cmd = RunCmd(remote, shell_command_base[1:-1].strip(), rendered_shell_params)
                run_cmd.run_windows_cmd(self.options.get('remote_auth_transport'),
                                        self.options.get('remote_powershell_extensions', '').strip(","))
            # linux remote cmd
            elif shell_command_base.startswith('(') and shell_command_base.endswith(')'):
                run_cmd = RunCmd(remote, shell_command_base[1:-1].strip(), rendered_shell_params)
                run_cmd.run_remote_linux()
            else:
                raise ValueError('A remote command must specify a remote path wrapped in square brackets [] for Windows and parentheses () for Linux, '
                                 "'%s' was specified" % shell_command)
        # local command
        else:
            run_cmd = RunCmd(None, shell_command_base, rendered_shell_params)
            run_cmd.run_local_cmd()

        yield FunctionResult(run_cmd.make_result())
    except Exception:
        yield FunctionError()
def _shell_command_function(self, event, *args, **kwargs):
    """Function: Runs a shell command.

    Runs either a local command (subprocess) or a remote Windows command
    (WinRM), based on the shell_remote flag and the command/computer
    configuration in self.options.  Yields a FunctionResult with timing,
    exit code and stdout/stderr (plus JSON-decoded variants when the
    output parses as JSON).
    """
    try:
        # Get the function parameters:
        shell_command = kwargs.get('shell_command')  # text
        shell_remote = kwargs.get("shell_remote")  # boolean
        shell_param1 = kwargs.get("shell_param1")  # text
        shell_param2 = kwargs.get("shell_param2")  # text
        shell_param3 = kwargs.get("shell_param3")  # text

        log = logging.getLogger(__name__)
        log.info("shell_command: %s", shell_command)
        log.info("shell_remote: %s", shell_remote)
        log.info("shell_param1: %s", shell_param1)
        log.info("shell_param2: %s", shell_param2)
        log.info("shell_param3: %s", shell_param3)

        # Options keys are lowercase, so the shell command name needs to be lowercase
        if shell_command:
            shell_command = shell_command.lower()

        # Escape the input parameters
        escaping = self.options.get("shell_escaping", "sh")
        escaped_args = {
            "shell_param1": render(u"{{shell_param1|%s}}" % escaping, kwargs),
            "shell_param2": render(u"{{shell_param2|%s}}" % escaping, kwargs),
            "shell_param3": render(u"{{shell_param3|%s}}" % escaping, kwargs)
        }

        # If running a remote script, get the remote computer and the remote command
        if shell_remote:
            colon_split = shell_command.split(':')
            if len(colon_split) != 2:
                raise ValueError(
                    "Remote commands must be of the format remote_command_name:remote_computer_name, "
                    "'%s' was specified" % shell_command)
            else:
                shell_command = colon_split[0].strip()
                if self.options.get(colon_split[1]) is None:
                    raise ValueError(
                        'The remote computer %s is not configured' %
                        colon_split[1])
                else:
                    remote = self.options.get(colon_split[1]).strip()
                    if remote.startswith('(') and remote.endswith(')'):
                        remote = remote[1:-1]
                    else:
                        raise ValueError(
                            'Remote computer configurations must be wrapped in parentheses (), '
                            "%s was specfied" % remote)

            # Get remote credentials
            remote_config = re.split(':|@', remote)
            if len(remote_config) != 3:
                # Fix: the message previously contained two '%s'
                # placeholders but only one argument, so raising it
                # crashed with a TypeError instead of this ValueError.
                raise ValueError(
                    'Remote machine must be of the format username:password@server, '
                    "'%s' was specified" % remote)
            else:
                remote_user = remote_config[0]
                remote_password = remote_config[1]
                remote_server = remote_config[2]

        # Check if command is configured
        if shell_command not in self.options:
            if ':' in shell_command:
                raise ValueError(
                    "Syntax for a remote command '%s' was used but remote_shell was set to False"
                    % shell_command)
            raise ValueError('%s command not configured' % shell_command)

        shell_command_base = self.options[shell_command].strip()

        # Remote commands must wrap a path with []
        if shell_command_base.startswith(
                '[') and shell_command_base.endswith(']'):
            if shell_remote:
                extension = shell_command_base[1:-1].strip().split('.')[-1]
                # NOTE(review): substring test against the raw option
                # string; raises TypeError if the option is unset -- confirm
                if extension not in self.options.get(
                        'remote_powershell_extensions'):
                    raise ValueError(
                        "The specified file must be have extension %s but %s was specified"
                        % (str(
                            self.options.get('remote_powershell_extensions'
                                             )), extension))
                # Format shell parameters: pass each one positionally, or
                # $null when it was not supplied
                shell_command_base = shell_command_base[1:-1].strip()
                if shell_param1:
                    shell_command_base = shell_command_base + ' "{{shell_param1}}"'
                else:
                    shell_command_base = shell_command_base + ' $null'
                if shell_param2:
                    shell_command_base = shell_command_base + ' "{{shell_param2}}"'
                else:
                    shell_command_base = shell_command_base + ' $null'
                if shell_param3:
                    shell_command_base = shell_command_base + ' "{{shell_param3}}"'
                else:
                    shell_command_base = shell_command_base + ' $null'
            else:
                raise ValueError(
                    "A remote command '%s' was specified but shell_remote was set to False"
                    % shell_command)
        elif shell_remote:
            raise ValueError(
                'A remote command must specify a remote path wrapped in square brackets [], '
                "'%s' was specified" % shell_command)

        if shell_command_base.startswith(
                '(') and shell_command_base.endswith(
                    ')') and not shell_remote:
            # Fix: the two concatenated literals had no separating space
            # ("bracketswhen").
            raise ValueError(
                'Please specify a valid shell command that is not wrapped in parentheses or brackets '
                'when shell_remote is False')

        commandline = render(shell_command_base, escaped_args)

        if shell_remote:
            # Run on the remote Windows host over WinRM
            session = winrm.Session(
                remote_server,
                auth=(remote_user, remote_password),
                transport=self.options.get('remote_auth_transport'))
            tstart = time.time()
            if escaping == "sh":
                r = session.run_cmd(commandline)
            elif escaping == "ps":
                r = session.run_ps(commandline)
            retcode = r.status_code
            stdoutdata = r.std_out
            stderrdata = r.std_err
            tend = time.time()
        else:
            commandline = os.path.expandvars(commandline)
            # Set up the environment
            env = os.environ.copy()
            # Execute the command line process (NOT in its own shell)
            cmd = shlex.split(commandline, posix=True)
            tstart = time.time()
            call = subprocess.Popen(cmd,
                                    shell=False,
                                    stderr=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    env=env)
            stdoutdata, stderrdata = call.communicate()
            retcode = call.returncode
            tend = time.time()

        # Detect the output encoding, defaulting to UTF-8
        encoding = chardet.detect(stdoutdata)["encoding"] or "utf-8"

        result = stdoutdata.decode(encoding)
        result_json = None
        try:
            # Let's see if the output can be decoded as JSON
            result_json = json.loads(result)
        except:
            pass

        output = stderrdata.decode(encoding)
        output_json = None
        try:
            # Let's see if the output can be decoded as JSON
            output_json = json.loads(output)
        except:
            pass

        results = {
            "commandline": commandline,
            "start": int(tstart * 1000.0),
            "end": int(tend * 1000.0),
            "elapsed": int((tend - tstart) * 1000.0),
            "exitcode": retcode,  # Nonzero exit code indicates error
            "stdout": result,
            "stderr": output,
            "stdout_json": result_json,  # May be null
            "stderr_json": output_json  # May be null
        }
        yield FunctionResult(results)
    except Exception:
        yield FunctionError()