def _save_html(self, html_file, name, container_id):
    """Write *html_file* to a temp file in the vault tmp dir and add it to the vault.

    Args:
        html_file: HTML content as bytes or str (a str is encoded as UTF-8
            before writing, since os.write requires bytes).
        name: File name to give the vault attachment.
        container_id: ID of the container the attachment belongs to.

    Returns:
        (phantom.APP_SUCCESS, None) on success,
        (phantom.APP_ERROR, message) on failure.
    """
    # Newer platform versions expose the vault tmp dir via the API; fall
    # back to the legacy hard-coded path on older installs.
    if hasattr(Vault, 'get_vault_tmp_dir'):
        tmp_dir = Vault.get_vault_tmp_dir()
    else:
        tmp_dir = '/opt/phantom/vault/tmp'
    fd, path = tempfile.mkstemp(dir=tmp_dir, text=True)
    try:
        # os.write only accepts bytes; encode str payloads so a text
        # HTML body does not raise TypeError.
        if isinstance(html_file, str):
            html_file = html_file.encode('utf-8')
        os.write(fd, html_file)
    finally:
        # Always release the descriptor, even if the write fails.
        os.close(fd)
    success, message, vault_id = ph_rules.vault_add(
        container_id, path, name)
    if success:
        return phantom.APP_SUCCESS, None
    return phantom.APP_ERROR, message
    def _handle_post_url(self, param):
        """Capture a screenshot of the URL given in *param* and attach it to the container.

        Builds the screenshot request (size, optional secret-phrase hash,
        fixed JPG format), downloads the image via the REST endpoint, and
        stores it in the vault — using Vault.create_attachment when the
        platform provides it, otherwise writing a temp file and calling
        ph_rules.vault_add.  Returns the action_result status code.
        """

        action_result = ActionResult(dict(param))
        self.add_action_result(action_result)

        params = dict()
        params['url'] = param["url"]
        params['filename'] = param.get('filename')
        params['key'] = self._api_key

        # Check if we have a size
        # Map human-readable size names to the single-letter codes the API expects.
        sizes = {"tiny": "T", "small": "S", "normal": "N", "medium": "M", "large": "L", "full page": "F"}
        test = param.get("size")
        if not test:
            self.save_progress("Size was blank, using the default \"full page\" size")
            test = "full page"
        if not sizes.get(test.lower()):
            self.save_progress("Given size not found, using the default \"full page\" size")
            params['size'] = "F"
        else:
            params['size'] = sizes[test.lower()]

        # Check if we have a Secret Phrase
        # NOTE(review): the md5(url + phrase) digest appears to be the API's
        # request-authentication 'hash' parameter — confirm against the service docs.
        if self._api_phrase is None:
            params['hash'] = ""
        else:
            params['hash'] = str(hashlib.md5((params['url'] + self._api_phrase).encode('utf-8')).hexdigest())

        params['cacheLimit'] = '0'
        params['format'] = 'JPG'
        params['timeout'] = '200'

        # stream=True: the response body is the raw image, not JSON
        ret_val, image = self._make_rest_call('', action_result, params, method='post', stream=True)
        if phantom.is_fail(ret_val):
            return action_result.get_status()

        permalink = None
        # only create a permalink if the hash is used
        if params['hash']:
            permalink = self._get_sspermalink('', params=params, method='post')

        # Prefer the caller-supplied filename; otherwise derive one from the URL.
        if params['filename']:
            file_name = "{}.jpg".format(params['filename'])
        else:
            file_name = "{0}{1}".format(param["url"], "_screenshot.jpg")

        # First attempt: newer platforms expose Vault.create_attachment.
        is_download = False
        if hasattr(Vault, "create_attachment"):
            vault_ret = Vault.create_attachment(image, self.get_container_id(), file_name=file_name)

            if vault_ret.get('succeeded'):
                action_result.set_status(phantom.APP_SUCCESS, "Downloaded screenshot")
                _, _, vault_meta_info = ph_rules.vault_info(container_id=self.get_container_id(), vault_id=vault_ret[phantom.APP_JSON_HASH])
                if not vault_meta_info:
                    self.debug_print("Error while fetching meta information for vault ID: {}".format(vault_ret[phantom.APP_JSON_HASH]))
                    return action_result.set_status(phantom.APP_ERROR, "Could not find meta information of the downloaded screenshot's Vault")

                vault_path = list(vault_meta_info)[0]['path']
                summary = {
                        phantom.APP_JSON_VAULT_ID: vault_ret[phantom.APP_JSON_HASH],
                        phantom.APP_JSON_NAME: file_name,
                        'vault_file_path': vault_path,
                        phantom.APP_JSON_SIZE: vault_ret.get(phantom.APP_JSON_SIZE)}
                if permalink:
                    summary['permalink'] = permalink
                action_result.update_summary(summary)
                is_download = True
            else:
                is_download = False
        # Fallback: write the image into a unique vault tmp subdirectory and
        # register it with ph_rules.vault_add.
        if not is_download:
            if hasattr(Vault, 'get_vault_tmp_dir'):
                temp_dir = Vault.get_vault_tmp_dir()
            else:
                temp_dir = '/opt/phantom/vault/tmp'
            # uuid subdirectory avoids collisions between concurrent action runs
            temp_dir = "{0}{1}".format(temp_dir, '/{}'.format(uuid.uuid4()))
            os.makedirs(temp_dir)
            file_path = os.path.join(temp_dir, 'tempimage.jpg')

            with open(file_path, 'wb') as f:
                f.write(image)

            success, message, vault_id = ph_rules.vault_add(container=self.get_container_id(), file_location=file_path, file_name=file_name)

            if success:
                action_result.set_status(phantom.APP_SUCCESS, "Downloaded screenshot")
                _, _, vault_meta_info = ph_rules.vault_info(container_id=self.get_container_id(), vault_id=vault_id)
                if not vault_meta_info:
                    self.debug_print("Error while fetching meta information for vault ID: {}".format(vault_id))
                    return action_result.set_status(phantom.APP_ERROR, "Could not find meta information of the downloaded screenshot's Vault")

                vault_path = list(vault_meta_info)[0]['path']
                summary = {
                        phantom.APP_JSON_VAULT_ID: vault_id,
                        phantom.APP_JSON_NAME: file_name,
                        'vault_file_path': vault_path}

                if permalink:
                    summary['permalink'] = permalink
                action_result.update_summary(summary)
            else:
                return action_result.set_status(phantom.APP_ERROR, "Error occurred while saving file to vault: {}".format(message))

        return action_result.get_status()
# Example #3
def zip_extract(container=None, vault_id=None, password=None, **kwargs):
    """
    Extract all files recursively from a .zip archive. Add the extracted files to the vault and return the vault IDs of the extracted files. Provide a password if needed to decrypt.

    Args:
        container (CEF type: phantom container id): The container that extracted files will be added to. Should be a container ID or a container dictionary.
        vault_id: The vault ID of the zip archive to be unzipped.
        password: The password to use for decryption of the zip archive if necessary.

    Returns a JSON-serializable object that implements the configured data paths:
        zip_file_info.name: File name of the zip file in the vault
        zip_file_info.user: User who added the zip file to the vault
        output_files.*.file_name: The names of the files extracted from the zip archive.
        output_files.*.file_path: The file paths of the files extracted from the zip archive.
        output_files.*.vault_id: The vault IDs of the files extracted from the zip archive.
    """
    ############################ Custom Code Goes Below This Line #################################
    import json
    import phantom.rules as phantom

    from pathlib import Path
    import zipfile

    outputs = {'output_files': []}

    # Ensure valid container input
    if isinstance(container, dict) and container.get('id'):
        container_id = container['id']
    elif isinstance(container, int):
        container_id = container
    else:
        raise TypeError(
            "The input 'container' is neither a container dictionary nor an int, so it cannot be used"
        )

    # check the vault_id input; vault_info can report success with an empty
    # record list, so guard before indexing into it
    success, message, info = phantom.vault_info(vault_id=vault_id,
                                                container_id=container_id)
    if not success or not info:
        raise ValueError("Could not find file in vault")
    outputs['zip_file_info'] = info[0]

    if password and not isinstance(password, str):
        raise TypeError("password must be a string")

    # create a directory to store the extracted files before adding to the vault
    extract_path = Path("/opt/phantom/vault/tmp/") / vault_id
    extract_path.mkdir(parents=True, exist_ok=True)

    # extract the files with ZipFile (pwd must be bytes, hence the encode)
    with zipfile.ZipFile(info[0]["path"]) as f_zip:
        if password:
            f_zip.extractall(str(extract_path), pwd=password.encode())
        else:
            f_zip.extractall(str(extract_path))

    # add each extracted file to the vault and the output; a distinct name is
    # used so the vault_id input parameter is not shadowed inside the loop
    for p in extract_path.rglob("*"):
        if p.is_file():
            success, message, new_vault_id = phantom.vault_add(
                container=container_id, file_location=str(p), file_name=p.name)
            if not success:
                raise RuntimeError(
                    'failed to add file to vault with path {}'.format(str(p)))
            outputs['output_files'].append({
                'file_path': str(p),
                'file_name': p.name,
                'vault_id': new_vault_id
            })

    # Verify the outputs are JSON-serializable (json.dumps raises TypeError
    # otherwise); a plain call is used instead of `assert` so the check is
    # not stripped when Python runs with -O
    json.dumps(outputs)
    return outputs
def write_embedded_bash_script_to_vault(action=None,
                                        success=None,
                                        container=None,
                                        results=None,
                                        handle=None,
                                        filtered_artifacts=None,
                                        filtered_results=None,
                                        custom_function=None,
                                        **kwargs):
    """Write an embedded host-investigation bash script into the vault.

    Saves the script under /opt/phantom/vault/tmp, registers it in the vault,
    and forwards the resulting vault ID to the community/passthrough custom
    function with upload_bash_script as the callback.  The unused playbook
    parameters (action, success, ...) are part of the standard SOAR playbook
    block signature.
    """
    phantom.debug("write_embedded_bash_script_to_vault() called")

    parameters = []

    # NOTE(review): this placeholder parameter set is overwritten below once
    # the vault ID is known — it appears to be generated playbook boilerplate.
    parameters.append({
        "input_1": None,
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # Script executed on the remote host: collects system configuration,
    # processes, services, logins, sockets and cron jobs into CSVs and zips them.
    bash_script = r"""
#!/bin/bash

# This script is part of the Splunk SOAR playbook called internal_host_ssh_log4j_investigate. It gathers 
# system information as part of a unix endpoint investigation. The output is a human-readable log and a 
# set of .csv files to be copied back to SOAR

echo "##############################################################"
echo "splunk_soar_internal_host_ssh_investigate.sh"
echo "##############################################################"
echo ""
echo "[+] Basic system configuration:"

echo "key,value" > /tmp/basic_system_configuration.csv

echo "hostname: $(uname -n | tr -d "\n")"
echo "hostname,$(uname -n | tr -d "\n")" >> /tmp/basic_system_configuration.csv

echo "current time: $(date +%F_%T)"
echo "current time,$(date +%F_%T)" >> /tmp/basic_system_configuration.csv

echo "IP address: $(ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1' | tr '\n' ' ')"
echo "IP address,$(ifconfig | grep -Eo 'inet (addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1' | tr '\n' ' ')" >> /tmp/basic_system_configuration.csv

echo "OS release: $(cat /etc/*release | sort -u | tr "\n" ";")"
echo "OS release,$(cat /etc/*release | sort -u | tr "\n" ";")" >> /tmp/basic_system_configuration.csv

echo "OS issue: $(cat /etc/issue)"
echo "OS issue,$(cat /etc/issue)" >> /tmp/basic_system_configuration.csv

echo "OS kernel: $(uname -a)"
echo "OS kernel,$(uname -a)" >> /tmp/basic_system_configuration.csv

echo ""
echo "USER,PID,%CPU,%MEM,VSZ,RSS,TTY,STAT,START,TIME,COMMAND" > /tmp/process_list.csv
echo "$(ps aux)" >> /tmp/process_list.csv
echo "[+] Process list:"
echo "$(ps aux)"

echo ""
echo "UNIT,LOAD,ACTIVE,SUB,DESCRIPTION" > /tmp/service_list.csv
echo "$(systemctl)" >> /tmp/service_list.csv
echo "[+] Service list:"
echo "$(systemctl)"

echo ""
echo "$(last -a)" > /tmp/login_history.csv
echo "[+] login history:"
echo "$(last -a)"

echo ""
echo "$(ss -tunapl)" > /tmp/open_sockets.csv
echo "[+] Open sockets:"
echo "$(ss -tunapl)"

echo ""
echo "cron_job" > /tmp/cron_jobs.csv
echo "$(for user in $(cut -f1 -d: /etc/passwd); do crontab -u $user -l 2>/dev/null | grep -v '^#'; done)" >> /tmp/cron_jobs.csv
echo "[+] Cron jobs:"
echo "$(for user in $(cut -f1 -d: /etc/passwd); do crontab -u $user -l 2>/dev/null | grep -v '^#'; done)"

echo ""
echo "[+] Zip up the outputs ..."
zip -j /tmp/$1_ssh_output.zip /tmp/basic_system_configuration.csv /tmp/process_list.csv /tmp/service_list.csv /tmp/login_history.csv /tmp/open_sockets.csv /tmp/cron_jobs.csv
echo "wrote zip file to /tmp/$1_ssh_output.zip; next we will copy it back to SOAR"
"""

    # Persist the script to the vault tmp dir, then register it in the vault.
    file_name = 'splunk_soar_internal_host_ssh_investigate.sh'
    file_path = '/opt/phantom/vault/tmp/{}'.format(file_name)
    with open(file_path, 'w') as bash_script_file:
        bash_script_file.write(bash_script)

    success, message, vault_id = phantom.vault_add(file_location=file_path,
                                                   file_name=file_name)
    # The vault ID is the only parameter the downstream block needs.
    parameters = [{'input_1': vault_id}]

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough",
                            parameters=parameters,
                            name="write_embedded_bash_script_to_vault",
                            callback=upload_bash_script)

    return
    def _handle_file(self, curr_file, container_id):
        """Add one extracted file to the vault and create its Vault Artifact.

        *curr_file* supplies 'file_path' (required) and optionally
        'file_name'.  Returns a (status, artifact_status) pair of phantom
        status codes.
        """
        file_name = curr_file.get('file_name')
        local_file_path = curr_file['file_path']
        contains = self._get_file_contains(local_file_path)

        # Fall back to the path's basename when no explicit name was given.
        if not file_name:
            file_name = os.path.basename(local_file_path)

        self._base_connector.debug_print(
            "Vault file name: {0}".format(file_name))

        # Metadata recorded alongside the vault entry.
        vault_attach_dict = {
            phantom.APP_JSON_ACTION_NAME:
                self._base_connector.get_action_name(),
            phantom.APP_JSON_APP_RUN_ID:
                self._base_connector.get_app_run_id(),
        }

        file_name = self._decode_uni_string(file_name, file_name)

        # Move the file into the vault; failures are logged, not raised.
        try:
            success, message, vault_id = phantom_rules.vault_add(
                file_location=local_file_path,
                container=container_id,
                file_name=file_name,
                metadata=vault_attach_dict)
        except Exception as e:
            self._base_connector.debug_print(
                phantom.APP_ERR_FILE_ADD_TO_VAULT.format(e))
            return phantom.APP_ERROR, phantom.APP_ERROR

        if not success:
            self._base_connector.debug_print(
                PROC_EMAIL_FAILED_VAULT_ADD_FILE.format(message))
            return phantom.APP_ERROR, phantom.APP_ERROR

        # Build the CEF payload for the vault-id artifact.
        cef_artifact = {}
        if file_name:
            cef_artifact['fileName'] = file_name

        if vault_id:
            cef_artifact['vaultId'] = vault_id
            cef_artifact['cs6'] = vault_id
            cef_artifact['cs6Label'] = 'Vault ID'
            # Enrich the artifact with the file's hashes from the vault.
            self._add_vault_hashes_to_dictionary(cef_artifact, vault_id)

        # Nothing to record — report success without an artifact.
        if not cef_artifact:
            return phantom.APP_SUCCESS, phantom.APP_ERROR

        artifact = dict(_artifact_common)
        artifact['container_id'] = container_id
        artifact['name'] = 'Vault Artifact'
        artifact['cef'] = cef_artifact
        if contains:
            artifact['cef_types'] = {'vaultId': contains, 'cs6': contains}
        self._set_sdi(artifact)

        ret_val, status_string, artifact_id = (
            self._base_connector.save_artifact(artifact))
        self._base_connector.debug_print(
            PROC_EMAIL_SAVE_CONT_PASSED.format(
                ret_val, status_string, artifact_id))

        return phantom.APP_SUCCESS, ret_val
def add_embedded_bash_script_to_vault(action=None,
                                      success=None,
                                      container=None,
                                      results=None,
                                      handle=None,
                                      filtered_artifacts=None,
                                      filtered_results=None,
                                      custom_function=None,
                                      **kwargs):
    """Write the embedded log4j-investigation bash script into the vault.

    Saves the script under /opt/phantom/vault/tmp, registers it in the vault,
    and forwards the resulting vault ID to the community/passthrough custom
    function with upload_bash_script as the callback.  The unused playbook
    parameters (action, success, ...) are part of the standard SOAR playbook
    block signature.
    """
    phantom.debug("add_embedded_bash_script_to_vault() called")

    parameters = []

    # NOTE(review): this placeholder parameter set is overwritten below once
    # the vault ID is known — it appears to be generated playbook boilerplate.
    parameters.append({
        "input_1": None,
        "input_2": None,
        "input_3": None,
        "input_4": None,
        "input_5": None,
        "input_6": None,
        "input_7": None,
        "input_8": None,
        "input_9": None,
        "input_10": None,
    })

    ################################################################################
    ## Custom Code Start
    ################################################################################

    # Script executed on the remote host: checks the Java environment and
    # searches .jar/.war files for log4j / JndiLookup.class indicators.
    bash_script = r"""
#!/bin/bash
  
# This script is part of the Splunk SOAR playbook called internal_host_ssh_log4j_investigate. It shows
# the installed java version, lists any running java processes, performs a search for the JndiLookup.class
# file in any .jar files found on disk, and searches any .war files for a log4j jar. The output is a human-readable
# log and a set of .csv files to be copied back to SOAR

echo "##############################################################"
echo "splunk_soar_internal_host_ssh_log4j_investigate.sh"
echo "##############################################################"
echo ""

echo "java environment configuration" > /tmp/java_environment.csv
echo "[+] Checking Java version:"
echo "$(java -version)"
echo "java version:" >> /tmp/java_environment.csv
java -version 2>> /tmp/java_environment.csv

echo ""
echo "[+] Checking running Java processes with ps:"
echo "$(ps aux | grep java)"
echo "ps java processes:" >> /tmp/java_environment.csv
echo "$(ps aux | grep java)" >> /tmp/java_environment.csv

echo ""
echo "[+] Checking running Java processes with jps:"
echo "$(jps -v)"
echo "jps java processes:" >> /tmp/java_environment.csv
echo "$(jps -v)" >> /tmp/java_environment.csv

echo "[+] Search .jar files for JndiLookup.class files ..."
echo "jar_files" > /tmp/jars_with_jndi.csv
find / 2>/dev/null -name '*.jar' -type f -print0 | xargs -0 grep JndiLookup.class | awk '{print $3}' | while read -r file
do
    if [ -f "$file" ]; then
        echo "JndiLookup.class found in .jar file: $file"
        echo "$file" >> /tmp/jars_with_jndi.csv
    fi
done

echo ""
echo "[+] Search .war files for log4j .jar files ..."
echo "war_file,jar_size,jar_time_modified,jar_file" > /tmp/wars_with_jars.csv
find / 2>/dev/null -name '*.war' -type f -print0 | xargs -0 grep log4j | awk '{print $3}' | while read -r war_file
do
    if [ -f "$war_file" ]; then
        unzip -l "$war_file" | grep log4j | awk '{print $1"," $2" "$3","$4}' | while read -r jar_file
        do
            echo ".war file $war_file was found containing the file $jar_file"
            echo "$war_file,$jar_file" >> /tmp/wars_with_jars.csv
        done
    fi
done

echo "[+] Zip up the outputs ..."
zip -j /tmp/$1_ssh_log4j_output.zip /tmp/java_environment.csv /tmp/jars_with_jndi.csv /tmp/wars_with_jars.csv
echo "wrote zip file to /tmp/$1_ssh_log4j_output.zip; next we will copy it back to SOAR"
"""

    # Persist the script to the vault tmp dir, then register it in the vault.
    file_name = 'splunk_soar_internal_host_ssh_log4j_investigate.sh'
    file_path = '/opt/phantom/vault/tmp/{}'.format(file_name)
    with open(file_path, 'w') as bash_script_file:
        bash_script_file.write(bash_script)

    success, message, vault_id = phantom.vault_add(file_location=file_path,
                                                   file_name=file_name)
    # The vault ID is the only parameter the downstream block needs.
    parameters = [{'input_1': vault_id}]

    ################################################################################
    ## Custom Code End
    ################################################################################

    phantom.custom_function(custom_function="community/passthrough",
                            parameters=parameters,
                            name="add_embedded_bash_script_to_vault",
                            callback=upload_bash_script)

    return