def add_configuration_import(new_export_data, res_client):
    """
    Makes a REST request to add a configuration import.

    After the request is made, the configuration import is set at a pending state and needs to be confirmed.
    If the configuration state is not reported as pending, raise an SDKException.

    :param new_export_data: A dict representing a configuration import DTO
    :type new_export_data: dict
    :param res_client: An instantiated res_client for making REST calls
    :type res_client: SimpleClient()
    :raises SDKException: If the upload fails, the response is not a dict, or no import ID is returned
    """
    try:
        result = res_client.post(IMPORT_URL, new_export_data)
    except requests.RequestException as upload_exception:
        LOG.debug(new_export_data)
        raise SDKException(upload_exception)

    # Validate the response shape explicitly: `assert` would be silently
    # stripped when Python runs with -O, so raise instead.
    if not isinstance(result, dict):
        raise SDKException(
            "Unexpected response from server when uploading configuration import: {0}".format(result))

    if result.get("status", '') == "PENDING":
        confirm_configuration_import(result, result.get("id"), res_client)
    else:
        raise SDKException(
            "Could not import because the server did not return an import ID")
def get_apikey_permissions(path):
    """
    Returns a list of api keys to allow an integration to run.

    :param path: Location to file with api keys one per line.
    :raises SDKException: if the file cannot be read, is empty, contains an invalid
        api key value or is missing one of the base permissions.
    :return apikey_permissions: Return list of api keys.
    """

    try:
        # Read the apikey_permissions.txt file into a List
        apikey_permissions_lines = sdk_helpers.read_file(path)

    except Exception as err:
        raise SDKException(u"Failed to parse configs from apikey_permissions.txt file\nThe apikey_permissions.txt file may "
                           u"be corrupt. Visit the App Exchange to contact the developer\nReason: {0}".format(err))

    # Raise an error if nothing found in the file
    if not apikey_permissions_lines:
        raise SDKException(u"No content found in provided apikey_permissions.txt file: {0}".format(path))

    # Get permissions. Ignore comments where 1st non-whitespace character is a '#'.
    # Also skip blank lines, which would otherwise yield empty permission entries.
    apikey_permissions = [p.strip() for p in apikey_permissions_lines
                          if p.strip() and not p.lstrip().startswith("#")]

    # Do basic check on api keys to see if they are in correct format.
    for p in apikey_permissions:
        if not re.match(r"[_a-zA-Z]*$", p):
            raise SDKException(u"Value '{0}' in file '{1}' is not a valid api key value.".format(p, path))

    # Ensure that the permissions includes at minimum the set of base permissions.
    if not all(p in apikey_permissions for p in BASE_PERMISSIONS):
        raise SDKException(u"The file '{0}' is missing one of the base api key permissions.".format(path))

    return apikey_permissions
# --- Exemplo n.º 3 ---
    def execute_command(self, args):
        """Route the parsed codegen args to the matching sub-routine, or print help."""
        LOG.debug("called: CmdCodegen.execute_command()")

        if args.gather_results:
            if not args.package:
                raise SDKException("'-p' must be specified when using '{0}'".format(constants.SUB_CMD_OPT_GATHER_RESULTS))

            SDKException.command_ran = "{0} {1}".format(self.CMD_NAME, constants.SUB_CMD_OPT_GATHER_RESULTS)
            self._get_results_from_log_file(args)
            return

        if args.reload:
            if not args.package:
                raise SDKException("'-p' must be specified when using '--reload'")

            SDKException.command_ran = "{0} {1}".format(self.CMD_NAME, "--reload")
            self._reload_package(args)
            return

        if args.package:
            SDKException.command_ran = "{0} {1}".format(self.CMD_NAME, "--package | -p")
            self._gen_package(args)
            return

        # reaching here implies args.package is falsy, so this mirrors the
        # original 'not args.package and args.function' branch
        if args.function:
            SDKException.command_ran = "{0} {1}".format(self.CMD_NAME, "--function | -f")
            self._gen_function(args)
            return

        self.parser.print_help()
def get_icon(icon_name, path_to_icon, width_accepted, height_accepted, default_path_to_icon):
    """
    Returns the icon at path_to_icon as a base64 encoded string if it is a valid .png file
    with the resolution width_accepted x height_accepted. If path_to_icon does not exist,
    default_path_to_icon is validated and returned as a base64 encoded string instead.

    :param icon_name: Name of the icon (used in log messages only)
    :param path_to_icon: Path to the custom icon file
    :param width_accepted: Required icon width in pixels
    :param height_accepted: Required icon height in pixels
    :param default_path_to_icon: Fallback icon path used when path_to_icon is invalid
    :raises OSError: if neither path_to_icon nor default_path_to_icon is a readable file
    :raises SDKException: if the icon is not a .png, is corrupt, or has the wrong resolution
    :return: base64 encoded string of the icon file contents, decoded to utf-8
    :rtype: str
    """

    path_icon_to_use = path_to_icon

    # Use default_path_to_icon if path_to_icon does not exist
    if not path_icon_to_use or not os.path.isfile(path_icon_to_use):
        LOG.warning("WARNING: Default icon will be used\nProvided custom icon path for %s is invalid: %s\nNOTE: %s should be placed in the /icons directory", icon_name, path_icon_to_use, icon_name)
        path_icon_to_use = default_path_to_icon

    # Validate path_icon_to_use and ensure we have READ permissions
    try:
        sdk_helpers.validate_file_paths(os.R_OK, path_icon_to_use)
    except SDKException as err:
        raise OSError("Could not find valid icon file. Looked at two locations:\n{0}\n{1}\n{2}".format(path_to_icon, default_path_to_icon, err.message))

    # Get the extension of the file. os.path.splitext returns a Tuple with the file extension at position 1 and can be an empty string
    file_extension = os.path.splitext(path_icon_to_use)[1]

    if not file_extension:
        raise SDKException("Provided icon file does not have an extension. Icon file must be .png\nIcon File: {0}".format(path_icon_to_use))

    elif file_extension != ".png":
        raise SDKException("{0} is not a supported icon file type. Icon file must be .png\nIcon File: {1}".format(file_extension, path_icon_to_use))

    # Open the icon_file in Bytes mode to validate its resolution
    with open(path_icon_to_use, mode="rb") as icon_file:
        # According to: https://en.wikipedia.org/wiki/Portable_Network_Graphics#File_format
        # First need to seek 16 bytes:
        #   8 bytes: png signature
        #   4 bytes: IDHR Chunk Length
        #   4 bytes: IDHR Chunk type
        icon_file.seek(16)

        try:
            # Bytes 17-20 = image width. Use struct to unpack big-endian encoded unsigned int
            icon_width = struct.unpack(">I", icon_file.read(4))[0]

            # Bytes 21-24 = image height. Use struct to unpack big-endian encoded unsigned int
            icon_height = struct.unpack(">I", icon_file.read(4))[0]
        except Exception as err:
            # Record the underlying reason (e.g. short read) before raising the SDK error
            LOG.debug("Error reading icon resolution: %s", err)
            raise SDKException("Failed to read icon's resolution. Icon file corrupt. Icon file must be .png\nIcon File: {0}".format(path_icon_to_use))

    # Raise exception if resolution is not accepted
    if icon_width != width_accepted or icon_height != height_accepted:
        raise SDKException("Icon resolution is {0}x{1}. Resolution must be {2}x{3}\nIcon File: {4}".format(icon_width, icon_height, width_accepted, height_accepted, path_icon_to_use))

    # If we get here all validations have passed. Open the file in Bytes mode and encode it as base64 and decode to a utf-8 string
    with open(path_icon_to_use, "rb") as icon_file:
        encoded_icon_string = base64.b64encode(icon_file.read()).decode("utf-8")

    return encoded_icon_string
# --- Exemplo n.º 5 ---
def get_import_definition_from_customize_py(path_customize_py_file):
    """
    Return the ImportDefinition in a customize.py or /util/data/export.res file as a Dictionary.

    :param path_customize_py_file: Path to the customize.py file
    :return: import definition dict from /util/data/export.res if the file exists. Otherwise,
             get it from customize.py
    """

    # Prefer the local /util/data/export.res file when it exists
    path_local_export_res = os.path.join(
        os.path.dirname(path_customize_py_file), PATH_LOCAL_EXPORT_RES)

    if os.path.isfile(path_local_export_res):
        return get_import_definition_from_local_export_res(path_local_export_res)

    customize_py = load_customize_py_module(path_customize_py_file)

    # customization_data() is a generator; collect the decoded payload of every
    # ImportDefinition it yields, warning about anything else
    definitions = []
    for yielded in customize_py.customization_data():
        if isinstance(yielded, ImportDefinition):
            definitions.append(json.loads(base64.b64decode(yielded.value)))
        else:
            LOG.warning(
                "WARNING: Unsupported data found in customize.py file. Expected an ImportDefinition. Got: '%s'",
                yielded)

    # Exactly one ImportDefinition must be present
    if not definitions:
        raise SDKException(
            "No ImportDefinition found in the customize.py file")

    if len(definitions) > 1:
        raise SDKException(
            "Multiple ImportDefinitions found in the customize.py file. There must only be 1 ImportDefinition defined"
        )

    # Strip the incident type codegen added to make the data importable
    return remove_default_incident_type_from_import_definition(definitions[0])
# --- Exemplo n.º 6 ---
def get_configuration_py_file_path(file_type, setup_py_attributes):
    """  Get the location of configuration file config or customize for a package.

    If file_type == "customize" check that entry point 'resilient.circuits.apphost.customize' (SUPPORTED_EP[0])
    is defined in setup.py of the package.
    If file_type == "config" check that entry point 'resilient.circuits.apphost.configsection' (SUPPORTED_EP[1]) is
    defined in setup.py of the package, else check 'resilient.circuits.configsection' (SUPPORTED_EP[2]) was detected in
    setup.py of the package.

    Note: For some packages neither of these files may exist.

    :param file_type: File whose location is required should be 'customize' or 'config'.
    :param setup_py_attributes: Parsed setup.py content.
    :raises SDKException: if file_type is unknown, or the file defined in setup.py does not exist.
    :return path_py_file: The customize or config file location for the package, or None if not defined.
    """
    path_py_file = None

    if file_type == "customize":
        if SUPPORTED_EP[0] in setup_py_attributes["entry_points"]:
            path_py_file = setup_py_attributes["entry_points"][SUPPORTED_EP[0]]
    elif file_type == "config":
        for ep in SUPPORTED_EP[1:]:
            if ep in setup_py_attributes["entry_points"]:
                path_py_file = setup_py_attributes["entry_points"][ep]
                break
    else:
        raise SDKException("Unknown option '{}'.".format(file_type))

    if path_py_file:
        # If configuration file defined in setup.py but does not exist raise an error.
        try:
            sdk_helpers.validate_file_paths(os.R_OK, path_py_file)
        except SDKException:
            # validate_file_paths raises on failure (it never returns a truthy
            # value), so raise the descriptive error directly here. Calling it a
            # second time -- as the original code did -- would just raise its own
            # generic SDKException and mask this message.
            raise SDKException(
                "Configuration File '{0}' defined as an entry point in 'setup.py' not found at "
                "location '{1}'.".format(file_type, path_py_file))
    else:
        # For certain packages or threat-feeds these files may not exist.
        # Warn user if file not found.
        LOG.warning(
            "WARNING: Configuration File of type '%s' not defined in 'setup.py'. Ignoring and continuing.",
            file_type)

    return path_py_file
def parse_file_paths_from_readme(readme_line_list):
    """
    Takes a list of strings and looks through to find the links characters in markdown:
    ![<fall_back_name>](<link_to_screenshot>)
    The method will raise an SDKException if there is a link started without the provided parenthetical
    link in correct syntax.
    If no exception is raised, it returns a list of filepaths for linked images in the readme.

    :param readme_line_list: list of readme lines that will have the comment lines removed
    :type readme_line_list: list[str]
    :raises: SDKException if link given in invalid syntax
    :return: list of paths to linked files
    :rtype: list[str]
    """

    # Drop HTML comments first -- they may contain invalid screenshot paths
    cleaned_lines = re.sub(r"(<!--.*?-->)", "", "".join(readme_line_list), flags=re.DOTALL).splitlines()

    linked_paths = []

    for raw_line in cleaned_lines:

        stripped = raw_line.strip()

        # Markdown image links start with "!["
        if not stripped.startswith("!["):
            continue

        if "(" not in stripped or ")" not in stripped:
            # link started but no "( ... )" section -- invalid syntax
            raise SDKException(u"Line '{0}' in README has invalid link syntax".format(stripped))

        # the linked file lives between the last "(" and the last ")"
        linked_paths.append(stripped[stripped.rfind("(") + 1:stripped.rfind(")")])

    return linked_paths
def confirm_configuration_import(result, import_id, res_client):
    """
    Makes a REST request to confirm a pending configuration import as accepted.

    :param result: Result of the configuration import request
    :type result: dict
    :param import_id: The ID of the configuration import to confirm
    :type import_id: int
    :param res_client: An instantiated res_client for making REST calls
    :type res_client: SimpleClient()
    :raises SDKException: If the confirmation request fails raise an SDKException
    """

    # The server requires the status flipped to ACCEPTED to confirm the changes
    result["status"] = "ACCEPTED"

    confirm_uri = "{}/{}".format(IMPORT_URL, import_id)

    try:
        res_client.put(confirm_uri, result)
        LOG.info(
            "Imported configuration changes successfully to the Resilient Appliance")
    except requests.RequestException as import_exception:
        raise SDKException(repr(import_exception))
def get_workflow_functions(workflow, function_uuid=None):
    """Parses the XML of the Workflow Object and returns
    a List of all Functions found. If function_uuid is defined
    returns all occurrences of that function.

    A Workflow Function can have the attributes:
    - uuid: String
    - inputs: Dict
    - post_processing_script: String
    - pre_processing_script: String
    - result_name: String"""

    # The Workflow XML text lives at content.xml
    wf_xml = workflow.get("content", {}).get("xml", None)

    if wf_xml is None:
        raise SDKException(
            "Could not load xml content from Workflow: {0}".format(workflow))

    # Encode in utf8 so Unicode content parses cleanly
    root = ET.fromstring(wf_xml.encode("utf8"))

    # Every element tag carries the same namespace prefix as the root tag
    tag_prefix = root.tag.replace("definitions", "")

    xml_path = "./{0}process/{0}serviceTask/{0}extensionElements/*".format(
        tag_prefix)

    if function_uuid is not None:
        # Restrict the search to extension elements carrying this exact uuid
        function_elements = root.findall(
            "{0}[@uuid='{1}']".format(xml_path, function_uuid))
    else:
        # Keep only the extension elements whose tag mentions "function"
        function_elements = [
            el for el in root.findall(xml_path) if "function" in el.tag
        ]

    # Decode each element's JSON payload and normalize its optional keys
    return_functions = []
    for fn_element in function_elements:
        fn = json.loads(fn_element.text)
        fn["uuid"] = fn_element.attrib.get("uuid", "")
        fn["result_name"] = fn.get("result_name", None)
        fn["post_processing_script"] = fn.get("post_processing_script", None)
        fn["pre_processing_script"] = fn.get("pre_processing_script", None)
        return_functions.append(fn)

    return return_functions
# --- Exemplo n.º 10 ---
def read_zip_file(path, pattern):
    """Returns unzipped contents of file whose name matches a pattern
    in zip file at path.

    :param path: Path to zip file.
    :param pattern: File pattern to match in the zip file.
    :return: file_content: Return unzipped file content.
    """
    try:
        with ZipFile(path, 'r') as zobj:
            # Case-insensitive match of 'pattern' against every member name.
            matches = [name for name in zobj.namelist()
                       if pattern.lower() in name.lower()]

            if not matches:
                raise SDKException(
                    "A file matching pattern {0} was not found in zip file: {1}".format(pattern, path))

            if len(matches) > 1:
                raise SDKException(
                    "More than one file matching pattern {0} found in zip file: {1}".format(pattern, path))

            # Exactly one match: read it and decode the bytes to a string.
            with zobj.open(matches[0]) as member:
                file_content = member.read().decode('utf8', 'ignore')

    except BadZipfile:
        raise SDKException("Bad zip file {0}.".format(path))

    except SDKException as err:
        # Our own errors from above pass through unchanged.
        raise err

    except Exception as err:
        # An unexpected error trying to read a zipfile.
        raise SDKException(
            "Got an error '{0}' attempting to read zip file {1}".format(
                err, path))

    return file_content
# --- Exemplo n.º 11 ---
def add_tag(tag_name, list_of_objs):
    """
    Returns list_of_objs with tag_name added to each object.
    Replaces any tags that were there originally to address bug INT-3077

    :param tag_name: The name of the tag to add
    :param list_of_objs: A list of all the objects you want to add the tag too
    :raise: SDKException: if list_of_objs is corrupt
    :return: list_of_objs with tag_name added to each object
    :rtype: list of dicts
    """
    err_msg = "Error adding tag '{0}'. '{1}' (printed above) is not a {2}. Instead a {3} was provided.\nProvided ImportDefinition in the customize.py file may be corrupt"

    # The tag DTO that will replace each object's existing tags
    new_tag = {"tag_handle": tag_name, "value": None}

    # list_of_objs must itself be a List
    if not isinstance(list_of_objs, list):
        LOG.error("Error adding tag.\n'list_of_objs': %s", list_of_objs)
        raise SDKException(
            err_msg.format(tag_name, "list_of_objs", "List",
                           type(list_of_objs)))

    for obj in list_of_objs:

        # ...and every entry must be a Dictionary
        if not isinstance(obj, dict):
            LOG.error("Error adding tag.\n'list_of_objs': %s\n'obj': %s",
                      list_of_objs, obj)
            raise SDKException(
                err_msg.format(tag_name, "obj", "Dictionary", type(obj)))

        # Overwrite (never append to) existing tags -- see INT-3077
        obj["tags"] = [new_tag]

    return list_of_objs
# --- Exemplo n.º 12 ---
    def execute_command(self, args):
        """Handle the 'dev' sub-command; currently only --set-version is supported."""
        LOG.debug("called: CmdDev.execute_command()")

        # No recognized option: show usage and bail out early
        if not args.set_version:
            self.parser.print_help()
            return

        if not args.package:
            raise SDKException(
                "'-p' must be specified when using '--set-version'")

        SDKException.command_ran = "{0} {1}".format(
            self.CMD_NAME, "--set-version")
        self._set_version(args)
# --- Exemplo n.º 13 ---
def validate_file_paths(permissions, *args):
    """
    Check the given *args paths exist and has the given permissions, else raises an Exception
    """

    for path_to_file in args:
        # Every path must refer to an existing regular file
        if not os.path.isfile(path_to_file):
            raise SDKException("Could not find file: {0}".format(path_to_file))

        # Only verify access when permissions were actually requested
        if permissions:
            has_permissions(permissions, path_to_file)
# --- Exemplo n.º 14 ---
def validate_dir_paths(permissions, *args):
    """
    Check the given *args paths are Directories and have the given permissions, else raises an Exception
    """

    for path_to_dir in args:
        # Every path must refer to an existing directory
        if not os.path.isdir(path_to_dir):
            raise SDKException(
                "Could not find directory: {0}".format(path_to_dir))

        # Only verify access when permissions were actually requested
        if permissions:
            has_permissions(permissions, path_to_dir)
def get_required_python_version(python_requires_str):
    """
    Given a value from the 'python_requires' attribute of setup.py, parse out the
    numerical value given for the version required.

    :param python_requires_str: str representation of the value assosciated with the 'python_requires' attr in setup.py
    :raise SDKException: if format of python_requires is not correct (i.e. in '>=<version>' format)
    :return: return the minimum required python version or None if not found
    :rtype: tuple with (<major>, <minor>) version format
    """
    try:
        # re.match returns None when the '>=<version>' prefix is absent;
        # .groups() on None then raises AttributeError, which we surface below
        version_str = re.match(r"(?:>=)([0-9]+[\.0-9]*)", python_requires_str).groups()[0]
        version = pkg_resources.parse_version(version_str)

        return sdk_helpers.parse_version_object(version)
    except AttributeError:
        raise SDKException("'python_requires' version not given in correct format.")
# --- Exemplo n.º 16 ---
def has_permissions(permissions, path):
    """
    Raises an exception if the user does not have the given permissions to path
    """

    LOG.debug("checking if: %s has correct permissions", path)

    # Nothing to do when access is granted
    if os.access(path, permissions):
        return

    # Map the os constant to a human readable name for the error message
    if permissions is os.R_OK:
        permissions = "READ"
    elif permissions is os.W_OK:
        permissions = "WRITE"

    raise SDKException(
        "User does not have {0} permissions for: {1}".format(
            permissions, path))
# --- Exemplo n.º 17 ---
    def execute_command(self, args):
        """Dispatch codegen options: --reload, -p/--package or -f/--function."""
        LOG.debug("called: CmdCodegen.execute_command()")

        if args.reload:
            if not args.package:
                raise SDKException("'-p' must be specified when using '--reload'")

            SDKException.command_ran = "{0} {1}".format(self.CMD_NAME, "--reload")
            self._reload_package(args)
            return

        if args.package:
            SDKException.command_ran = "{0} {1}".format(self.CMD_NAME, "--package | -p")
            self._gen_package(args)
            return

        # reaching here implies args.package is falsy, matching the original
        # 'not args.package and args.function' branch
        if args.function:
            SDKException.command_ran = "{0} {1}".format(self.CMD_NAME, "--function | -f")
            self._gen_function(args)
            return

        self.parser.print_help()
# --- Exemplo n.º 18 ---
    def perform_duplication_check(obj_type, obj_identifier, obj_type_name,
                                  new_object_api_name, export):
        """Attempt to get the referenced object from the org_export.
        If the object is not found, return True.
        If the object is found, raise an SDKException specifying the provided object name is not unique
        and already exists on the system.

        :param obj_type: The type name in the org export to search
        :type obj_type: str
        :param obj_identifier: The identifier for the given object
        :type obj_identifier: str
        :param obj_type_name: Human readable name of the object type
        :type obj_type_name: str
        :param new_object_api_name: The proposed api name for the cloned object
        :type new_object_api_name: str
        :param export: The org export to search through
        :type export: dict
        :raises SDKException: If the provided object name is found then this function raises a SDK exception specifying this must be unique.
        """
        try:
            # Look for an object already using the proposed new name
            get_res_obj(obj_type,
                        obj_identifier,
                        obj_type_name, [new_object_api_name],
                        export,
                        include_api_name=False)
        except SDKException:
            # get_res_obj raises when the object is not found -- for this unique
            # use case that is the desired outcome, so swallow it and succeed
            return True

        # No exception means an object with that identifier already exists,
        # so the new name chosen for cloning is not unique
        raise SDKException(
            "The new name for a cloned object needs to be unique and a {} with the api name '{}' already exists"
            .format(obj_type_name, new_object_api_name))
def test_fail_package_files_validate_customize_py(
        fx_copy_fn_main_mock_integration):
    """customize.py validation must report a single CRITICAL issue when the
    import definition cannot be parsed."""

    attr_dict = sdk_validate_configs.package_files.get("customize.py")
    path_file = os.path.join(
        fx_copy_fn_main_mock_integration[1],
        attr_dict.get("path").format(fx_copy_fn_main_mock_integration[0]))

    # Simulate a corrupt customize.py by making the parser raise
    with patch(
            "resilient_sdk.util.sdk_validate_helpers.package_helpers.get_import_definition_from_customize_py"
    ) as mock_import_def:

        mock_import_def.side_effect = SDKException("failed")

        results = sdk_validate_helpers.package_files_validate_customize_py(
            path_file, attr_dict)

        assert len(results) == 1
        issue = results[0]
        assert isinstance(issue, SDKValidateIssue)
        assert issue.severity == SDKValidateIssue.SEVERITY_LEVEL_CRITICAL
def test_package_files_validate_improper_icon(
        fx_copy_fn_main_mock_integration):
    """Icon validation must surface a single CRITICAL issue when get_icon fails."""

    attr_dict = sdk_validate_configs.package_files.get("app_logo.png")
    path_file = os.path.join(fx_copy_fn_main_mock_integration[1],
                             attr_dict.get("path"))

    # Simulate an unreadable/invalid icon by making get_icon raise
    with patch(
            "resilient_sdk.util.sdk_validate_helpers.package_helpers.get_icon"
    ) as mock_icon:

        mock_icon.side_effect = SDKException("Failed for some reason")

        results = sdk_validate_helpers.package_files_validate_icon(
            path_file, attr_dict, "app_logo.png")

    assert len(results) == 1
    issue = results[0]
    assert isinstance(issue, SDKValidateIssue)
    assert issue.severity == SDKValidateIssue.SEVERITY_LEVEL_CRITICAL
    assert "ERROR: Failed for some reason" == issue.description
# --- Exemplo n.º 21 ---
def read_local_exportfile(path_local_exportfile):
    """
    Read a Resilient export file from the given path and return it as a dict.

    Supports both plain-text export files and zip archives containing the
    export (identified via ``is_zipfile``).

    :raises SDKException: if no content could be read from the file
    """
    abs_path = os.path.abspath(path_local_exportfile)

    # Fail early if the file is missing or not readable
    validate_file_paths(os.R_OK, abs_path)

    if is_zipfile(abs_path):
        # Zipped export: extract the export member from the archive
        export_content = read_zip_file(abs_path, RES_EXPORT_SUFFIX)
    else:
        # Assume a plain text file: read and join every line
        export_content = ''.join(read_file(abs_path))

    if not export_content:
        raise SDKException("Failed to read {0}".format(abs_path))

    return json.loads(export_content)
Example no. 22
0
    def _reload_package(args):
        """
        Regenerate ("reload") an existing codegen package in place.

        Pulls the previously generated params from the package's customize.py
        via its ``codegen_reload_data()`` method, merges them with any object
        names newly given on the command line, and re-runs
        ``CmdCodegen._gen_package``. Before regenerating, the current
        customize.py and (when present) the local util/data export.res are
        renamed to ``.bak`` files; if regeneration fails and leaves the
        originals missing, the backups are restored in the ``finally`` block.

        :param args: parsed command line arguments; ``args.package`` is the
            path to the package directory (``args.package``/``args.output``
            are rewritten in place here)
        :raises SDKException: if customize.py has no ``codegen_reload_data``
            method, or that method returns nothing
        """

        old_params, path_customize_py_bak = [], ""

        # Get + validate package, customize.py and setup.py paths
        path_package = os.path.abspath(args.package)
        # Get basename of path_to_src (version information is stripped from the basename).
        path_package_basename = re.split(VERSION_REGEX, os.path.basename(path_package), 1)[0]
        sdk_helpers.validate_dir_paths(os.R_OK, path_package)

        path_customize_py = os.path.join(path_package, path_package_basename, package_helpers.PATH_CUSTOMIZE_PY)
        sdk_helpers.validate_file_paths(os.W_OK, path_customize_py)

        path_setup_py_file = os.path.join(path_package, package_helpers.PATH_SETUP_PY)
        sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file)

        # Set package + output args correctly (this handles if user runs 'codegen --reload -p .')
        args.package = os.path.basename(path_package)
        args.output = os.path.dirname(path_package)

        LOG.info("'codegen --reload' started for '%s'", args.package)

        # Load the customize.py module
        customize_py_module = package_helpers.load_customize_py_module(path_customize_py, warn=False)

        try:
            # Get the 'old_params' from customize.py
            old_params = customize_py_module.codegen_reload_data()
        except AttributeError:
            raise SDKException(u"Corrupt customize.py. No reload method found in {0}".format(path_customize_py))

        if not old_params:
            raise SDKException(u"No reload params found in {0}".format(path_customize_py))

        # Rename the old customize.py with .bak
        path_customize_py_bak = sdk_helpers.rename_to_bak_file(path_customize_py)

        # If local export file exists then save it to a .bak file.
        # (Older packages may not have the /util/data/export.res file)
        path_export_res = os.path.join(path_package, path_package_basename,
                                       package_helpers.PATH_UTIL_DATA_DIR,
                                       package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
        if os.path.isfile(path_export_res):
            path_export_res_bak = sdk_helpers.rename_to_bak_file(path_export_res)
        else:
            path_export_res_bak = None

        try:
            # Map command line arg name to dict key returned by codegen_reload_data() in customize.py
            mapping_tuples = [
                ("messagedestination", "message_destinations"),
                ("function", "functions"),
                ("workflow", "workflows"),
                ("rule", "actions"),
                ("field", "incident_fields"),
                ("artifacttype", "incident_artifact_types"),
                ("datatable", "datatables"),
                ("task", "automatic_tasks"),
                ("script", "scripts")
            ]

            # Merge old_params with new params specified on command line
            args = CmdCodegen.merge_codegen_params(old_params, args, mapping_tuples)

            # Parse the setup.py file
            setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

            LOG.debug("Regenerating codegen '%s' package now", args.package)

            # Regenerate the package
            CmdCodegen._gen_package(args, setup_py_attributes=setup_py_attributes)

            LOG.info("\nNOTE: Ensure the MANIFEST.in file includes line:\nrecursive-include %s/util *\n", args.package)
            LOG.info("'codegen --reload' complete for '%s'", args.package)

        except Exception as err:
            # NOTE(review): broad catch — a failed regeneration is only logged,
            # the finally block below then restores the .bak files
            LOG.error(u"Error running resilient-sdk codegen --reload\n\nERROR:%s", err)

        # This is required in finally block as user may kill using keyboard interrupt
        finally:
            # If an error occurred, customize.py does not exist, rename the backup file to original
            if not os.path.isfile(path_customize_py):
                LOG.info(u"An error occurred. Renaming customize.py.bak to customize.py")
                sdk_helpers.rename_file(path_customize_py_bak, package_helpers.BASE_NAME_CUSTOMIZE_PY)
            if not os.path.isfile(path_export_res) and path_export_res_bak:
                LOG.info(u"An error occurred. Renaming export.res.bak to export.res")
                sdk_helpers.rename_file(path_export_res_bak, package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
Example no. 23
0
    def _gen_package(args, setup_py_attributes={}):
        # NOTE(review): mutable default argument; it is only read (.get) here,
        # never mutated, so it is harmless — but `setup_py_attributes=None`
        # with an in-body default would be the safer idiom.
        """
        Generate a codegen package directory from an org export.

        Reads the export either from a local file (``args.exportfile``) or by
        connecting to the Resilient appliance, extracts the requested objects
        into Jinja2 template data, and renders the full package file tree
        (setup.py, Dockerfile, components, util/, tests/, data/ ...) under
        the output directory.

        :param args: parsed command line arguments (package name, output dir,
            object filters such as functions/workflows/rules)
        :param setup_py_attributes: optional dict of attributes parsed from an
            existing setup.py (used for the version), empty for a fresh package
        :raises SDKException: if ``args.package`` is not a valid package name
        """

        LOG.info("Generating codegen package...")

        if not sdk_helpers.is_valid_package_name(args.package):
            raise SDKException(u"'{0}' is not a valid package name".format(args.package))

        # Strip off version information, if present in package base folder, to get the package name.
        path_package_basename = re.split(VERSION_REGEX, args.package, 1)[0]

        # Get base version if we are running against a package base folder with version.
        # NOTE(review): package_name is used as a regex pattern without
        # re.escape() — relies on valid package names containing no regex
        # metacharacters; confirm is_valid_package_name guarantees that.
        package_name = re.split(VERSION_REGEX, args.package, 1)[0]
        base_version = ''.join(re.split(package_name, args.package))

        # Get output_base, use args.output if defined, else current directory
        output_base = args.output if args.output else os.curdir
        output_base = os.path.abspath(output_base)

        # If --exportfile is specified, read org_export from that file
        if args.exportfile:
            LOG.info("Using local export file: %s", args.exportfile)
            org_export = sdk_helpers.read_local_exportfile(args.exportfile)

        else:
            # Instantiate connection to the Resilient Appliance
            res_client = sdk_helpers.get_resilient_client()

            # Generate + get latest export from Resilient Server
            org_export = sdk_helpers.get_latest_org_export(res_client)

        # Get data required for Jinja2 templates from export
        jinja_data = sdk_helpers.get_from_export(org_export,
                                                 message_destinations=args.messagedestination,
                                                 functions=args.function,
                                                 workflows=args.workflow,
                                                 rules=args.rule,
                                                 fields=args.field,
                                                 artifact_types=args.artifacttype,
                                                 datatables=args.datatable,
                                                 tasks=args.task,
                                                 scripts=args.script)

        # Get 'minified' version of the export. This is used in customize.py
        jinja_data["export_data"] = sdk_helpers.minify_export(org_export,
                                                              message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                                                              functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                                                              workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                                                              rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                                                              fields=jinja_data.get("all_fields"),
                                                              artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
                                                              datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                                                              tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
                                                              phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
                                                              scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        # Validate we have write permissions
        sdk_helpers.validate_dir_paths(os.W_OK, output_base)

        # Join package_name to output base (add base version if running against a folder which includes a version).
        output_base = os.path.join(output_base, package_name+base_version)

        # If the output_base directory does not exist, create it
        if not os.path.exists(output_base):
            os.makedirs(output_base)

        # Instansiate Jinja2 Environment with path to Jinja2 templates
        jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template")

        # This dict maps our package file structure to  Jinja2 templates
        package_mapping_dict = {
            "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
            "README.md": ("README.md.jinja2", jinja_data),
            "setup.py": ("setup.py.jinja2", jinja_data),
            "tox.ini": ("tox.ini.jinja2", jinja_data),
            "Dockerfile": ("Dockerfile.jinja2", jinja_data),
            "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
            "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
            "data": {},
            "icons": {
                "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
                "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
            },
            "doc": {
                "screenshots": {
                    "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
                }
            },
            package_name: {
                "__init__.py": ("package/__init__.py.jinja2", jinja_data),
                "LICENSE": ("package/LICENSE.jinja2", jinja_data),

                "components": {
                    "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
                },
                "util": {
                    "data": {
                        "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                    },
                    "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                    "config.py": ("package/util/config.py.jinja2", jinja_data),
                    "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                    "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
                }
            }
        }

        # If there are Functions, add a 'tests' directory
        if jinja_data.get("functions"):
            package_mapping_dict["tests"] = {}

        # Loop each Function
        for f in jinja_data.get("functions"):
            # Add package_name to function data
            f["package_name"] = package_name

            # Generate function_component.py file name
            file_name = u"funct_{0}.py".format(f.get("export_key"))

            # Add to 'components' directory
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

        for w in jinja_data.get("workflows"):

            # Generate wf_xx.md file name
            file_name = u"wf_{0}.md".format(w.get(ResilientObjMap.WORKFLOWS))

            # Add workflow to data directory
            package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

        newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
            jinja_mapping_dict=package_mapping_dict,
            jinja_env=jinja_env,
            target_dir=output_base,
            package_dir=output_base)

        # Log new and skipped files
        if newly_generated_files:
            LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

        if skipped_files:
            LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

        LOG.info("'codegen' complete for '%s'", package_name)
Example no. 24
0
    def _set_version(args):
        """
        Set the Resilient Platform 'server_version' of a package's export.

        Validates the ``--set-version`` value, patches the server_version
        fields of the ImportDefinition held in the package's customize.py,
        then re-renders customize.py and util/data/export.res from the Jinja2
        templates. The originals are renamed to ``.bak`` files first and are
        restored in the ``finally`` block if the rewrite failed.

        :param args: parsed command line arguments; ``args.set_version`` is
            the new version string and ``args.package`` the package directory
        :raises SDKException: if ``args.set_version`` has invalid syntax
        """

        new_version = args.set_version

        if not sdk_helpers.is_valid_version_syntax(new_version):
            raise SDKException(
                "{0} is not a valid version".format(new_version))

        # Extract the numeric components, e.g. "39.0.6783" -> [39, 0, 6783]
        new_version_int = list(map(int, (re.findall(r"\d+", new_version))))

        # Get absolute path_to_src
        path_to_src = os.path.abspath(args.package)

        # Get path to setup.py file
        path_setup_py_file = os.path.join(path_to_src,
                                          package_helpers.BASE_NAME_SETUP_PY)

        # Parse the setup.py file
        setup_py_attributes = package_helpers.parse_setup_py(
            path_setup_py_file,
            package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

        package_name = setup_py_attributes.get("name", "")

        LOG.info("Setting Resilient Platform version for %s to %s",
                 package_name, new_version)

        # Get the customize file location.
        path_customize_py = package_helpers.get_configuration_py_file_path(
            "customize", setup_py_attributes)

        # Get customize.py ImportDefinition
        customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(
            path_customize_py)

        old_version = customize_py_import_definition["server_version"][
            "version"]

        LOG.info("Old Version: %s", old_version)
        LOG.info("New Version: %s", new_version)

        # Set the new version: full string plus major/minor/build components
        # (assumes new_version_int has at least 3 parts, as validated above)
        customize_py_import_definition["server_version"][
            "version"] = new_version
        customize_py_import_definition["server_version"][
            "major"] = new_version_int[0]
        customize_py_import_definition["server_version"][
            "minor"] = new_version_int[1]
        customize_py_import_definition["server_version"][
            "build_number"] = new_version_int[2]

        LOG.info("Loading old customize.py file")

        # Load the customize.py module
        customize_py_module = package_helpers.load_customize_py_module(
            path_customize_py, warn=False)

        # Get the 'old_params' from customize.py
        old_params = customize_py_module.codegen_reload_data()

        # Rename the old customize.py with .bak
        path_customize_py_bak = sdk_helpers.rename_to_bak_file(
            path_customize_py)

        # If local export file exists then save it to a .bak file.
        # (Older packages may not have the /util/data/export.res file)
        # Figure out the path of the files first
        dir_customize_py = os.path.dirname(path_customize_py)
        path_local_export_res = os.path.join(
            dir_customize_py, package_helpers.PATH_LOCAL_EXPORT_RES)

        path_local_export_res_bak = None
        if os.path.isfile(path_local_export_res):
            path_local_export_res_bak = sdk_helpers.rename_to_bak_file(
                path_local_export_res)

        try:

            # Re-extract the template data from the (now patched) definition
            jinja_data = sdk_helpers.get_from_export(
                customize_py_import_definition,
                message_destinations=old_params.get("message_destinations"),
                functions=old_params.get("functions"),
                workflows=old_params.get("workflows"),
                rules=old_params.get("actions"),
                fields=old_params.get("incident_fields"),
                artifact_types=old_params.get("incident_artifact_types"),
                datatables=old_params.get("datatables"),
                tasks=old_params.get("automatic_tasks"),
                scripts=old_params.get("scripts"))

            jinja_data["export_data"] = sdk_helpers.minify_export(
                customize_py_import_definition,
                message_destinations=sdk_helpers.get_object_api_names(
                    ResilientObjMap.MESSAGE_DESTINATIONS,
                    jinja_data.get("message_destinations")),
                functions=sdk_helpers.get_object_api_names(
                    ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                workflows=sdk_helpers.get_object_api_names(
                    ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                rules=sdk_helpers.get_object_api_names(
                    ResilientObjMap.RULES, jinja_data.get("rules")),
                fields=jinja_data.get("all_fields"),
                artifact_types=sdk_helpers.get_object_api_names(
                    ResilientObjMap.INCIDENT_ARTIFACT_TYPES,
                    jinja_data.get("artifact_types")),
                datatables=sdk_helpers.get_object_api_names(
                    ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                tasks=sdk_helpers.get_object_api_names(
                    ResilientObjMap.TASKS, jinja_data.get("tasks")),
                phases=sdk_helpers.get_object_api_names(
                    ResilientObjMap.PHASES, jinja_data.get("phases")),
                scripts=sdk_helpers.get_object_api_names(
                    ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

            # Add package_name to jinja_data
            jinja_data["package_name"] = package_name

            # Add version
            jinja_data["version"] = setup_py_attributes.get(
                "version", package_helpers.MIN_SETUP_PY_VERSION)

            # Instansiate Jinja2 Environment with path to Jinja2 templates for customize.py
            jinja_env = sdk_helpers.setup_jinja_env(
                "data/codegen/templates/package_template/package/util")
            jinja_template = jinja_env.get_template("customize.py.jinja2")

            LOG.info("Writing new customize.py file")

            # Render & write jinja2 template
            jinja_rendered_text = jinja_template.render(jinja_data)
            sdk_helpers.write_file(path_customize_py, jinja_rendered_text)

            # Instansiate Jinja2 Environment with path to Jinja2 templates for /util/dat/export.res
            #jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template/package/util/data")
            # NOTE(review): template name has a leading slash and reuses the
            # 'util' env set up above — looks intentional (loads data/ below
            # util/), but confirm the loader resolves "/data/..." as expected.
            jinja_template = jinja_env.get_template("/data/export.res.jinja2")

            LOG.debug("Writing new /util/data/export.res file")

            # Render jinja2 template
            jinja_rendered_text = jinja_template.render(jinja_data)

            # Make sure the /util/data directory is there if it is not
            dir_local_export_res = os.path.dirname(path_local_export_res)
            if not os.path.exists(dir_local_export_res):
                os.makedirs(dir_local_export_res)

            # Write the file
            sdk_helpers.write_file(path_local_export_res, jinja_rendered_text)

            LOG.info("'dev --set-version' complete for '%s'", package_name)

        except Exception as err:
            # Broad catch: log the failure, then let the finally block
            # restore the .bak files below
            LOG.error(
                u"Error running resilient-sdk dev --set-version\n\nERROR:%s",
                err)

        # This is required in finally block as user may kill using keyboard interrupt
        finally:
            # If an error occurred, customize.py does not exist, rename the backup file to original
            if not os.path.isfile(path_customize_py):
                LOG.info(
                    u"An error occurred. Renaming customize.py.bak to customize.py"
                )
                sdk_helpers.rename_file(path_customize_py_bak,
                                        package_helpers.BASE_NAME_CUSTOMIZE_PY)
            if path_local_export_res_bak and not os.path.isfile(
                    path_local_export_res):
                LOG.info(
                    u"An error occurred. Renaming /util/data/export.res.bak to export.res"
                )
                sdk_helpers.rename_file(
                    path_local_export_res_bak,
                    package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
    def execute_command(self, args):
        """
        Generate a new connector package from Jinja2 templates.

        Validates that the target package directory does not already exist
        and the name is valid, builds a Jinja2 environment over the
        connector-type-specific template directory, and renders the mapped
        file tree into the output directory. With no ``--package`` argument,
        prints the command help instead.

        :param args: parsed command line arguments (``package``, ``output``,
            ``connectortype``)
        :raises SDKException: if the package dir already exists or the
            package name is invalid
        """
        LOG.debug("called: ConnectorCodegenCmd.execute_command()")
        LOG.info(args)
        if args.package:
            SDKException.command_ran = "{0} {1}".format(
                self.CMD_NAME, "--package | -p")
            LOG.info("%s %s" %
                     ("Generating a package of type", DEFAULT_CONNECTOR))

            ###
            # Perform checks
            ###
            if os.path.exists(args.package):
                raise SDKException(
                    u"'{0}' already exists. Add --reload flag to regenerate it"
                    .format(args.package))

            if not sdk_helpers.is_valid_package_name(args.package):
                raise SDKException(u"'{0}' is not a valid package name".format(
                    args.package))

            ###
            # Gather data needed for generation
            ###
            # The package_name will be specified in the args
            package_name = args.package

            # Get output_base, use args.output if defined, else current directory
            output_base = args.output if args.output else os.curdir
            output_base = os.path.join(os.path.abspath(output_base),
                                       package_name)

            jinja_data = {"package_name": package_name}

            # If the output_base directory does not exist, create it
            if not os.path.exists(output_base):
                os.makedirs(output_base)

            # Deferred import: jinja2 only needed on this code path
            from jinja2 import Environment, PackageLoader
            jinja_env = Environment(
                # Loads Jinja Templates in cp4s_connector_sdk/<<relative_path_to_templates>>
                loader=PackageLoader(
                    "cp4s_connector_sdk",
                    get_jinja_env_location(connecter_type=args.connectortype)),
                trim_blocks=True,  # First newline after a block is removed
                # Leading spaces and tabs are stripped from the start of a line to a block
                lstrip_blocks=True,
                keep_trailing_newline=
                True  # Preserve the trailing newline when rendering templates
            )
            # Add custom filters to our jinja_env
            sdk_helpers.add_filters_to_jinja_env(jinja_env)

            # Assign one of the mapping dict function to connector_mapping_collector so that below we call one thing
            # but here we define which thing will be called
            # TODO: Review
            connector_mapping_collector = get_car_connector_mapping_dict
            # Prepare a mapping dict based on type of connector
            # This dict maps our package file structure to  Jinja2 templates
            # TODO: Refactor as there can be a UDI or CAR package_mapping_dict
            package_mapping_dict = connector_mapping_collector(jinja_data)

            ###
            # Generate a connector package using jinja
            ###
            newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
                jinja_mapping_dict=package_mapping_dict,
                jinja_env=jinja_env,
                target_dir=output_base,
                package_dir=output_base)

            ###
            # Report the files that were and were not built
            ###
            if newly_generated_files:
                LOG.debug("Newly generated files:\n\t> %s",
                          "\n\t> ".join(newly_generated_files))

            if skipped_files:
                LOG.debug("Files Skipped:\n\t> %s",
                          "\n\t> ".join(skipped_files))

            LOG.info("%s %s" % ("Codegen run finished for ", package_name))
        else:
            self.parser.print_help()
Example no. 26
0
    def _get_results_from_log_file(cls, args):
        """
        - Gets all function names from the payload_samples directory
        - Traverses the file at the path specified by args.gather_results (in a reversed order)
        - Looks for lines containing ``[<fn_name>] Result: {'version': 2.0, 'success': True...``
        - Parses it and generates an output_json_example.json and output_json_schema.json file for each ``Result`` found
        - Uses the library ``genson`` to generate the JSON schema from a Python Dictionary

        :param args: (required) the parsed cmd line arguments
        :type args: argparse.Namespace
        :raises SDKException: if args.package is not a valid path
        """

        # Check if Python >= MIN_SUPPORTED_PY_VERSION
        if not sdk_helpers.is_python_min_supported_version(constants.ERROR_WRONG_PYTHON_VERSION):
            raise SDKException(constants.ERROR_WRONG_PYTHON_VERSION)

        path_package = os.path.abspath(args.package)
        path_log_file = args.gather_results
        path_payload_samples_dir = os.path.join(path_package, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

        LOG.debug("\nPath to project: %s", path_package)

        sdk_helpers.validate_dir_paths(os.W_OK, path_package)

        package_name = package_helpers.get_package_name(path_package)

        LOG.info("'codegen %s' started for '%s'", constants.SUB_CMD_OPT_GATHER_RESULTS, package_name)
        try:

            sdk_helpers.validate_dir_paths(os.W_OK, path_payload_samples_dir)

        except SDKException as e:

            # Missing payload_samples dir is recoverable: reload the package
            # to create the default files, then continue
            if constants.ERROR_NOT_FIND_DIR in e.message:
                LOG.warning("WARNING: no '%s' found. Running 'codegen --reload' to create the default missing files\n%s", package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR, constants.LOG_DIVIDER)
                args.reload = True
                cls._reload_package(args)
                LOG.warning(constants.LOG_DIVIDER)

            else:
                raise e

        # Either the functions named on the cmd line or every subdir of payload_samples
        functions_that_need_payload_samples = args.function if args.function else os.listdir(path_payload_samples_dir)

        results_scraped = sdk_helpers.scrape_results_from_log_file(path_log_file)

        for fn_name in functions_that_need_payload_samples:

            fn_results = results_scraped.get(fn_name)

            if not fn_results:
                package_helpers.color_output("WARNING: No results could be found for '{0}' in '{1}'".format(fn_name, path_log_file), constants.VALIDATE_LOG_LEVEL_WARNING, do_print=True)
                continue

            LOG.info("Results found for '[%s]'", fn_name)

            path_payload_samples_fn_name = os.path.join(path_payload_samples_dir, fn_name)
            path_output_json_example = os.path.join(path_payload_samples_fn_name, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)
            path_output_json_schema = os.path.join(path_payload_samples_fn_name, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)

            # Back up any existing example/schema files before overwriting
            path_output_json_example_bak = sdk_helpers.rename_to_bak_file(path_output_json_example)
            path_output_json_schema_bak = sdk_helpers.rename_to_bak_file(path_output_json_schema)

            try:
                LOG.debug("Writing JSON example file for '%s' to '%s'", fn_name, path_output_json_example)
                sdk_helpers.write_file(path_output_json_example, json.dumps(fn_results, indent=2))

                LOG.debug("Writing JSON schema file for '%s' to '%s'", fn_name, path_output_json_schema)
                builder = CustomSchemaBuilder(schema_uri=constants.CODEGEN_JSON_SCHEMA_URI)
                main_genson_builder_overwrites(builder)
                builder.add_object(fn_results)
                sdk_helpers.write_file(path_output_json_schema, builder.to_json(indent=2))

            finally:
                # Restore backups if the new files did not get written
                if not os.path.isfile(path_output_json_example) and path_output_json_example_bak:
                    LOG.info(u"An error occurred. Renaming %s.bak to %s", package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)
                    sdk_helpers.rename_file(path_output_json_example_bak, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)

                if not os.path.isfile(path_output_json_schema) and path_output_json_schema_bak:
                    LOG.info(u"An error occurred. Renaming %s.bak to %s", package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)
                    sdk_helpers.rename_file(path_output_json_schema_bak, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)

        LOG.info("'codegen %s' complete for '%s'", constants.SUB_CMD_OPT_GATHER_RESULTS, package_name)
Example no. 27
0
    def _reload_package(args):

        old_params, path_customize_py_bak = [], ""

        # Get absolute path to package
        path_package = os.path.abspath(args.package)

        LOG.debug("\nPath to project: %s", path_package)

        # Ensure the package directory exists and we have WRITE access
        sdk_helpers.validate_dir_paths(os.W_OK, path_package)

        path_setup_py_file = os.path.join(path_package, package_helpers.BASE_NAME_SETUP_PY)

        package_name = package_helpers.get_package_name(path_package)

        if not sdk_helpers.is_valid_package_name(package_name):
            raise SDKException(u"'{0}' is not a valid package name. 'name' attribute in setup.py file is not valid or not specified".format(package_name))

        LOG.debug("\nProject name: %s", package_name)

        # Generate path to customize.py file + validate we have permissions to read it
        path_customize_py = os.path.join(path_package, package_name, package_helpers.PATH_CUSTOMIZE_PY)
        sdk_helpers.validate_file_paths(os.W_OK, path_customize_py)

        # Set package + output args correctly (this handles if user runs 'codegen --reload -p .')
        args.package = package_name
        args.output = path_package

        LOG.info("'codegen --reload' started for '%s'", args.package)

        # Load the customize.py module
        customize_py_module = package_helpers.load_customize_py_module(path_customize_py, warn=False)

        try:
            # Get the 'old_params' from customize.py
            old_params = customize_py_module.codegen_reload_data()
        except AttributeError:
            raise SDKException(u"Corrupt customize.py. No reload method found in {0}".format(path_customize_py))

        if not old_params:
            raise SDKException(u"No reload params found in {0}".format(path_customize_py))

        # Rename the old customize.py with .bak
        path_customize_py_bak = sdk_helpers.rename_to_bak_file(path_customize_py)

        # If local export file exists then save it to a .bak file.
        # (Older packages may not have the /util/data/export.res file)
        path_export_res = os.path.join(path_package, package_name,
                                       package_helpers.PATH_UTIL_DATA_DIR,
                                       package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
        if os.path.isfile(path_export_res):
            path_export_res_bak = sdk_helpers.rename_to_bak_file(path_export_res)
        else:
            path_export_res_bak = None

        try:
            # Map command line arg name to dict key returned by codegen_reload_data() in customize.py
            mapping_tuples = [
                ("messagedestination", "message_destinations"),
                ("function", "functions"),
                ("workflow", "workflows"),
                ("rule", "actions"),
                ("field", "incident_fields"),
                ("artifacttype", "incident_artifact_types"),
                ("incidenttype", "incident_types"),
                ("datatable", "datatables"),
                ("task", "automatic_tasks"),
                ("script", "scripts"),
                ("playbook", "playbooks")
            ]

            # Merge old_params with new params specified on command line
            args = CmdCodegen.merge_codegen_params(old_params, args, mapping_tuples)

            # Parse the setup.py file
            setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

            LOG.debug("Regenerating codegen '%s' package now", args.package)

            # Regenerate the package
            path_reloaded = CmdCodegen._gen_package(args, setup_py_attributes=setup_py_attributes)

            LOG.info("\nNOTE: Ensure the MANIFEST.in file includes line:\nrecursive-include %s/util *\n", args.package)
            LOG.info("'codegen --reload' complete for '%s'", args.package)

            return path_reloaded

        # This is required in finally block as user may kill using keyboard interrupt
        finally:
            # If an error occurred, customize.py does not exist, rename the backup file to original
            if not os.path.isfile(path_customize_py):
                LOG.info(u"An error occurred. Renaming customize.py.bak to customize.py")
                sdk_helpers.rename_file(path_customize_py_bak, package_helpers.BASE_NAME_CUSTOMIZE_PY)
            if not os.path.isfile(path_export_res) and path_export_res_bak:
                LOG.info(u"An error occurred. Renaming export.res.bak to export.res")
                sdk_helpers.rename_file(path_export_res_bak, package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
Exemplo n.º 28
0
    def _gen_package(args, setup_py_attributes=None):
        """
        Generate the file structure for a codegen package.

        Reads an org export (from the ``--exportfile`` file or live from the
        SOAR server), extracts the objects selected in ``args``, then renders
        the package files from Jinja2 templates into the output directory.

        :param args: parsed command line args (``package``, ``output``,
            ``reload``, ``exportfile``, ``config`` plus the object-selection
            attributes such as ``function``, ``workflow``, ``rule``, ...)
        :param setup_py_attributes: attributes parsed from an existing
            setup.py file (supplied on ``--reload``); ``None`` is treated
            as an empty dict
        :type setup_py_attributes: dict
        :raises SDKException: if the package already exists (and ``--reload``
            was not given), the package name is invalid, or a generated file
            name clashes with an existing function/workflow name
        :return: path to the base directory the package was generated in
        :rtype: str
        """
        # Fix: avoid the mutable-default-argument pitfall; normalize None to a fresh dict
        if setup_py_attributes is None:
            setup_py_attributes = {}

        LOG.info("Generating codegen package...")

        sdk_helpers.is_python_min_supported_version()

        if os.path.exists(args.package) and not args.reload:
            raise SDKException(u"'{0}' already exists. Add --reload flag to regenerate it".format(args.package))

        if not sdk_helpers.is_valid_package_name(args.package):
            raise SDKException(u"'{0}' is not a valid package name".format(args.package))

        # The package_name will be specified in the args
        package_name = args.package

        # Get output_base, use args.output if defined, else current directory
        output_base = args.output if args.output else os.curdir
        output_base = os.path.abspath(output_base)

        # If --exportfile is specified, read org_export from that file
        if args.exportfile:
            LOG.info("Using local export file: %s", args.exportfile)
            org_export = sdk_helpers.read_local_exportfile(args.exportfile)

        else:
            # Instantiate connection to the Resilient Appliance
            res_client = sdk_helpers.get_resilient_client(path_config_file=args.config)

            # Generate + get latest export from Resilient Server
            org_export = sdk_helpers.get_latest_org_export(res_client)

        # Get data required for Jinja2 templates from export
        jinja_data = sdk_helpers.get_from_export(org_export,
                                                 message_destinations=args.messagedestination,
                                                 functions=args.function,
                                                 workflows=args.workflow,
                                                 rules=args.rule,
                                                 fields=args.field,
                                                 artifact_types=args.artifacttype,
                                                 datatables=args.datatable,
                                                 tasks=args.task,
                                                 scripts=args.script,
                                                 incident_types=args.incidenttype,
                                                 playbooks=args.playbook)

        # Get 'minified' version of the export. This is used in customize.py
        jinja_data["export_data"] = sdk_helpers.minify_export(org_export,
                                                              message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                                                              functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                                                              workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                                                              rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                                                              fields=jinja_data.get("all_fields"),
                                                              artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
                                                              datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                                                              tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
                                                              phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
                                                              scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")),
                                                              incident_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_TYPES, jinja_data.get("incident_types")),
                                                              playbooks=sdk_helpers.get_object_api_names(ResilientObjMap.PLAYBOOKS, jinja_data.get("playbooks")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version (fall back to the minimum supported setup.py version)
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        jinja_data["resilient_libraries_version"] = sdk_helpers.get_resilient_libraries_version_to_use()

        # Validate we have write permissions
        sdk_helpers.validate_dir_paths(os.W_OK, output_base)

        if not args.reload:
            # If this is not a reload, join package_name to output base
            output_base = os.path.join(output_base, package_name)

        # If the output_base directory does not exist, create it
        if not os.path.exists(output_base):
            os.makedirs(output_base)

        # Instantiate Jinja2 Environment with path to Jinja2 templates
        jinja_env = sdk_helpers.setup_jinja_env(constants.PACKAGE_TEMPLATE_PATH)

        # This dict maps our package file structure to Jinja2 templates
        package_mapping_dict = {
            "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
            "README.md": ("README.md.jinja2", jinja_data),
            "setup.py": ("setup.py.jinja2", jinja_data),
            "tox.ini": ("tox.ini.jinja2", jinja_data),
            "Dockerfile": ("Dockerfile.jinja2", jinja_data),
            "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
            "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
            "data": {},
            "icons": {
                "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
                "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
            },
            "doc": {
                "screenshots": {
                    "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
                }
            },
            package_name: {
                "__init__.py": ("package/__init__.py.jinja2", jinja_data),
                "LICENSE": ("package/LICENSE.jinja2", jinja_data),

                "components": {
                    "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
                },
                "util": {
                    "data": {
                        "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                    },
                    "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                    "config.py": ("package/util/config.py.jinja2", jinja_data),
                    "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                    "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
                }
            }
        }

        # If there are Functions, add a 'tests' and a 'payload_samples' directory (if in dev mode)
        if jinja_data.get("functions"):
            package_mapping_dict["tests"] = {}
            package_mapping_dict[package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR] = {}

        # Get a list of function names in export.
        # ('or []' guards against a missing/None "functions" entry in the export data)
        fn_names = [f.get(ResilientObjMap.FUNCTIONS) for f in jinja_data.get("functions") or []]

        # Loop each Function
        for f in jinja_data.get("functions") or []:
            # Add package_name to function data
            f["package_name"] = package_name

            # Get function name
            fn_name = f.get(ResilientObjMap.FUNCTIONS)

            # Generate funct_function_component.py file name
            # Don't add prefix if function name already begins with "func_" or "funct_".
            if re.search(r"^(func|funct)_", fn_name):
                file_name = u"{0}.py".format(fn_name)
            else:
                file_name = u"funct_{0}.py".format(fn_name)
                # Check if file_name without extension already exists in functions names list.
                if os.path.splitext(file_name)[0] in fn_names:
                    raise SDKException(u"File name '{0}' already in use please rename the function '{1}'."
                                       .format(file_name, fn_name))

            # Add an 'atomic function' to 'components' directory else add a 'normal function'
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/atomic_function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

            # Add a 'payload_samples/fn_name' directory and the files to it
            CmdCodegen.add_payload_samples(package_mapping_dict, fn_name, f)

        # Get a list of workflow names in export.
        # ('or []' guards against a missing/None "workflows" entry in the export data)
        wf_names = [w.get(ResilientObjMap.WORKFLOWS) for w in jinja_data.get("workflows") or []]

        for w in jinja_data.get("workflows") or []:
            # Get workflow name
            wf_name = w.get(ResilientObjMap.WORKFLOWS)

            # Generate wf_xx.md file name
            # Don't add prefix if workflow name already begins with "wf_".
            if re.search(r"^wf_", wf_name):
                file_name = u"{0}.md".format(wf_name)
            else:
                file_name = u"wf_{0}.md".format(wf_name)
                # Check if file_name without extension already exists in workflow names list.
                if os.path.splitext(file_name)[0] in wf_names:
                    raise SDKException(u"File name '{0}' already in use please recreate the workflow '{1}'."
                                       .format(file_name, wf_name))

            # Add workflow to data directory
            package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

        newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
            jinja_mapping_dict=package_mapping_dict,
            jinja_env=jinja_env,
            target_dir=output_base,
            package_dir=output_base)

        # Log new and skipped files
        if newly_generated_files:
            LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

        if skipped_files:
            LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

        LOG.info("'codegen' complete for '%s'", package_name)

        return output_base
Exemplo n.º 29
0
def create_extension(path_setup_py_file,
                     path_apikey_permissions_file,
                     output_dir,
                     path_built_distribution=None,
                     path_extension_logo=None,
                     path_company_logo=None,
                     custom_display_name=None,
                     repository_name=None,
                     keep_build_dir=False):
    """
    Create the App .zip file from the given setup.py, customize and config
    files and copy it to ``output_dir``.

    :param path_setup_py_file: abs path to the setup.py file
    :type path_setup_py_file: str
    :param path_apikey_permissions_file: abs path to the apikey_permissions.txt file
    :type path_apikey_permissions_file: str
    :param output_dir: abs path to the directory the App.zip should be produced in
    :type output_dir: str
    :param path_built_distribution: abs path to a tar.gz Built Distribution.
        If provided uses that .tar.gz, else looks for it in the output_dir
        (e.g. output_dir/package_name.tar.gz)
    :type path_built_distribution: str
    :param path_extension_logo: abs path to the app_logo.png. Has to be 200x72
        and a .png file. If not provided uses the default icon
    :type path_extension_logo: str
    :param path_company_logo: abs path to the company_logo.png. Has to be 100x100
        and a .png file. If not provided uses the default icon
    :type path_company_logo: str
    :param custom_display_name: display name to give the App.
        Default: 'name' attribute from the setup.py file
    :type custom_display_name: str
    :param repository_name: overrides the container repository name for the App.
        Default: 'ibmresilient'
    :type repository_name: str
    :param keep_build_dir: if True, the build/ directory is not removed.
        Default: False
    :type keep_build_dir: bool
    :raises SDKException: if any validation, parsing or packaging step fails
    :return: abs path to the produced App .zip file
    :rtype: str
    """

    LOG.info("Creating App")

    # Ensure the output_dir exists, we have WRITE access and ensure we can READ setup.py and apikey_permissions.txt
    # files.
    sdk_helpers.validate_dir_paths(os.W_OK, output_dir)
    sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file,
                                    path_apikey_permissions_file)

    # Parse the setup.py file
    setup_py_attributes = parse_setup_py(path_setup_py_file,
                                         SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    # Validate setup.py attributes

    # Validate the name attribute. Raise exception if invalid
    if not sdk_helpers.is_valid_package_name(setup_py_attributes.get("name")):
        raise SDKException(
            "'{0}' is not a valid App name. The name attribute must be defined and can only include 'a-z and _'.\nUpdate this value in the setup.py file located at: {1}"
            .format(setup_py_attributes.get("name"), path_setup_py_file))

    # Validate the version attribute. Raise exception if invalid
    if not sdk_helpers.is_valid_version_syntax(
            setup_py_attributes.get("version")):
        raise SDKException(
            "'{0}' is not a valid App version syntax. The version attribute must be defined. Example: version=\"1.0.0\".\nUpdate this value in the setup.py file located at: {1}"
            .format(setup_py_attributes.get("version"), path_setup_py_file))

    # Validate the url supplied in the setup.py file, set to an empty string if not valid
    if not sdk_helpers.is_valid_url(setup_py_attributes.get("url")):
        LOG.warning("WARNING: '%s' is not a valid url. Ignoring.",
                    setup_py_attributes.get("url"))
        setup_py_attributes["url"] = ""

    # Get the tag name
    tag_name = setup_py_attributes.get("name")

    # Get the customize file location.
    path_customize_py_file = get_configuration_py_file_path(
        "customize", setup_py_attributes)

    # Get the config file location.
    path_config_py_file = get_configuration_py_file_path(
        "config", setup_py_attributes)

    # Get ImportDefinition from the discovered customize file.
    if path_customize_py_file:
        import_definition = get_import_definition_from_customize_py(
            path_customize_py_file)
    else:
        # No 'customize.py' file found generate import definition with just minimum server version.
        import_definition = {'server_version': IMPORT_MIN_SERVER_VERSION}

    # Add the tag to the import definition
    import_definition = add_tag_to_import_definition(tag_name,
                                                     SUPPORTED_RES_OBJ_NAMES,
                                                     import_definition)

    # Parse the app.configs from the discovered config file
    if path_config_py_file:
        app_configs = get_configs_from_config_py(path_config_py_file)
    else:
        # No config file found generate an empty definition.
        app_configs = ("", [])

    # Parse the api key permissions from the apikey_permissions.txt file
    apikey_permissions = get_apikey_permissions(path_apikey_permissions_file)

    # Generate the name for the extension
    extension_name = "{0}-{1}".format(setup_py_attributes.get("name"),
                                      setup_py_attributes.get("version"))

    # Generate the uuid
    uuid = sdk_helpers.generate_uuid_from_string(
        setup_py_attributes.get("name"))

    # Set the container repository name to default if value not passed in as argument.
    if not repository_name:
        repository_name = REPOSITORY_NAME

    # Generate paths to the directories and files we will use in the build directory
    path_build = os.path.join(output_dir, BASE_NAME_BUILD)
    path_extension_json = os.path.join(path_build, BASE_NAME_EXTENSION_JSON)
    path_export_res = os.path.join(path_build, BASE_NAME_EXPORT_RES)

    try:
        # If there is an old build directory, remove it first
        if os.path.exists(path_build):
            shutil.rmtree(path_build)

        # Create the directories for the path "/build/"
        os.makedirs(path_build)

        # If no path_built_distribution is given, use the default: "<output_dir>/<package-name>.tar.gz"
        if not path_built_distribution:
            path_built_distribution = os.path.join(
                output_dir, "{0}.tar.gz".format(extension_name))

        # Validate the built distribution exists and we have READ access
        sdk_helpers.validate_file_paths(os.R_OK, path_built_distribution)

        # Copy the built distribution to the build dir and enforce rename to .tar.gz
        # Fix: this copy used to be performed twice (an identical shutil.copy
        # appeared again further down) - one copy is sufficient
        shutil.copy(
            path_built_distribution,
            os.path.join(path_build, "{0}.tar.gz".format(extension_name)))

        # Get the extension_logo (icon) and company_logo (author.icon) as base64 encoded strings
        extension_logo = get_icon(
            icon_name=os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO),
            path_to_icon=path_extension_logo,
            width_accepted=200,
            height_accepted=72,
            default_path_to_icon=PATH_DEFAULT_ICON_EXTENSION_LOGO)

        company_logo = get_icon(
            icon_name=os.path.basename(PATH_DEFAULT_ICON_COMPANY_LOGO),
            path_to_icon=path_company_logo,
            width_accepted=100,
            height_accepted=100,
            default_path_to_icon=PATH_DEFAULT_ICON_COMPANY_LOGO)

        # Get the display name
        # Use --display-name if passed
        # If not use 'display_name' attribute in setup.py
        # If not set use the 'name' attribute in setup.py
        display_name = custom_display_name or setup_py_attributes.get(
            "display_name") or setup_py_attributes.get("name")

        # Image string is all lowercase on quay.io
        image_name = "{0}/{1}:{2}".format(repository_name,
                                          setup_py_attributes.get("name"),
                                          setup_py_attributes.get("version"))
        image_name = image_name.lower()

        # Generate the contents for the extension.json file
        the_extension_json_file_contents = {
            "author": {
                "name": setup_py_attributes.get("author"),
                "website": setup_py_attributes.get("url"),
                "icon": {
                    "data": company_logo,
                    "media_type": "image/png"
                }
            },
            "description": {
                "content": setup_py_attributes.get("description"),
                "format": "text"
            },
            "display_name": display_name,
            "icon": {
                "data": extension_logo,
                "media_type": "image/png"
            },
            "long_description": {
                "content":
                "<div>{0}</div>".format(
                    setup_py_attributes.get("long_description")),
                "format":
                "html"
            },
            "minimum_resilient_version": {
                "major":
                import_definition.get("server_version").get("major", None),
                "minor":
                import_definition.get("server_version").get("minor", None),
                "build_number":
                import_definition.get("server_version").get(
                    "build_number", None),
                "version":
                import_definition.get("server_version").get("version", None)
            },
            "name": setup_py_attributes.get("name"),
            "tag": {
                "prefix": tag_name,
                "name": tag_name,
                "display_name": tag_name,
                "uuid": uuid
            },
            "uuid": uuid,
            "version": setup_py_attributes.get("version"),
            "current_installation": {
                "executables": [{
                    "name": setup_py_attributes.get("name"),
                    "image": image_name,
                    "config_string": app_configs[0],
                    "permission_handles": apikey_permissions,
                    "uuid": uuid
                }]
            }
        }

        # Write the executable.json file
        sdk_helpers.write_file(
            path_extension_json,
            json.dumps(the_extension_json_file_contents, sort_keys=True))

        # Write the customize ImportDefinition to the app*.zip export.res file
        sdk_helpers.write_file(path_export_res,
                               json.dumps(import_definition, sort_keys=True))

        # create The Extension Zip by zipping the build directory.
        # shutil.make_archive returns the full path to the archive it created,
        # so use its return value directly (the previous os.path.join of the
        # base path with the archive path only produced a usable result
        # because joining with an absolute second path discards the first)
        extension_zip_base_path = os.path.join(
            output_dir, "{0}{1}".format(PREFIX_EXTENSION_ZIP, extension_name))
        path_the_extension_zip = shutil.make_archive(
            base_name=extension_zip_base_path,
            format="zip",
            root_dir=path_build)

    except SDKException:
        # Re-raise our own exceptions untouched; bare raise preserves the traceback
        raise

    except Exception as err:
        raise SDKException(err)

    finally:
        # Remove the build dir. Keep it if user passes --keep-build-dir.
        # Guard with exists() so a failure before the dir was created does
        # not mask the original exception with an error from rmtree
        if not keep_build_dir and os.path.exists(path_build):
            shutil.rmtree(path_build)

    LOG.info("App %s.zip created", "{0}{1}".format(PREFIX_EXTENSION_ZIP,
                                                   extension_name))

    # Return the path to the extension zip
    return path_the_extension_zip
Exemplo n.º 30
0
def get_configs_from_config_py(path_config_py_file):
    """
    Load the discovered config module and extract its configuration settings.

    Returns a tuple ``(config_str, config_list)``; ``("", [])`` when the
    module defines no configs.

    - config_str: the app host config section (when present) and the base
      config section joined with a newline
    - config_list: a list of dicts, one per un-commented setting, each with
      the keys: name, placeholder, env_name, section_name

    :param path_config_py_file: path to the config .py file to load
    :raises SDKException: if the module cannot be imported or its configs
        cannot be parsed
    """

    parsed_settings = []

    try:
        # Derive the module name from the file's basename, minus the '.py' suffix
        module_name = os.path.basename(path_config_py_file)[:-3]

        # Import the config module from its file path
        imported_config = sdk_helpers.load_py_module(path_config_py_file, module_name)

        # The base config section is mandatory
        base_section = imported_config.config_section_data()

        # An app host config section is optional - older packages may not define it
        try:
            apphost_section = imported_config.apphost_config_section_data()
        except AttributeError:
            apphost_section = None

        # Drop empty/missing sections, then build the concatenated string
        # (app host section first, matching the order they are parsed below)
        sections = [section for section in (apphost_section, base_section) if section]
        combined_str = '\n'.join(sections)

        for section_text in sections:
            # A fresh parser per section keeps their settings independent
            parser = configparser.ConfigParser()

            # readfp() was deprecated and replaced by read_file() in PY3.2
            if sys.version_info < (3, 2):
                parser.readfp(StringIO(section_text))
            else:
                parser.read_file(StringIO(section_text))

            # Collect every setting from every section in this chunk
            for section_name in parser.sections():
                for opt_name, opt_value in parser.items(section_name):
                    parsed_settings.append({
                        "name": opt_name,
                        "placeholder": opt_value,
                        "env_name": "{0}_{1}".format(section_name.upper(), opt_name.upper()),
                        "section_name": section_name
                    })

    except ImportError as err:
        raise SDKException(
            u"Failed to load module '{0}' got error '{1}'".format(
                module_name, err.__repr__()))

    except Exception as err:
        raise SDKException(
            u"Failed to parse configs from the config file\nThe config file may be corrupt. Visit "
            u"the App Exchange to contact the developer\nReason: {0}".format(
                err))

    return (combined_str, parsed_settings)