Example #1
def test_validate_dir_paths(fx_mk_temp_dir):
    non_exist_dir = "/non_exits/path/"
    with pytest.raises(SDKException, match=r"Could not find directory: " + non_exist_dir):
        sdk_helpers.validate_dir_paths(None, non_exist_dir)

    exists_dir = mock_paths.TEST_TEMP_DIR

    sdk_helpers.validate_dir_paths(None, exists_dir)
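
For reference, a minimal sketch of the helper this test exercises. It assumes validate_dir_paths checks that each path is an existing directory with the requested os access mode and raises SDKException otherwise; the names follow the test above, not the actual resilient-sdk implementation.

import os

class SDKException(Exception):
    """Simplified stand-in for the SDK's exception type."""

def validate_dir_paths(permissions, *paths):
    """Raise SDKException if any path is not a directory or lacks the given
    access mode (e.g. os.R_OK / os.W_OK). `permissions` may be None to skip
    the access check, as in the test above."""
    for path in paths:
        if not os.path.isdir(path):
            raise SDKException("Could not find directory: {0}".format(path))
        if permissions is not None and not os.access(path, permissions):
            raise SDKException("Insufficient permissions for directory: {0}".format(path))
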
Example #2
    def _reload_package(args):

        old_params, path_customize_py_bak = [], ""

        # Get + validate package, customize.py and setup.py paths
        path_package = os.path.abspath(args.package)
        # Get basename of path_package (version information is stripped from the basename).
        path_package_basename = re.split(VERSION_REGEX, os.path.basename(path_package), 1)[0]
        sdk_helpers.validate_dir_paths(os.R_OK, path_package)

        path_customize_py = os.path.join(path_package, path_package_basename, package_helpers.PATH_CUSTOMIZE_PY)
        sdk_helpers.validate_file_paths(os.W_OK, path_customize_py)

        path_setup_py_file = os.path.join(path_package, package_helpers.PATH_SETUP_PY)
        sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file)

        # Set package + output args correctly (this handles if user runs 'codegen --reload -p .')
        args.package = os.path.basename(path_package)
        args.output = os.path.dirname(path_package)

        LOG.info("'codegen --reload' started for '%s'", args.package)

        # Load the customize.py module
        customize_py_module = package_helpers.load_customize_py_module(path_customize_py, warn=False)

        try:
            # Get the 'old_params' from customize.py
            old_params = customize_py_module.codegen_reload_data()
        except AttributeError:
            raise SDKException(u"Corrupt customize.py. No reload method found in {0}".format(path_customize_py))

        if not old_params:
            raise SDKException(u"No reload params found in {0}".format(path_customize_py))

        # Rename the old customize.py with .bak
        path_customize_py_bak = sdk_helpers.rename_to_bak_file(path_customize_py)

        # If local export file exists then save it to a .bak file.
        # (Older packages may not have the /util/data/export.res file)
        path_export_res = os.path.join(path_package, path_package_basename,
                                       package_helpers.PATH_UTIL_DATA_DIR,
                                       package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
        if os.path.isfile(path_export_res):
            path_export_res_bak = sdk_helpers.rename_to_bak_file(path_export_res)
        else:
            path_export_res_bak = None

        try:
            # Map command line arg name to dict key returned by codegen_reload_data() in customize.py
            mapping_tuples = [
                ("messagedestination", "message_destinations"),
                ("function", "functions"),
                ("workflow", "workflows"),
                ("rule", "actions"),
                ("field", "incident_fields"),
                ("artifacttype", "incident_artifact_types"),
                ("datatable", "datatables"),
                ("task", "automatic_tasks"),
                ("script", "scripts")
            ]

            # Merge old_params with new params specified on command line
            args = CmdCodegen.merge_codegen_params(old_params, args, mapping_tuples)

            # Parse the setup.py file
            setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

            LOG.debug("Regenerating codegen '%s' package now", args.package)

            # Regenerate the package
            CmdCodegen._gen_package(args, setup_py_attributes=setup_py_attributes)

            LOG.info("\nNOTE: Ensure the MANIFEST.in file includes line:\nrecursive-include %s/util *\n", args.package)
            LOG.info("'codegen --reload' complete for '%s'", args.package)

        except Exception as err:
            LOG.error(u"Error running resilient-sdk codegen --reload\n\nERROR:%s", err)

        # This is required in a finally block as the user may kill the process with a keyboard interrupt
        finally:
            # If an error occurred and customize.py does not exist, rename the backup file back to the original
            if not os.path.isfile(path_customize_py):
                LOG.info(u"An error occurred. Renaming customize.py.bak to customize.py")
                sdk_helpers.rename_file(path_customize_py_bak, package_helpers.BASE_NAME_CUSTOMIZE_PY)
            if not os.path.isfile(path_export_res) and path_export_res_bak:
                LOG.info(u"An error occurred. Renaming export.res.bak to export.res")
                sdk_helpers.rename_file(path_export_res_bak, package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
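
The reload flow above depends on a back-up-then-restore pattern around customize.py and export.res. A rough sketch of the two helpers it uses, under the assumption that rename_to_bak_file appends ".bak" and returns the new path while rename_file renames a file to a new basename inside its own directory (illustrative only, not the actual sdk_helpers code):

import os

def rename_to_bak_file(path_current_file):
    # Assumed behaviour: rename '<file>' to '<file>.bak' and return the backup path
    path_bak = "{0}.bak".format(path_current_file)
    os.rename(path_current_file, path_bak)
    return path_bak

def rename_file(path_current_file, new_name):
    # Assumed behaviour: rename the file to `new_name` within the same directory
    os.rename(path_current_file, os.path.join(os.path.dirname(path_current_file), new_name))
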
Example #3
    def _gen_package(args, setup_py_attributes={}):

        LOG.info("Generating codegen package...")

        if not sdk_helpers.is_valid_package_name(args.package):
            raise SDKException(u"'{0}' is not a valid package name".format(args.package))

        # Strip off version information, if present in package base folder, to get the package name.
        package_name = re.split(VERSION_REGEX, args.package, 1)[0]
        # Get base version if we are running against a package base folder with version.
        base_version = ''.join(re.split(package_name, args.package))

        # Get output_base, use args.output if defined, else current directory
        output_base = args.output if args.output else os.curdir
        output_base = os.path.abspath(output_base)

        # If --exportfile is specified, read org_export from that file
        if args.exportfile:
            LOG.info("Using local export file: %s", args.exportfile)
            org_export = sdk_helpers.read_local_exportfile(args.exportfile)

        else:
            # Instantiate connection to the Resilient Appliance
            res_client = sdk_helpers.get_resilient_client()

            # Generate + get latest export from Resilient Server
            org_export = sdk_helpers.get_latest_org_export(res_client)

        # Get data required for Jinja2 templates from export
        jinja_data = sdk_helpers.get_from_export(org_export,
                                                 message_destinations=args.messagedestination,
                                                 functions=args.function,
                                                 workflows=args.workflow,
                                                 rules=args.rule,
                                                 fields=args.field,
                                                 artifact_types=args.artifacttype,
                                                 datatables=args.datatable,
                                                 tasks=args.task,
                                                 scripts=args.script)

        # Get 'minified' version of the export. This is used in customize.py
        jinja_data["export_data"] = sdk_helpers.minify_export(org_export,
                                                              message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                                                              functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                                                              workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                                                              rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                                                              fields=jinja_data.get("all_fields"),
                                                              artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
                                                              datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                                                              tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
                                                              phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
                                                              scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        # Validate we have write permissions
        sdk_helpers.validate_dir_paths(os.W_OK, output_base)

        # Join package_name to output base (add base version if running against a folder which includes a version).
        output_base = os.path.join(output_base, package_name+base_version)

        # If the output_base directory does not exist, create it
        if not os.path.exists(output_base):
            os.makedirs(output_base)

        # Instantiate Jinja2 Environment with path to Jinja2 templates
        jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template")

        # This dict maps our package file structure to Jinja2 templates
        package_mapping_dict = {
            "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
            "README.md": ("README.md.jinja2", jinja_data),
            "setup.py": ("setup.py.jinja2", jinja_data),
            "tox.ini": ("tox.ini.jinja2", jinja_data),
            "Dockerfile": ("Dockerfile.jinja2", jinja_data),
            "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
            "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
            "data": {},
            "icons": {
                "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
                "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
            },
            "doc": {
                "screenshots": {
                    "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
                }
            },
            package_name: {
                "__init__.py": ("package/__init__.py.jinja2", jinja_data),
                "LICENSE": ("package/LICENSE.jinja2", jinja_data),

                "components": {
                    "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
                },
                "util": {
                    "data": {
                        "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                    },
                    "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                    "config.py": ("package/util/config.py.jinja2", jinja_data),
                    "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                    "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
                }
            }
        }

        # If there are Functions, add a 'tests' directory
        if jinja_data.get("functions"):
            package_mapping_dict["tests"] = {}

        # Loop each Function
        for f in jinja_data.get("functions"):
            # Add package_name to function data
            f["package_name"] = package_name

            # Generate function_component.py file name
            file_name = u"funct_{0}.py".format(f.get("export_key"))

            # Add to 'components' directory
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

        for w in jinja_data.get("workflows"):

            # Generate wf_xx.md file name
            file_name = u"wf_{0}.md".format(w.get(ResilientObjMap.WORKFLOWS))

            # Add workflow to data directory
            package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

        newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
            jinja_mapping_dict=package_mapping_dict,
            jinja_env=jinja_env,
            target_dir=output_base,
            package_dir=output_base)

        # Log new and skipped files
        if newly_generated_files:
            LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

        if skipped_files:
            LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

        LOG.info("'codegen' complete for '%s'", package_name)
Example #4
    def _get_results_from_log_file(cls, args):
        """
        - Gets all function names from the payload_samples directory
        - Traverses the file at the path specified by args.gather_results (in a reversed order)
        - Looks for lines containing ``[<fn_name>] Result: {'version': 2.0, 'success': True...``
        - Parses it and generates an output_json_example.json and output_json_schema.json file for each ``Result`` found
        - Uses the library ``genson`` to generate the JSON schema from a Python dictionary

        :param args: (required) the cmd line arguments
        :type args: argparse.Namespace
        :raises: an SDKException if args.package is not a valid path
        """

        # Check if Python >= MIN_SUPPORTED_PY_VERSION
        if not sdk_helpers.is_python_min_supported_version(constants.ERROR_WRONG_PYTHON_VERSION):
            raise SDKException(constants.ERROR_WRONG_PYTHON_VERSION)

        path_package = os.path.abspath(args.package)
        path_log_file = args.gather_results
        path_payload_samples_dir = os.path.join(path_package, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

        LOG.debug("\nPath to project: %s", path_package)

        sdk_helpers.validate_dir_paths(os.W_OK, path_package)

        package_name = package_helpers.get_package_name(path_package)

        LOG.info("'codegen %s' started for '%s'", constants.SUB_CMD_OPT_GATHER_RESULTS, package_name)
        try:

            sdk_helpers.validate_dir_paths(os.W_OK, path_payload_samples_dir)

        except SDKException as e:

            if constants.ERROR_NOT_FIND_DIR in e.message:
                LOG.warning("WARNING: no '%s' found. Running 'codegen --reload' to create the default missing files\n%s", package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR, constants.LOG_DIVIDER)
                args.reload = True
                cls._reload_package(args)
                LOG.warning(constants.LOG_DIVIDER)

            else:
                raise e

        functions_that_need_payload_samples = args.function if args.function else os.listdir(path_payload_samples_dir)

        results_scraped = sdk_helpers.scrape_results_from_log_file(path_log_file)

        for fn_name in functions_that_need_payload_samples:

            fn_results = results_scraped.get(fn_name)

            if not fn_results:
                package_helpers.color_output("WARNING: No results could be found for '{0}' in '{1}'".format(fn_name, path_log_file), constants.VALIDATE_LOG_LEVEL_WARNING, do_print=True)
                continue

            LOG.info("Results found for '[%s]'", fn_name)

            path_payload_samples_fn_name = os.path.join(path_payload_samples_dir, fn_name)
            path_output_json_example = os.path.join(path_payload_samples_fn_name, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)
            path_output_json_schema = os.path.join(path_payload_samples_fn_name, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)

            path_output_json_example_bak = sdk_helpers.rename_to_bak_file(path_output_json_example)
            path_output_json_schema_bak = sdk_helpers.rename_to_bak_file(path_output_json_schema)

            try:
                LOG.debug("Writing JSON example file for '%s' to '%s'", fn_name, path_output_json_example)
                sdk_helpers.write_file(path_output_json_example, json.dumps(fn_results, indent=2))

                LOG.debug("Writing JSON schema file for '%s' to '%s'", fn_name, path_output_json_schema)
                builder = CustomSchemaBuilder(schema_uri=constants.CODEGEN_JSON_SCHEMA_URI)
                main_genson_builder_overwrites(builder)
                builder.add_object(fn_results)
                sdk_helpers.write_file(path_output_json_schema, builder.to_json(indent=2))

            finally:
                if not os.path.isfile(path_output_json_example) and path_output_json_example_bak:
                    LOG.info(u"An error occurred. Renaming %s.bak to %s", package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)
                    sdk_helpers.rename_file(path_output_json_example_bak, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)

                if not os.path.isfile(path_output_json_schema) and path_output_json_schema_bak:
                    LOG.info(u"An error occurred. Renaming %s.bak to %s", package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)
                    sdk_helpers.rename_file(path_output_json_schema_bak, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)

        LOG.info("'codegen %s' complete for '%s'", constants.SUB_CMD_OPT_GATHER_RESULTS, package_name)
Example #5
    def _reload_package(args):

        old_params, path_customize_py_bak = [], ""

        # Get absolute path to package
        path_package = os.path.abspath(args.package)

        LOG.debug("\nPath to project: %s", path_package)

        # Ensure the package directory exists and we have WRITE access
        sdk_helpers.validate_dir_paths(os.W_OK, path_package)

        path_setup_py_file = os.path.join(path_package, package_helpers.BASE_NAME_SETUP_PY)

        package_name = package_helpers.get_package_name(path_package)

        if not sdk_helpers.is_valid_package_name(package_name):
            raise SDKException(u"'{0}' is not a valid package name. 'name' attribute in setup.py file is not valid or not specified".format(package_name))

        LOG.debug("\nProject name: %s", package_name)

        # Generate path to customize.py file + validate we have permissions to read it
        path_customize_py = os.path.join(path_package, package_name, package_helpers.PATH_CUSTOMIZE_PY)
        sdk_helpers.validate_file_paths(os.W_OK, path_customize_py)

        # Set package + output args correctly (this handles if user runs 'codegen --reload -p .')
        args.package = package_name
        args.output = path_package

        LOG.info("'codegen --reload' started for '%s'", args.package)

        # Load the customize.py module
        customize_py_module = package_helpers.load_customize_py_module(path_customize_py, warn=False)

        try:
            # Get the 'old_params' from customize.py
            old_params = customize_py_module.codegen_reload_data()
        except AttributeError:
            raise SDKException(u"Corrupt customize.py. No reload method found in {0}".format(path_customize_py))

        if not old_params:
            raise SDKException(u"No reload params found in {0}".format(path_customize_py))

        # Rename the old customize.py with .bak
        path_customize_py_bak = sdk_helpers.rename_to_bak_file(path_customize_py)

        # If local export file exists then save it to a .bak file.
        # (Older packages may not have the /util/data/export.res file)
        path_export_res = os.path.join(path_package, package_name,
                                       package_helpers.PATH_UTIL_DATA_DIR,
                                       package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
        if os.path.isfile(path_export_res):
            path_export_res_bak = sdk_helpers.rename_to_bak_file(path_export_res)
        else:
            path_export_res_bak = None

        try:
            # Map command line arg name to dict key returned by codegen_reload_data() in customize.py
            mapping_tuples = [
                ("messagedestination", "message_destinations"),
                ("function", "functions"),
                ("workflow", "workflows"),
                ("rule", "actions"),
                ("field", "incident_fields"),
                ("artifacttype", "incident_artifact_types"),
                ("incidenttype", "incident_types"),
                ("datatable", "datatables"),
                ("task", "automatic_tasks"),
                ("script", "scripts"),
                ("playbook", "playbooks")
            ]

            # Merge old_params with new params specified on command line
            args = CmdCodegen.merge_codegen_params(old_params, args, mapping_tuples)

            # Parse the setup.py file
            setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

            LOG.debug("Regenerating codegen '%s' package now", args.package)

            # Regenerate the package
            path_reloaded = CmdCodegen._gen_package(args, setup_py_attributes=setup_py_attributes)

            LOG.info("\nNOTE: Ensure the MANIFEST.in file includes line:\nrecursive-include %s/util *\n", args.package)
            LOG.info("'codegen --reload' complete for '%s'", args.package)

            return path_reloaded

        # This is required in a finally block as the user may kill the process with a keyboard interrupt
        finally:
            # If an error occurred and customize.py does not exist, rename the backup file back to the original
            if not os.path.isfile(path_customize_py):
                LOG.info(u"An error occurred. Renaming customize.py.bak to customize.py")
                sdk_helpers.rename_file(path_customize_py_bak, package_helpers.BASE_NAME_CUSTOMIZE_PY)
            if not os.path.isfile(path_export_res) and path_export_res_bak:
                LOG.info(u"An error occurred. Renaming export.res.bak to export.res")
                sdk_helpers.rename_file(path_export_res_bak, package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
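
merge_codegen_params, used with the mapping_tuples above, is what lets 'codegen --reload' combine the objects originally recorded in customize.py with any extra objects named on the command line. A plausible sketch of that behaviour (assumed, not the actual implementation):

def merge_codegen_params(old_params, args, mapping_tuples):
    # For each (arg_name, old_key) pair, merge the names stored by
    # codegen_reload_data() with any new names passed on the command line
    # and write the combined list back onto args.
    for arg_name, old_key in mapping_tuples:
        merged = list(old_params.get(old_key) or [])
        for value in getattr(args, arg_name, None) or []:
            if value not in merged:
                merged.append(value)
        setattr(args, arg_name, merged)
    return args
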
Example #6
    def _gen_package(args, setup_py_attributes={}):

        LOG.info("Generating codegen package...")

        sdk_helpers.is_python_min_supported_version()

        if os.path.exists(args.package) and not args.reload:
            raise SDKException(u"'{0}' already exists. Add --reload flag to regenerate it".format(args.package))

        if not sdk_helpers.is_valid_package_name(args.package):
            raise SDKException(u"'{0}' is not a valid package name".format(args.package))

        # The package_name will be specified in the args
        package_name = args.package

        # Get output_base, use args.output if defined, else current directory
        output_base = args.output if args.output else os.curdir
        output_base = os.path.abspath(output_base)

        # If --exportfile is specified, read org_export from that file
        if args.exportfile:
            LOG.info("Using local export file: %s", args.exportfile)
            org_export = sdk_helpers.read_local_exportfile(args.exportfile)

        else:
            # Instantiate connection to the Resilient Appliance
            res_client = sdk_helpers.get_resilient_client(path_config_file=args.config)

            # Generate + get latest export from Resilient Server
            org_export = sdk_helpers.get_latest_org_export(res_client)

        # Get data required for Jinja2 templates from export
        jinja_data = sdk_helpers.get_from_export(org_export,
                                                 message_destinations=args.messagedestination,
                                                 functions=args.function,
                                                 workflows=args.workflow,
                                                 rules=args.rule,
                                                 fields=args.field,
                                                 artifact_types=args.artifacttype,
                                                 datatables=args.datatable,
                                                 tasks=args.task,
                                                 scripts=args.script,
                                                 incident_types=args.incidenttype,
                                                 playbooks=args.playbook)

        # Get 'minified' version of the export. This is used in customize.py
        jinja_data["export_data"] = sdk_helpers.minify_export(org_export,
                                                              message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                                                              functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                                                              workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                                                              rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                                                              fields=jinja_data.get("all_fields"),
                                                              artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
                                                              datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                                                              tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
                                                              phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
                                                              scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")),
                                                              incident_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_TYPES, jinja_data.get("incident_types")),
                                                              playbooks=sdk_helpers.get_object_api_names(ResilientObjMap.PLAYBOOKS, jinja_data.get("playbooks")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        jinja_data["resilient_libraries_version"] = sdk_helpers.get_resilient_libraries_version_to_use()

        # Validate we have write permissions
        sdk_helpers.validate_dir_paths(os.W_OK, output_base)

        if not args.reload:
            # If this is not a reload, join package_name to output base
            output_base = os.path.join(output_base, package_name)

        # If the output_base directory does not exist, create it
        if not os.path.exists(output_base):
            os.makedirs(output_base)

        # Instantiate Jinja2 Environment with path to Jinja2 templates
        jinja_env = sdk_helpers.setup_jinja_env(constants.PACKAGE_TEMPLATE_PATH)

        # This dict maps our package file structure to Jinja2 templates
        package_mapping_dict = {
            "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
            "README.md": ("README.md.jinja2", jinja_data),
            "setup.py": ("setup.py.jinja2", jinja_data),
            "tox.ini": ("tox.ini.jinja2", jinja_data),
            "Dockerfile": ("Dockerfile.jinja2", jinja_data),
            "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
            "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
            "data": {},
            "icons": {
                "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
                "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
            },
            "doc": {
                "screenshots": {
                    "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
                }
            },
            package_name: {
                "__init__.py": ("package/__init__.py.jinja2", jinja_data),
                "LICENSE": ("package/LICENSE.jinja2", jinja_data),

                "components": {
                    "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
                },
                "util": {
                    "data": {
                        "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                    },
                    "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                    "config.py": ("package/util/config.py.jinja2", jinja_data),
                    "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                    "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
                }
            }
        }

        # If there are Functions, add a 'tests' and a 'payload_samples' directory (if in dev mode)
        if jinja_data.get("functions"):
            package_mapping_dict["tests"] = {}
            package_mapping_dict[package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR] = {}

        # Get a list of function names in export.
        fn_names = [f.get(ResilientObjMap.FUNCTIONS) for f in jinja_data.get("functions")]

        # Loop each Function
        for f in jinja_data.get("functions"):
            # Add package_name to function data
            f["package_name"] = package_name

            # Get function name
            fn_name = f.get(ResilientObjMap.FUNCTIONS)

            # Generate funct_function_component.py file name
            # Don't add prefix if function name already begins with "func_" or "funct_".
            if re.search(r"^(func|funct)_", fn_name):
                file_name = u"{0}.py".format(fn_name)
            else:
                file_name = u"funct_{0}.py".format(fn_name)
                # Check if file_name without extension already exists in the function names list.
                if os.path.splitext(file_name)[0] in fn_names:
                    raise SDKException(u"File name '{0}' already in use please rename the function '{1}'."
                                       .format(file_name, fn_name))

            # Add the function (rendered from the 'atomic function' template) to the 'components' directory
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/atomic_function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

            # Add a 'payload_samples/fn_name' directory and the files to it
            CmdCodegen.add_payload_samples(package_mapping_dict, fn_name, f)

        # Get a list of workflow names in export.
        wf_names = [w.get(ResilientObjMap.WORKFLOWS) for w in jinja_data.get("workflows")]

        for w in jinja_data.get("workflows"):
            # Get workflow name
            wf_name = w.get(ResilientObjMap.WORKFLOWS)

            # Generate wf_xx.md file name
            # Don't add prefix if workflow name already begins with "wf_".
            if re.search(r"^wf_", wf_name):
                file_name = u"{0}.md".format(wf_name)
            else:
                file_name = u"wf_{0}.md".format(wf_name)
                # Check if file_name without extension already exists in the workflow names list.
                if os.path.splitext(file_name)[0] in wf_names:
                    raise SDKException(u"File name '{0}' already in use please recreate the workflow '{1}'."
                                       .format(file_name, wf_name))

            # Add workflow to data directory
            package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

        newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
            jinja_mapping_dict=package_mapping_dict,
            jinja_env=jinja_env,
            target_dir=output_base,
            package_dir=output_base)

        # Log new and skipped files
        if newly_generated_files:
            LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

        if skipped_files:
            LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

        LOG.info("'codegen' complete for '%s'", package_name)

        return output_base
Example #7
    def execute_command(self, args):
        """
        Function that creates the App.zip file from the given source path and returns
        the path to the new App.zip

        :param args: Arguments from command line:

            -  **args.package**: path to directory that must include a setup.py, customize.py and config.py file.
            -  **args.cmd**: `package` in this case
            -  **args.display_name**: will give the App that display name. Default: name from setup.py file
            -  **args.repository_name**: if defined, it will replace the default image repository name in app.json for
                                         container access.
            -  **args.keep_build_dir**: if defined, dist/build/ will not be removed.
        :type args: argparse Namespace

        :return: Path to new app.zip
        :rtype: str
        """
        # Set name for SDKException
        SDKException.command_ran = self.CMD_NAME

        # Get absolute path_to_src
        path_to_src = os.path.abspath(args.package)

        # Get basename of path_to_src (version information is stripped from the basename).
        path_to_src_basename = re.split(VERSION_REGEX,
                                        os.path.basename(path_to_src), 1)[0]

        LOG.debug("Path to project: %s", path_to_src)
        LOG.debug("Project basename: %s", path_to_src_basename)

        # Ensure the src directory exists and we have WRITE access
        sdk_helpers.validate_dir_paths(os.W_OK, path_to_src)

        # Generate paths to files required to create app
        path_setup_py_file = os.path.join(path_to_src, BASE_NAME_SETUP_PY)
        path_docker_file = os.path.join(path_to_src, BASE_NAME_DOCKER_FILE)
        path_entry_point = os.path.join(path_to_src, BASE_NAME_ENTRY_POINT)
        path_apikey_permissions_file = os.path.join(
            path_to_src, BASE_NAME_APIKEY_PERMS_FILE)
        path_output_dir = os.path.join(path_to_src, BASE_NAME_DIST_DIR)
        path_extension_logo = os.path.join(path_to_src,
                                           PATH_ICON_EXTENSION_LOGO)
        path_company_logo = os.path.join(path_to_src, PATH_ICON_COMPANY_LOGO)

        LOG.info("Built Distribution starting\n")

        # Create the built distribution
        use_setuptools.run_setup(setup_script=path_setup_py_file,
                                 args=["sdist", "--formats=gztar"])

        LOG.info("\nBuilt Distribution finished. See: %s", path_output_dir)

        # Check that files 'Dockerfile' and 'entrypoint.sh' files exist in the integration package
        # before attempting to create the app.
        sdk_helpers.validate_file_paths(os.R_OK, path_docker_file,
                                        path_entry_point)

        # Create the app
        path_the_extension_zip = create_extension(
            path_setup_py_file=path_setup_py_file,
            path_apikey_permissions_file=path_apikey_permissions_file,
            output_dir=path_output_dir,
            custom_display_name=args.display_name,
            repository_name=args.repository_name,
            keep_build_dir=args.keep_build_dir,
            path_extension_logo=path_extension_logo,
            path_company_logo=path_company_logo)

        LOG.info("App created at: %s", path_the_extension_zip)

        return path_the_extension_zip
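
validate_file_paths, used above to confirm the Dockerfile and entrypoint.sh are readable before packaging, presumably mirrors validate_dir_paths from Example #1. A sketch under that assumption:

import os

class SDKException(Exception):
    """Simplified stand-in for the SDK's exception type, as in the Example #1 sketch."""

def validate_file_paths(permissions, *paths):
    # Assumed behaviour: raise SDKException if any path is not a regular file
    # or lacks the given os access mode (e.g. os.R_OK, os.W_OK)
    for path in paths:
        if not os.path.isfile(path):
            raise SDKException("Could not find file: {0}".format(path))
        if permissions is not None and not os.access(path, permissions):
            raise SDKException("Insufficient permissions for file: {0}".format(path))
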
Example #8
def create_extension(path_setup_py_file,
                     path_apikey_permissions_file,
                     output_dir,
                     path_built_distribution=None,
                     path_extension_logo=None,
                     path_company_logo=None,
                     custom_display_name=None,
                     repository_name=None,
                     keep_build_dir=False):
    """
    TODO: update this docstring to new standard format
    Function that creates The App.zip file from the given setup.py, customize and config files
    and copies it to the output_dir. Returns the path to the App.zip
    - path_setup_py_file [String]: abs path to the setup.py file
    - path_apikey_permissions_file [String]: abs path to the apikey_permissions.txt file
    - output_dir [String]: abs path to the directory the App.zip should be produced
    - path_built_distribution [String]: abs path to a tar.gz Built Distribution
        - if provided uses that .tar.gz
        - else looks for it in the output_dir. E.g: output_dir/package_name.tar.gz
    - path_extension_logo [String]: abs path to the app_logo.png. Has to be 200x72 and a .png file
        - if not provided uses default icon
    - path_company_logo [String]: abs path to the company_logo.png. Has to be 100x100 and a .png file
        - if not provided uses default icon
    - custom_display_name [String]: will give the App that display name. Default: name from setup.py file
    - repository_name [String]: will over-ride the container repository name for the App. Default: 'ibmresilient'
    - keep_build_dir [Boolean]: if True, build/ will not be removed. Default: False
    """

    LOG.info("Creating App")
    # Variables to hold the paths of the customize and config files as defined in setup.py.
    # Initially set to None; the actual paths are calculated later.
    path_customize_py_file = None
    path_config_py_file = None

    # Ensure the output_dir exists and we have WRITE access, and that we can READ the setup.py
    # and apikey_permissions.txt files.
    sdk_helpers.validate_dir_paths(os.W_OK, output_dir)
    sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file,
                                    path_apikey_permissions_file)

    # Parse the setup.py file
    setup_py_attributes = parse_setup_py(path_setup_py_file,
                                         SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    # Validate setup.py attributes

    # Validate the name attribute. Raise exception if invalid
    if not sdk_helpers.is_valid_package_name(setup_py_attributes.get("name")):
        raise SDKException(
            "'{0}' is not a valid App name. The name attribute must be defined and can only include 'a-z and _'.\nUpdate this value in the setup.py file located at: {1}"
            .format(setup_py_attributes.get("name"), path_setup_py_file))

    # Validate the version attribute. Raise exception if invalid
    if not sdk_helpers.is_valid_version_syntax(
            setup_py_attributes.get("version")):
        raise SDKException(
            "'{0}' is not a valid App version syntax. The version attribute must be defined. Example: version=\"1.0.0\".\nUpdate this value in the setup.py file located at: {1}"
            .format(setup_py_attributes.get("version"), path_setup_py_file))

    # Validate the url supplied in the setup.py file, set to an empty string if not valid
    if not sdk_helpers.is_valid_url(setup_py_attributes.get("url")):
        LOG.warning("WARNING: '%s' is not a valid url. Ignoring.",
                    setup_py_attributes.get("url"))
        setup_py_attributes["url"] = ""

    # Get the tag name
    tag_name = setup_py_attributes.get("name")

    # Get the customize file location.
    path_customize_py_file = get_configuration_py_file_path(
        "customize", setup_py_attributes)

    # Get the config file location.
    path_config_py_file = get_configuration_py_file_path(
        "config", setup_py_attributes)

    # Get ImportDefinition from the discovered customize file.
    if path_customize_py_file:
        import_definition = get_import_definition_from_customize_py(
            path_customize_py_file)
    else:
        # No 'customize.py' file found; generate an import definition with just the minimum server version.
        import_definition = {'server_version': IMPORT_MIN_SERVER_VERSION}

    # Add the tag to the import definition
    import_definition = add_tag_to_import_definition(tag_name,
                                                     SUPPORTED_RES_OBJ_NAMES,
                                                     import_definition)

    # Parse the app.configs from the discovered config file
    if path_config_py_file:
        app_configs = get_configs_from_config_py(path_config_py_file)
    else:
        # No config file found; generate an empty definition.
        app_configs = ("", [])

    # Parse the api key permissions from the apikey_permissions.txt file
    apikey_permissions = get_apikey_permissions(path_apikey_permissions_file)

    # Generate the name for the extension
    extension_name = "{0}-{1}".format(setup_py_attributes.get("name"),
                                      setup_py_attributes.get("version"))

    # Generate the uuid
    uuid = sdk_helpers.generate_uuid_from_string(
        setup_py_attributes.get("name"))

    # Set the container repository name to default if value not passed in as argument.
    if not repository_name:
        repository_name = REPOSITORY_NAME

    # Generate paths to the directories and files we will use in the build directory
    path_build = os.path.join(output_dir, BASE_NAME_BUILD)
    path_extension_json = os.path.join(path_build, BASE_NAME_EXTENSION_JSON)
    path_export_res = os.path.join(path_build, BASE_NAME_EXPORT_RES)

    try:
        # If there is an old build directory, remove it first
        if os.path.exists(path_build):
            shutil.rmtree(path_build)

        # Create the directories for the path "/build/"
        os.makedirs(path_build)

        # If no path_built_distribution is given, use the default: "<output_dir>/<package-name>.tar.gz"
        if not path_built_distribution:
            path_built_distribution = os.path.join(
                output_dir, "{0}.tar.gz".format(extension_name))

        # Validate the built distribution exists and we have READ access
        sdk_helpers.validate_file_paths(os.R_OK, path_built_distribution)

        # Copy the built distribution to the build dir and enforce rename to .tar.gz
        shutil.copy(
            path_built_distribution,
            os.path.join(path_build, "{0}.tar.gz".format(extension_name)))

        # Get the extension_logo (icon) and company_logo (author.icon) as base64 encoded strings
        extension_logo = get_icon(
            icon_name=os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO),
            path_to_icon=path_extension_logo,
            width_accepted=200,
            height_accepted=72,
            default_path_to_icon=PATH_DEFAULT_ICON_EXTENSION_LOGO)

        company_logo = get_icon(
            icon_name=os.path.basename(PATH_DEFAULT_ICON_COMPANY_LOGO),
            path_to_icon=path_company_logo,
            width_accepted=100,
            height_accepted=100,
            default_path_to_icon=PATH_DEFAULT_ICON_COMPANY_LOGO)

        # Get the display name
        # Use --display-name if passed
        # If not use 'display_name' attribute in setup.py
        # If not set use the 'name' attribute in setup.py
        display_name = custom_display_name or setup_py_attributes.get(
            "display_name") or setup_py_attributes.get("name")

        # Image string is all lowercase on quay.io
        image_name = "{0}/{1}:{2}".format(repository_name,
                                          setup_py_attributes.get("name"),
                                          setup_py_attributes.get("version"))
        image_name = image_name.lower()

        # Generate the contents for the extension.json file
        the_extension_json_file_contents = {
            "author": {
                "name": setup_py_attributes.get("author"),
                "website": setup_py_attributes.get("url"),
                "icon": {
                    "data": company_logo,
                    "media_type": "image/png"
                }
            },
            "description": {
                "content": setup_py_attributes.get("description"),
                "format": "text"
            },
            "display_name": display_name,
            "icon": {
                "data": extension_logo,
                "media_type": "image/png"
            },
            "long_description": {
                "content":
                "<div>{0}</div>".format(
                    setup_py_attributes.get("long_description")),
                "format":
                "html"
            },
            "minimum_resilient_version": {
                "major":
                import_definition.get("server_version").get("major", None),
                "minor":
                import_definition.get("server_version").get("minor", None),
                "build_number":
                import_definition.get("server_version").get(
                    "build_number", None),
                "version":
                import_definition.get("server_version").get("version", None)
            },
            "name": setup_py_attributes.get("name"),
            "tag": {
                "prefix": tag_name,
                "name": tag_name,
                "display_name": tag_name,
                "uuid": uuid
            },
            "uuid": uuid,
            "version": setup_py_attributes.get("version"),
            "current_installation": {
                "executables": [{
                    "name": setup_py_attributes.get("name"),
                    "image": image_name,
                    "config_string": app_configs[0],
                    "permission_handles": apikey_permissions,
                    "uuid": uuid
                }]
            }
        }

        # Write the executable.json file
        sdk_helpers.write_file(
            path_extension_json,
            json.dumps(the_extension_json_file_contents, sort_keys=True))

        # Write the customize ImportDefinition to the app*.zip export.res file
        sdk_helpers.write_file(path_export_res,
                               json.dumps(import_definition, sort_keys=True))

        # Copy the built distribution to the build dir, enforce rename to .tar.gz
        shutil.copy(
            path_built_distribution,
            os.path.join(path_build, "{0}.tar.gz".format(extension_name)))

        # Create the extension zip by zipping the build directory
        extension_zip_base_path = os.path.join(
            output_dir, "{0}{1}".format(PREFIX_EXTENSION_ZIP, extension_name))
        extension_zip_name = shutil.make_archive(
            base_name=extension_zip_base_path,
            format="zip",
            root_dir=path_build)
        path_the_extension_zip = os.path.join(extension_zip_base_path,
                                              extension_zip_name)

    except SDKException as err:
        raise err

    except Exception as err:
        raise SDKException(err)

    finally:
        # Remove the build dir. Keep it if user passes --keep-build-dir
        if not keep_build_dir:
            shutil.rmtree(path_build)

    LOG.info("App %s.zip created", "{0}{1}".format(PREFIX_EXTENSION_ZIP,
                                                   extension_name))

    # Return the path to the extension zip
    return path_the_extension_zip
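
get_icon, called above for the app and company logos, returns the icon as a base64-encoded string, falling back to the bundled default when no path is supplied and rejecting images that are not PNGs of the required dimensions. A self-contained sketch of that assumed behaviour, reading the width and height straight from the PNG IHDR header to avoid an image-library dependency:

import base64
import struct

class SDKException(Exception):
    """Simplified stand-in for the SDK's exception type."""

def get_icon(icon_name, path_to_icon, width_accepted, height_accepted, default_path_to_icon):
    # Fall back to the default icon if no custom path was provided
    path = path_to_icon or default_path_to_icon
    with open(path, "rb") as the_file:
        data = the_file.read()
    # Every PNG starts with the same 8-byte signature; IHDR width/height follow at bytes 16-24
    if data[:8] != b"\x89PNG\r\n\x1a\n":
        raise SDKException("{0} is not a valid .png file".format(icon_name))
    width, height = struct.unpack(">II", data[16:24])
    if (width, height) != (width_accepted, height_accepted):
        raise SDKException("{0} must be {1}x{2} pixels".format(icon_name, width_accepted, height_accepted))
    return base64.b64encode(data).decode("utf-8")
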
def create_extension(path_setup_py_file, path_apikey_permissions_file,
                     output_dir, path_built_distribution=None, path_extension_logo=None, path_company_logo=None, path_payload_samples=None, path_validate_report=None,
                     custom_display_name=None, repository_name=None, image_hash=None, keep_build_dir=False):
    """
    Function that creates The App.zip file from the given setup.py, customize and config files
    and copies it to the output_dir. Returns the path to the app.zip

    :param path_setup_py_file: abs path to the setup.py file
    :type path_setup_py_file: str
    :param path_apikey_permissions_file: abs path to the apikey_permissions.txt file
    :type path_apikey_permissions_file: str
    :param output_dir: abs path to the directory the App.zip should be produced
    :type output_dir: str
    :param path_built_distribution: abs path to a tar.gz Built Distribution
        - if provided uses that .tar.gz
        - else looks for it in the output_dir. E.g: output_dir/package_name.tar.gz
    :type path_built_distribution: str
    :param path_extension_logo: abs path to the app_logo.png. Has to be 200x72 and a .png file
        - if not provided uses default icon
    :type path_extension_logo: str
    :param path_company_logo: abs path to the company_logo.png. Has to be 100x100 and a .png file
        - if not provided uses default icon
    :type path_company_logo: str
    :param path_payload_samples: abs path to directory containing the files with a JSON schema and example output of the functions
    :type path_payload_samples: str
    :param path_validate_report: abs path to directory containing the validation report - to be copied to the build directory that will be zipped
    :type path_validate_report: str
    :param custom_display_name: will give the App that display name. Default: name from setup.py file
    :type custom_display_name: str
    :param repository_name: will over-ride the container repository name for the App. Default: 'ibmresilient'
    :type repository_name: str
    :param image_hash: if defined will append the hash to the image_name in the app.json file e.g. <repository_name>/<package_name>@sha256:<image_hash>. Default: <repository_name>/<package_name>:<version>
    :type image_hash: str
    :param keep_build_dir: if True, build/ will not be removed. Default: False
    :type keep_build_dir: bool
    :return: Path to new app.zip
    :rtype: str
    """

    LOG.info("Creating App")
    # Variables to hold the paths of the customize and config files as defined in setup.py.
    # Initially set to None; the actual paths are calculated later.
    path_customize_py_file = None
    path_config_py_file = None

    # Ensure the output_dir exists and we have WRITE access, and that we can READ the setup.py
    # and apikey_permissions.txt files.
    sdk_helpers.validate_dir_paths(os.W_OK, output_dir)
    sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file, path_apikey_permissions_file)

    # Parse the setup.py file
    setup_py_attributes = parse_setup_py(path_setup_py_file, SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    # Validate setup.py attributes

    # Validate the name attribute. Raise exception if invalid
    if not sdk_helpers.is_valid_package_name(setup_py_attributes.get("name")):
        raise SDKException("'{0}' is not a valid App name. The name attribute must be defined and can only include 'a-z and _'.\nUpdate this value in the setup.py file located at: {1}".format(setup_py_attributes.get("name"), path_setup_py_file))

    # Validate the version attribute. Raise exception if invalid
    if not sdk_helpers.is_valid_version_syntax(setup_py_attributes.get("version")):
        raise SDKException("'{0}' is not a valid App version syntax. The version attribute must be defined. Example: version=\"1.0.0\".\nUpdate this value in the setup.py file located at: {1}".format(setup_py_attributes.get("version"), path_setup_py_file))

    # Validate the url supplied in the setup.py file, set to an empty string if not valid
    if not sdk_helpers.is_valid_url(setup_py_attributes.get("url")):
        LOG.warning("WARNING: '%s' is not a valid url. Ignoring.", setup_py_attributes.get("url"))
        setup_py_attributes["url"] = ""

    # Get the tag name
    tag_name = setup_py_attributes.get("name")

    # Get the customize file location.
    path_customize_py_file = get_configuration_py_file_path("customize", setup_py_attributes)

    # Get the config file location.
    path_config_py_file = get_configuration_py_file_path("config", setup_py_attributes)

    # Get ImportDefinition from the discovered customize file.
    if path_customize_py_file:
        import_definition = get_import_definition_from_customize_py(path_customize_py_file)
    else:
        # No 'customize.py' file found; generate an import definition with just the minimum server version.
        import_definition = {
            'server_version':
                IMPORT_MIN_SERVER_VERSION
        }

    # Add the tag to the import definition
    import_definition = add_tag_to_import_definition(tag_name, SUPPORTED_RES_OBJ_NAMES, import_definition)

    # Parse the app.configs from the discovered config file
    if path_config_py_file:
        app_configs = get_configs_from_config_py(path_config_py_file)
    else:
        # No config file found; generate an empty definition.
        app_configs = ("", [])

    # Parse the api key permissions from the apikey_permissions.txt file
    apikey_permissions = get_apikey_permissions(path_apikey_permissions_file)

    # Generate the name for the extension
    extension_name = "{0}-{1}".format(setup_py_attributes.get("name"), setup_py_attributes.get("version"))

    # Generate the uuid
    uuid = sdk_helpers.generate_uuid_from_string(setup_py_attributes.get("name"))

    # Set the container repository name to default if value not passed in as argument.
    if not repository_name:
        repository_name = REPOSITORY_NAME

    # Generate paths to the directories and files we will use in the build directory
    path_build = os.path.join(output_dir, BASE_NAME_BUILD)
    path_extension_json = os.path.join(path_build, BASE_NAME_EXTENSION_JSON)
    path_export_res = os.path.join(path_build, BASE_NAME_EXPORT_RES)

    try:
        # If there is an old build directory, remove it first
        if os.path.exists(path_build):
            shutil.rmtree(path_build)

        # Create the directories for the path "/build/"
        os.makedirs(path_build)

        # If no path_built_distribution is given, use the default: "<output_dir>/<package-name>.tar.gz"
        if not path_built_distribution:
            path_built_distribution = os.path.join(output_dir, "{0}.tar.gz".format(extension_name))

        # Validate the built distribution exists and we have READ access
        sdk_helpers.validate_file_paths(os.R_OK, path_built_distribution)

        # Copy the built distribution to the build dir and enforce rename to .tar.gz
        shutil.copy(path_built_distribution, os.path.join(path_build, "{0}.tar.gz".format(extension_name)))

        # Get the extension_logo (icon) and company_logo (author.icon) as base64 encoded strings
        extension_logo = get_icon(
            icon_name=os.path.basename(PATH_DEFAULT_ICON_EXTENSION_LOGO),
            path_to_icon=path_extension_logo,
            width_accepted=constants.ICON_APP_LOGO_REQUIRED_WIDTH,
            height_accepted=constants.ICON_APP_LOGO_REQUIRED_HEIGHT,
            default_path_to_icon=PATH_DEFAULT_ICON_EXTENSION_LOGO)

        company_logo = get_icon(
            icon_name=os.path.basename(PATH_DEFAULT_ICON_COMPANY_LOGO),
            path_to_icon=path_company_logo,
            width_accepted=constants.ICON_COMPANY_LOGO_REQUIRED_WIDTH,
            height_accepted=constants.ICON_COMPANY_LOGO_REQUIRED_HEIGHT,
            default_path_to_icon=PATH_DEFAULT_ICON_COMPANY_LOGO)

        # Get the display name
        # Use --display-name if passed
        # If not use 'display_name' attribute in setup.py
        # If not set use the 'name' attribute in setup.py
        display_name = custom_display_name or setup_py_attributes.get("display_name") or setup_py_attributes.get("name")

        # Image string is all lowercase on quay.io
        image_name = "{0}/{1}:{2}".format(repository_name, setup_py_attributes.get("name"), setup_py_attributes.get("version"))

        if image_hash:

            if not sdk_helpers.is_valid_hash(image_hash):
                raise SDKException(u"image_hash '{0}' is not a valid SHA256 hash\nIt must be a valid hexadecimal and 64 characters long".format(image_hash))

            # If image_hash is defined append to image name e.g. <repository_name>/<package_name>@sha256:<image_hash>
            image_name = "{0}/{1}@sha256:{2}".format(repository_name, setup_py_attributes.get("name"), image_hash)

        image_name = image_name.lower()

        LOG.debug("image_name generated: %s", image_name)

        # Generate the contents for the extension.json file
        the_extension_json_file_contents = {
            "author": {
                "name": setup_py_attributes.get("author"),
                "website": setup_py_attributes.get("url"),
                "icon": {
                    "data": company_logo,
                    "media_type": "image/png"
                }
            },
            "description": {
                "content": setup_py_attributes.get("description"),
                "format": "text"
            },
            "display_name": display_name,
            "icon": {
                "data": extension_logo,
                "media_type": "image/png"
            },
            "long_description": {
                "content": u"<div>{0}</div>".format(setup_py_attributes.get("long_description")),
                "format": "html"
            },
            "minimum_resilient_version": {
                "major": import_definition.get("server_version").get("major", None),
                "minor": import_definition.get("server_version").get("minor", None),
                "build_number": import_definition.get("server_version").get("build_number", None),
                "version": import_definition.get("server_version").get("version", None)
            },
            "name": setup_py_attributes.get("name"),
            "tag": {
                "prefix": tag_name,
                "name": tag_name,
                "display_name": tag_name,
                "uuid": uuid
            },
            "uuid": uuid,
            "version": setup_py_attributes.get("version"),
            "current_installation": {
                "executables": [
                    {
                        "name": setup_py_attributes.get("name"),
                        "image": image_name,
                        "config_string": app_configs[0],
                        "permission_handles": apikey_permissions,
                        "uuid": uuid
                    }
                ]
            }
        }

        # Write the extension.json file
        sdk_helpers.write_file(path_extension_json, json.dumps(the_extension_json_file_contents, sort_keys=True))

        # Gather payload_samples file for each function and add to export.res file if exists
        if not path_payload_samples:
            LOG.warning("WARNING: No path for 'payload_samples' provided. Skipping adding them to the export.res file")

        else:
            for fn in import_definition.get("functions"):

                # Get paths to payload_samples
                fn_name = fn.get(ResilientObjMap.FUNCTIONS)
                path_payload_samples_fn = os.path.join(path_payload_samples, fn_name)
                path_payload_samples_schema = os.path.join(path_payload_samples_fn, BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)
                path_payload_samples_example = os.path.join(path_payload_samples_fn, BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)

                try:
                    # Validate the payload sample files; warn and skip this function if they can't be read
                    sdk_helpers.validate_file_paths(os.R_OK, path_payload_samples_schema, path_payload_samples_example)
                except SDKException as err:
                    err.message += ("\nWARNING: could not access JSON file to add payload_samples. Continuing to create package.\n"
                                    "Add '--no-samples' flag to avoid looking for them and avoid this warning message.\n")
                    LOG.warning(err.message)
                    continue

                # Read in schema payload and add to function import definition
                payload_samples_schema_contents_dict = sdk_helpers.read_json_file(path_payload_samples_schema)
                LOG.debug("Adding JSON output schema to '%s' from file: %s", fn_name, path_payload_samples_schema)
                json_schema_key = os.path.splitext(BASE_NAME_PAYLOAD_SAMPLES_SCHEMA)[0]
                fn[json_schema_key] = json.dumps(payload_samples_schema_contents_dict)

                # Read in example payload and add to function import definition
                payload_samples_example_contents_dict = sdk_helpers.read_json_file(path_payload_samples_example)
                LOG.debug("Adding JSON output example to '%s' from file: %s", fn_name, path_payload_samples_example)
                json_example_key = os.path.splitext(BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)[0]
                fn[json_example_key] = json.dumps(payload_samples_example_contents_dict)

        if path_validate_report:
            path_zipped_validate_report = os.path.join(path_build, os.path.basename(path_validate_report))
            shutil.copy(path_validate_report, path_zipped_validate_report)
        else:
            LOG.warn("WARNING: If a validation report is not included with your submission, it will get rejected. Run this command with the '--validate' flag to include validations.")

        # Write the customize ImportDefinition to the app*.zip export.res file
        sdk_helpers.write_file(path_export_res, json.dumps(import_definition, sort_keys=True))

        # Copy the built distribution to the build dir, enforce rename to .tar.gz
        shutil.copy(path_built_distribution, os.path.join(path_build, "{0}.tar.gz".format(extension_name)))

        # Create the app.zip (Extension Zip) by zipping the build directory
        extension_zip_base_path = os.path.join(output_dir, "{0}{1}".format(PREFIX_EXTENSION_ZIP, extension_name))
        # shutil.make_archive returns the full path to the .zip archive it creates
        path_the_extension_zip = shutil.make_archive(base_name=extension_zip_base_path, format="zip", root_dir=path_build)

    except SDKException as err:
        raise err

    except Exception as err:
        raise SDKException(err)

    finally:
        # Remove the build dir. Keep it if user passes --keep-build-dir
        if not keep_build_dir:
            shutil.rmtree(path_build)

    LOG.info("App %s.zip created", "{0}{1}".format(PREFIX_EXTENSION_ZIP, extension_name))

    # Return the path to the extension zip
    return path_the_extension_zip
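The zip step just above relies on shutil.make_archive returning the full path of the archive it creates. A minimal standalone sketch of that behaviour, using a hypothetical build directory and extension name:

import os
import shutil
import tempfile

# Stand-in for <output_dir>/build with a single placeholder file in it
build_dir = tempfile.mkdtemp()
with open(os.path.join(build_dir, "app.json"), "w") as f:
    f.write("{}")

extension_name = "fn_my_app-1.0.0"  # hypothetical <package-name>-<version>
base_path = os.path.join(tempfile.gettempdir(), "app-" + extension_name)

# make_archive zips build_dir and returns the absolute path to the .zip it wrote
path_zip = shutil.make_archive(base_name=base_path, format="zip", root_dir=build_dir)
print(path_zip)  # e.g. /tmp/app-fn_my_app-1.0.0.zip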
Example #10
0
    def execute_command(self, args):
        LOG.debug("docgen called with %s", args)

        # Set docgen name for SDKException
        SDKException.command_ran = self.CMD_NAME

        # Get absolute path_to_src
        path_to_src = os.path.abspath(args.p)

        LOG.debug("Path to project: %s", path_to_src)

        # Instantiate Jinja2 Environment with path to Jinja2 templates
        jinja_env = sdk_helpers.setup_jinja_env("data/docgen/templates")

        # Load the Jinja2 Templates
        readme_template = jinja_env.get_template(README_TEMPLATE_NAME)

        # Generate path to setup.py file
        path_setup_py_file = os.path.join(path_to_src,
                                          package_helpers.BASE_NAME_SETUP_PY)

        try:
            # Ensure we have read permissions for setup.py
            sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file)
        except SDKException as err:
            err.message += "\nEnsure you are in the directory of the package you want to run docgen for"
            raise err

        # Parse the setup.py file
        setup_py_attributes = package_helpers.parse_setup_py(
            path_setup_py_file,
            package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

        package_name = setup_py_attributes.get("name", "")

        # Generate paths to other required directories + files
        path_customize_py_file = os.path.join(
            path_to_src, package_name, package_helpers.PATH_CUSTOMIZE_PY)
        path_config_py_file = os.path.join(path_to_src, package_name,
                                           package_helpers.PATH_CONFIG_PY)
        path_readme = os.path.join(path_to_src,
                                   package_helpers.BASE_NAME_README)
        path_screenshots_dir = os.path.join(path_to_src,
                                            package_helpers.PATH_SCREENSHOTS)
        path_payload_samples_dir = os.path.join(
            path_to_src, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

        # Ensure we have read permissions for each required file and the file exists
        sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file,
                                        path_customize_py_file,
                                        path_config_py_file)

        # Check doc/screenshots directory exists, if not, create it + copy default screenshot
        if not os.path.isdir(path_screenshots_dir):
            os.makedirs(path_screenshots_dir)
            shutil.copy(package_helpers.PATH_DEFAULT_SCREENSHOT,
                        path_screenshots_dir)

        # Get the resilient_circuits dependency string from setup.py file
        res_circuits_dep_str = package_helpers.get_dependency_from_install_requires(
            setup_py_attributes.get("install_requires"), "resilient_circuits")

        if not res_circuits_dep_str:
            res_circuits_dep_str = package_helpers.get_dependency_from_install_requires(
                setup_py_attributes.get("install_requires"),
                "resilient-circuits")

        # Get ImportDefinition from customize.py
        customize_py_import_def = package_helpers.get_import_definition_from_customize_py(
            path_customize_py_file)

        # Parse the app.configs from the config.py file
        jinja_app_configs = package_helpers.get_configs_from_config_py(
            path_config_py_file)

        # Get field names from ImportDefinition
        field_names = []
        for f in customize_py_import_def.get("fields", []):
            f_export_key = f.get("export_key")

            if "incident/" in f_export_key and f_export_key not in IGNORED_INCIDENT_FIELDS:
                field_names.append(f.get(ResilientObjMap.FIELDS, ""))

        # Get data from ImportDefinition
        import_def_data = sdk_helpers.get_from_export(
            customize_py_import_def,
            message_destinations=sdk_helpers.get_object_api_names(
                ResilientObjMap.MESSAGE_DESTINATIONS,
                customize_py_import_def.get("message_destinations")),
            functions=sdk_helpers.get_object_api_names(
                ResilientObjMap.FUNCTIONS,
                customize_py_import_def.get("functions")),
            workflows=sdk_helpers.get_object_api_names(
                ResilientObjMap.WORKFLOWS,
                customize_py_import_def.get("workflows")),
            rules=sdk_helpers.get_object_api_names(
                ResilientObjMap.RULES, customize_py_import_def.get("actions")),
            fields=field_names,
            artifact_types=sdk_helpers.get_object_api_names(
                ResilientObjMap.INCIDENT_ARTIFACT_TYPES,
                customize_py_import_def.get("incident_artifact_types")),
            datatables=sdk_helpers.get_object_api_names(
                ResilientObjMap.DATATABLES,
                customize_py_import_def.get("types")),
            tasks=sdk_helpers.get_object_api_names(
                ResilientObjMap.TASKS,
                customize_py_import_def.get("automatic_tasks")),
            scripts=sdk_helpers.get_object_api_names(
                ResilientObjMap.SCRIPTS,
                customize_py_import_def.get("scripts")),
            playbooks=sdk_helpers.get_object_api_names(
                ResilientObjMap.PLAYBOOKS,
                customize_py_import_def.get("playbooks", [])))

        # Lists we use in Jinja Templates
        jinja_functions = self._get_function_details(
            import_def_data.get("functions", []),
            import_def_data.get("workflows", []))
        jinja_scripts = self._get_script_details(
            import_def_data.get("scripts", []))
        jinja_rules = self._get_rule_details(import_def_data.get("rules", []))
        jinja_datatables = self._get_datatable_details(
            import_def_data.get("datatables", []))
        jinja_custom_fields = self._get_custom_fields_details(
            import_def_data.get("fields", []))
        jinja_custom_artifact_types = self._get_custom_artifact_details(
            import_def_data.get("artifact_types", []))
        jinja_playbooks = self._get_playbook_details(
            import_def_data.get("playbooks", []))

        # Other variables for Jinja Templates
        package_name_dash = package_name.replace("_", "-")
        server_version = customize_py_import_def.get("server_version", {})
        supported_app = sdk_helpers.does_url_contain(
            setup_py_attributes.get("url", ""), "ibm.com/mysupport")

        # See if a payload_samples dir exists and use the contents for function results
        try:
            sdk_helpers.validate_dir_paths(os.R_OK, path_payload_samples_dir)

            for f in jinja_functions:
                fn_name = f.get("x_api_name")
                path_payload_samples_fn_name = os.path.join(
                    path_payload_samples_dir, fn_name)
                path_output_json_example = os.path.join(
                    path_payload_samples_fn_name,
                    package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE)

                try:
                    sdk_helpers.validate_file_paths(os.R_OK,
                                                    path_output_json_example)
                    f["results"] = sdk_helpers.read_json_file(
                        path_output_json_example)
                except SDKException as e:
                    sdk_helpers.handle_file_not_found_error(
                        e,
                        u"Error getting results. No '{0}' file found for '{1}'."
                        .format(
                            package_helpers.BASE_NAME_PAYLOAD_SAMPLES_EXAMPLE,
                            fn_name))

        except SDKException as e:
            sdk_helpers.handle_file_not_found_error(
                e, u"Error getting results. No '{0}' directory found.".format(
                    package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR))

        LOG.info("Rendering README for %s", package_name_dash)

        # Render the README Jinja2 Template with parameters
        rendered_readme = readme_template.render({
            "name_underscore": package_name,
            "name_dash": package_name_dash,
            "display_name": setup_py_attributes.get("display_name", package_name),
            "short_description": setup_py_attributes.get("description"),
            "long_description": setup_py_attributes.get("long_description"),
            "version": setup_py_attributes.get("version"),
            "server_version": server_version.get("version"),
            "all_dependencies": setup_py_attributes.get("install_requires", []),
            "res_circuits_dependency_str": res_circuits_dep_str,
            "author": setup_py_attributes.get("author"),
            "support_url": setup_py_attributes.get("url"),
            "supported_app": supported_app,
            "app_configs": jinja_app_configs[1],
            "functions": jinja_functions,
            "scripts": jinja_scripts,
            "rules": jinja_rules,
            "datatables": jinja_datatables,
            "custom_fields": jinja_custom_fields,
            "custom_artifact_types": jinja_custom_artifact_types,
            "playbooks": jinja_playbooks,
            "placeholder_string": constants.DOCGEN_PLACEHOLDER_STRING
        })

        # Create a backup if needed of README
        sdk_helpers.rename_to_bak_file(path_readme,
                                       package_helpers.PATH_DEFAULT_README)

        LOG.info("Writing README to: %s", path_readme)

        # Write the new README
        sdk_helpers.write_file(path_readme, rendered_readme)
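The docgen flow above boils down to rendering a packaged README template with values gathered from setup.py, config.py and the ImportDefinition. A minimal sketch of the same Jinja2 pattern, using an inline template and hypothetical values purely for illustration:

from jinja2 import Environment

env = Environment(autoescape=False)
readme_template = env.from_string(
    "# {{ display_name }} ({{ version }})\n\n{{ short_description }}\n"
)

rendered_readme = readme_template.render({
    "display_name": "My App",
    "version": "1.0.0",
    "short_description": "Example app description.",
})
print(rendered_readme)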
    def execute_command(self, args):
        """
        Function that creates the app.zip file from the given source path and returns
        the path to the new App.zip

        :param args: Arguments from command line:

            -  **args.package**: path to directory that must include a setup.py, customize.py and config.py file.
            -  **args.cmd**: `package` in this case
            -  **args.display_name**: will give the App that display name. Default: name from setup.py file
            -  **args.repository_name**: if defined, it will replace the default image repository name in app.json for
                                         container access.
            -  **args.keep_build_dir**: if defined, dist/build/ will not be removed.
            -  **args.no_samples**: if defined, set path_payload_samples to None.
            -  **args.validate**: if defined, run ``validate`` and save report in packaged app.
        :type args: argparse Namespace

        :return: Path to new app.zip
        :rtype: str
        """
        # Set name for SDKException
        SDKException.command_ran = self.CMD_NAME

        # Get absolute path_to_src
        path_to_src = os.path.abspath(args.package)

        LOG.debug("\nPath to project: %s", path_to_src)

        # Ensure the src directory exists and we have WRITE access
        sdk_helpers.validate_dir_paths(os.W_OK, path_to_src)

        # Generate path to setup.py file
        path_setup_py_file = os.path.join(path_to_src,
                                          package_helpers.BASE_NAME_SETUP_PY)

        # Ensure we have read permissions for setup.py
        sdk_helpers.validate_file_paths(os.R_OK, path_setup_py_file)

        # Parse the setup.py file
        setup_py_attributes = package_helpers.parse_setup_py(
            path_setup_py_file,
            package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

        LOG.debug("\nProject name: %s",
                  setup_py_attributes.get("name", "unknown"))

        # Generate paths to files required to create app
        path_docker_file = os.path.join(path_to_src,
                                        package_helpers.BASE_NAME_DOCKER_FILE)
        path_entry_point = os.path.join(path_to_src,
                                        package_helpers.BASE_NAME_ENTRY_POINT)
        path_apikey_permissions_file = os.path.join(
            path_to_src, package_helpers.BASE_NAME_APIKEY_PERMS_FILE)
        path_output_dir = os.path.join(path_to_src,
                                       package_helpers.BASE_NAME_DIST_DIR)
        path_extension_logo = os.path.join(
            path_to_src, package_helpers.PATH_ICON_EXTENSION_LOGO)
        path_company_logo = os.path.join(
            path_to_src, package_helpers.PATH_ICON_COMPANY_LOGO)
        path_payload_samples = os.path.join(
            path_to_src, package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR)

        # if --no-samples flag, set path_payload_samples to None
        if args.no_samples:
            path_payload_samples = None

        # if --validate flag is set, run validate command
        # else set the path to the file if it exists or None if doesn't exist
        if args.validate:
            LOG.info(
                "Validation on {0} is starting. \nTo skip, run the 'package' command without the '--validate' flag.\nValidations can be executated separately by running: \n  'resilient-sdk validate -p {0}' \nto see more in-depth results.\n"
                .format(args.package))

            validate_args = self.cmd_validate.parser.parse_known_args()[0]

            path_validate_report = self.cmd_validate.execute_command(
                validate_args, output_suppressed=True, run_from_package=True)
        else:
            path_validate_report = package_helpers.check_validate_report_exists()

        # Ensure the 'Dockerfile' and 'entrypoint.sh' files exist and we have READ access
        sdk_helpers.validate_file_paths(os.R_OK, path_docker_file,
                                        path_entry_point)

        LOG.info("\nBuild Distribution starting\n")

        # Create the build distribution
        use_setuptools.run_setup(setup_script=path_setup_py_file,
                                 args=["sdist", "--formats=gztar"])

        LOG.info("\nBuild Distribution finished. See: %s", path_output_dir)

        # Create the app
        path_the_extension_zip = package_helpers.create_extension(
            path_setup_py_file=path_setup_py_file,
            path_apikey_permissions_file=path_apikey_permissions_file,
            output_dir=path_output_dir,
            custom_display_name=args.display_name,
            repository_name=args.repository_name,
            keep_build_dir=args.keep_build_dir,
            path_extension_logo=path_extension_logo,
            path_company_logo=path_company_logo,
            path_payload_samples=path_payload_samples,
            path_validate_report=path_validate_report,
            image_hash=args.image_hash)

        LOG.info("App created at: %s", path_the_extension_zip)

        return path_the_extension_zip
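A minimal usage sketch for the package command above, assuming the resilient-sdk console script is installed and 'fn_my_app' is a hypothetical package directory containing the setup.py, Dockerfile, entrypoint.sh and API key permissions file referenced in this method:

import subprocess

# Equivalent to the arguments documented in the docstring above;
# add "--validate" to run validations and bundle the report into the app.zip.
subprocess.run(
    ["resilient-sdk", "package", "-p", "fn_my_app", "--display-name", "My App"],
    check=True,
)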