Example #1
0
def fx_mock_res_client():
    """
    Before: Creates a mock instance of res_client
    After: Removes temp directory used to store temp app.config

    The teardown runs in a ``finally`` block so the temp directory is
    removed even if building the app.config or the client raises after
    the directory has been created.
    """
    _mk_temp_dir()
    try:
        app_config = _mk_app_config()
        yield sdk_helpers.get_resilient_client(path_config_file=app_config)
    finally:
        _rm_temp_dir()
def test_installation(fx_copy_fn_main_mock_integration, fx_get_sub_parser, fx_cmd_line_args_package):
    """End-to-end: package the mock integration, then upload and install the .zip."""
    integration_name = fx_copy_fn_main_mock_integration[0]
    integration_temp_path = fx_copy_fn_main_mock_integration[1]

    # Point the "fn_main_mock_integration" cmd line arg at the temp dir copy
    sys.argv[sys.argv.index(integration_name)] = integration_temp_path

    # Package the app and capture the path to the produced .zip
    packager = CmdPackage(fx_get_sub_parser)
    parsed_args = packager.parser.parse_known_args()[0]
    app_zip_path = packager.execute_command(parsed_args)

    # Connect to test resilient
    res_client = sdk_helpers.get_resilient_client()

    # Upload the .zip and verify the server accepted it
    upload_res = test_helpers.upload_app_zip(res_client, app_zip_path)
    assert upload_res.status_code == 200

    # Install it and verify the reported status
    install_res = test_helpers.install_app_zip(res_client, upload_res.json())
    assert install_res.get("status") == "installed"
    def _gen_package(args, setup_py_attributes=None):
        """
        Generate a 'codegen' package: render the full package directory
        structure (setup.py, Dockerfile, components, tests, data etc.) from
        Jinja2 templates, using data taken from an org export (either a local
        --exportfile or the latest export pulled from the Resilient server).

        :param args: parsed command line args (package, output, exportfile and
            the object-selection args, e.g. function, workflow, rule, ...)
        :param setup_py_attributes: parsed attributes of an existing setup.py;
            only ``version`` is read here. Defaults to an empty dict.
        :raises SDKException: if ``args.package`` is not a valid package name
        :return: absolute path to the generated package directory
        :rtype: str
        """

        LOG.info("Generating codegen package...")

        # Avoid the mutable-default-argument pitfall: default to a fresh dict
        if setup_py_attributes is None:
            setup_py_attributes = {}

        if not sdk_helpers.is_valid_package_name(args.package):
            raise SDKException(u"'{0}' is not a valid package name".format(args.package))

        # Strip off version information, if present in package base folder, to get the package name.
        package_name = re.split(VERSION_REGEX, args.package, maxsplit=1)[0]

        # package_name is, by construction, a prefix of args.package; slice off
        # that prefix to get the base version suffix (e.g. "-1.2.3" or "").
        # Slicing avoids re-splitting with package_name as a regex pattern,
        # which would misbehave if the name contained regex metacharacters.
        base_version = args.package[len(package_name):]

        # Get output_base, use args.output if defined, else current directory
        output_base = args.output if args.output else os.curdir
        output_base = os.path.abspath(output_base)

        # If --exportfile is specified, read org_export from that file
        if args.exportfile:
            LOG.info("Using local export file: %s", args.exportfile)
            org_export = sdk_helpers.read_local_exportfile(args.exportfile)

        else:
            # Instantiate connection to the Resilient Appliance
            res_client = sdk_helpers.get_resilient_client()

            # Generate + get latest export from Resilient Server
            org_export = sdk_helpers.get_latest_org_export(res_client)

        # Get data required for Jinja2 templates from export
        jinja_data = sdk_helpers.get_from_export(org_export,
                                                 message_destinations=args.messagedestination,
                                                 functions=args.function,
                                                 workflows=args.workflow,
                                                 rules=args.rule,
                                                 fields=args.field,
                                                 artifact_types=args.artifacttype,
                                                 datatables=args.datatable,
                                                 tasks=args.task,
                                                 scripts=args.script)

        # Get 'minified' version of the export. This is used in customize.py
        jinja_data["export_data"] = sdk_helpers.minify_export(org_export,
                                                              message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                                                              functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                                                              workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                                                              rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                                                              fields=jinja_data.get("all_fields"),
                                                              artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
                                                              datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                                                              tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
                                                              phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
                                                              scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        # Validate we have write permissions
        sdk_helpers.validate_dir_paths(os.W_OK, output_base)

        # Join package_name to output base (add base version if running against a folder which includes a version).
        output_base = os.path.join(output_base, package_name + base_version)

        # If the output_base directory does not exist, create it
        if not os.path.exists(output_base):
            os.makedirs(output_base)

        # Instansiate Jinja2 Environment with path to Jinja2 templates
        jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template")

        # This dict maps our package file structure to  Jinja2 templates
        package_mapping_dict = {
            "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
            "README.md": ("README.md.jinja2", jinja_data),
            "setup.py": ("setup.py.jinja2", jinja_data),
            "tox.ini": ("tox.ini.jinja2", jinja_data),
            "Dockerfile": ("Dockerfile.jinja2", jinja_data),
            "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
            "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
            "data": {},
            "icons": {
                "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
                "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
            },
            "doc": {
                "screenshots": {
                    "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
                }
            },
            package_name: {
                "__init__.py": ("package/__init__.py.jinja2", jinja_data),
                "LICENSE": ("package/LICENSE.jinja2", jinja_data),

                "components": {
                    "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
                },
                "util": {
                    "data": {
                        "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                    },
                    "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                    "config.py": ("package/util/config.py.jinja2", jinja_data),
                    "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                    "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
                }
            }
        }

        # If there are Functions, add a 'tests' directory
        if jinja_data.get("functions"):
            package_mapping_dict["tests"] = {}

        # Loop each Function ('or []' guards against a missing/None entry)
        for f in jinja_data.get("functions") or []:
            # Add package_name to function data
            f["package_name"] = package_name

            # Generate function_component.py file name
            file_name = u"funct_{0}.py".format(f.get("export_key"))

            # Add to 'components' directory
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

        for w in jinja_data.get("workflows") or []:

            # Generate wf_xx.md file name
            file_name = u"wf_{0}.md".format(w.get(ResilientObjMap.WORKFLOWS))

            # Add workflow to data directory
            package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

        newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
            jinja_mapping_dict=package_mapping_dict,
            jinja_env=jinja_env,
            target_dir=output_base,
            package_dir=output_base)

        # Log new and skipped files
        if newly_generated_files:
            LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

        if skipped_files:
            LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

        LOG.info("'codegen' complete for '%s'", package_name)

        # Return the generated path so callers can locate the package
        return output_base
Example #4
0
    def _gen_package(args, setup_py_attributes=None):
        """
        Generate a 'codegen' package: render the full package directory
        structure (setup.py, Dockerfile, components, tests, payload_samples
        etc.) from Jinja2 templates, using data taken from an org export
        (either a local --exportfile or the latest export pulled from the
        Resilient server).

        :param args: parsed command line args (package, output, reload,
            config, exportfile and the object-selection args, e.g. function,
            workflow, rule, playbook, ...)
        :param setup_py_attributes: parsed attributes of an existing setup.py;
            only ``version`` is read here. Defaults to an empty dict.
        :raises SDKException: if the target already exists without --reload,
            if ``args.package`` is not a valid package name, or if a generated
            file name would collide with an existing function/workflow name
        :return: absolute path to the generated package directory
        :rtype: str
        """

        LOG.info("Generating codegen package...")

        # Avoid the mutable-default-argument pitfall: default to a fresh dict
        if setup_py_attributes is None:
            setup_py_attributes = {}

        sdk_helpers.is_python_min_supported_version()

        if os.path.exists(args.package) and not args.reload:
            raise SDKException(u"'{0}' already exists. Add --reload flag to regenerate it".format(args.package))

        if not sdk_helpers.is_valid_package_name(args.package):
            raise SDKException(u"'{0}' is not a valid package name".format(args.package))

        # The package_name will be specified in the args
        package_name = args.package

        # Get output_base, use args.output if defined, else current directory
        output_base = args.output if args.output else os.curdir
        output_base = os.path.abspath(output_base)

        # If --exportfile is specified, read org_export from that file
        if args.exportfile:
            LOG.info("Using local export file: %s", args.exportfile)
            org_export = sdk_helpers.read_local_exportfile(args.exportfile)

        else:
            # Instantiate connection to the Resilient Appliance
            res_client = sdk_helpers.get_resilient_client(path_config_file=args.config)

            # Generate + get latest export from Resilient Server
            org_export = sdk_helpers.get_latest_org_export(res_client)

        # Get data required for Jinja2 templates from export
        jinja_data = sdk_helpers.get_from_export(org_export,
                                                 message_destinations=args.messagedestination,
                                                 functions=args.function,
                                                 workflows=args.workflow,
                                                 rules=args.rule,
                                                 fields=args.field,
                                                 artifact_types=args.artifacttype,
                                                 datatables=args.datatable,
                                                 tasks=args.task,
                                                 scripts=args.script,
                                                 incident_types=args.incidenttype,
                                                 playbooks=args.playbook)

        # Get 'minified' version of the export. This is used in customize.py
        jinja_data["export_data"] = sdk_helpers.minify_export(org_export,
                                                              message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                                                              functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                                                              workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                                                              rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                                                              fields=jinja_data.get("all_fields"),
                                                              artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
                                                              datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                                                              tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
                                                              phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
                                                              scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")),
                                                              incident_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_TYPES, jinja_data.get("incident_types")),
                                                              playbooks=sdk_helpers.get_object_api_names(ResilientObjMap.PLAYBOOKS, jinja_data.get("playbooks")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        jinja_data["resilient_libraries_version"] = sdk_helpers.get_resilient_libraries_version_to_use()

        # Validate we have write permissions
        sdk_helpers.validate_dir_paths(os.W_OK, output_base)

        if not args.reload:
            # If this is not a reload, join package_name to output base
            output_base = os.path.join(output_base, package_name)

        # If the output_base directory does not exist, create it
        if not os.path.exists(output_base):
            os.makedirs(output_base)

        # Instansiate Jinja2 Environment with path to Jinja2 templates
        jinja_env = sdk_helpers.setup_jinja_env(constants.PACKAGE_TEMPLATE_PATH)

        # This dict maps our package file structure to  Jinja2 templates
        package_mapping_dict = {
            "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
            "README.md": ("README.md.jinja2", jinja_data),
            "setup.py": ("setup.py.jinja2", jinja_data),
            "tox.ini": ("tox.ini.jinja2", jinja_data),
            "Dockerfile": ("Dockerfile.jinja2", jinja_data),
            "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
            "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
            "data": {},
            "icons": {
                "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
                "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
            },
            "doc": {
                "screenshots": {
                    "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
                }
            },
            package_name: {
                "__init__.py": ("package/__init__.py.jinja2", jinja_data),
                "LICENSE": ("package/LICENSE.jinja2", jinja_data),

                "components": {
                    "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
                },
                "util": {
                    "data": {
                        "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                    },
                    "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                    "config.py": ("package/util/config.py.jinja2", jinja_data),
                    "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                    "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
                }
            }
        }

        # If there are Functions, add a 'tests' and a 'payload_samples' directory (if in dev mode)
        if jinja_data.get("functions"):
            package_mapping_dict["tests"] = {}
            package_mapping_dict[package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR] = {}

        # Get a list of function names in export ('or []' guards against a missing/None entry).
        fn_names = [f.get(ResilientObjMap.FUNCTIONS) for f in jinja_data.get("functions") or []]

        # Loop each Function
        for f in jinja_data.get("functions") or []:
            # Add package_name to function data
            f["package_name"] = package_name

            # Get function name
            fn_name = f.get(ResilientObjMap.FUNCTIONS)

            # Generate funct_function_component.py file name
            # Don't add prefix if function name already begins with "func_" or "funct_".
            if re.search(r"^(func|funct)_", fn_name):
                file_name = u"{0}.py".format(fn_name)
            else:
                file_name = u"funct_{0}.py".format(fn_name)
                # Check if file_name without extension already exists in functions names list.
                if os.path.splitext(file_name)[0] in fn_names:
                    raise SDKException(u"File name '{0}' already in use please rename the function '{1}'."
                                       .format(file_name, fn_name))

            # Add an 'atomic function' to 'components' directory else add a 'normal function'
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/atomic_function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

            # Add a 'payload_samples/fn_name' directory and the files to it
            CmdCodegen.add_payload_samples(package_mapping_dict, fn_name, f)

        # Get a list of workflow names in export ('or []' guards against a missing/None entry).
        wf_names = [w.get(ResilientObjMap.WORKFLOWS) for w in jinja_data.get("workflows") or []]

        for w in jinja_data.get("workflows") or []:
            # Get workflow name
            wf_name = w.get(ResilientObjMap.WORKFLOWS)

            # Generate wf_xx.md file name
            # Don't add prefix if workflow name already begins with "wf_".
            if re.search(r"^wf_", wf_name):
                file_name = u"{0}.md".format(wf_name)
            else:
                file_name = u"wf_{0}.md".format(wf_name)
                # Check if file_name without extension already exists in workflow names list.
                if os.path.splitext(file_name)[0] in wf_names:
                    raise SDKException(u"File name '{0}' already in use please recreate the workflow '{1}'."
                                       .format(file_name, wf_name))

            # Add workflow to data directory
            package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

        newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
            jinja_mapping_dict=package_mapping_dict,
            jinja_env=jinja_env,
            target_dir=output_base,
            package_dir=output_base)

        # Log new and skipped files
        if newly_generated_files:
            LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

        if skipped_files:
            LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

        LOG.info("'codegen' complete for '%s'", package_name)

        return output_base
    def execute_command(self, args):
        """
        Run the 'extract' command: get an org export (from --exportfile or
        the live server), filter it down to the requested objects, minify it
        and write the result to ``<name->export-<timestamp>.res`` in the
        output directory — optionally also producing a .zip archive of it.

        :param args: parsed command line args (output, exportfile, config,
            name, zip and the object-selection args, e.g. function, rule, ...)
        :type args: argparse.Namespace
        """
        LOG.info("Starting 'extract'...")
        LOG.debug("'extract' called with %s", args)

        # Set docgen name for SDKException
        SDKException.command_ran = self.CMD_NAME

        # Get output_base, use args.output if defined, else current directory
        output_base = args.output if args.output else os.curdir
        output_base = os.path.abspath(output_base)

        # If --exportfile is specified, read org_export from that file
        if args.exportfile:
            LOG.info("Using local export file: %s", args.exportfile)
            org_export = sdk_helpers.read_local_exportfile(args.exportfile)

        else:
            # Instantiate connection to the Resilient Appliance
            res_client = sdk_helpers.get_resilient_client(
                path_config_file=args.config)

            # Generate + get latest export from Resilient Server
            org_export = sdk_helpers.get_latest_org_export(res_client)

        LOG.info("Extracting data from export...")

        # Get extracted data from export
        extract_data = sdk_helpers.get_from_export(
            org_export,
            message_destinations=args.messagedestination,
            functions=args.function,
            workflows=args.workflow,
            rules=args.rule,
            fields=args.field,
            artifact_types=args.artifacttype,
            datatables=args.datatable,
            tasks=args.task,
            scripts=args.script,
            incident_types=args.incidenttype,
            playbooks=args.playbook)

        # Get 'minified' version of the export. This is used in to create export.res
        min_extract_data = sdk_helpers.minify_export(
            org_export,
            message_destinations=sdk_helpers.get_object_api_names(
                ResilientObjMap.MESSAGE_DESTINATIONS,
                extract_data.get("message_destinations")),
            functions=sdk_helpers.get_object_api_names(
                ResilientObjMap.FUNCTIONS, extract_data.get("functions")),
            workflows=sdk_helpers.get_object_api_names(
                ResilientObjMap.WORKFLOWS, extract_data.get("workflows")),
            rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES,
                                                   extract_data.get("rules")),
            fields=extract_data.get("all_fields"),
            artifact_types=sdk_helpers.get_object_api_names(
                ResilientObjMap.INCIDENT_ARTIFACT_TYPES,
                extract_data.get("artifact_types")),
            datatables=sdk_helpers.get_object_api_names(
                ResilientObjMap.DATATABLES, extract_data.get("datatables")),
            tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS,
                                                   extract_data.get("tasks")),
            phases=sdk_helpers.get_object_api_names(
                ResilientObjMap.PHASES, extract_data.get("phases")),
            scripts=sdk_helpers.get_object_api_names(
                ResilientObjMap.SCRIPTS, extract_data.get("scripts")),
            incident_types=sdk_helpers.get_object_api_names(
                ResilientObjMap.INCIDENT_TYPES,
                extract_data.get("incident_types")),
            playbooks=sdk_helpers.get_object_api_names(
                ResilientObjMap.PLAYBOOKS,
                extract_data.get(constants.CUST_PLAYBOOKS)))

        # Convert dict to JSON string
        if sys.version_info.major >= 3:
            res_data = json.dumps(min_extract_data, ensure_ascii=False)
        else:
            # Python 2: wrap in unicode so the file is written as text, not bytes
            res_data = unicode(json.dumps(min_extract_data,
                                          ensure_ascii=False))

        # Generate path to file
        # NOTE(review): export_date appears to be milliseconds since epoch
        # (hence /1000.0) — confirm against the export schema
        file_name = "export-{0}".format(
            sdk_helpers.get_timestamp(
                org_export.get("export_date", 0) / 1000.0))

        # If custom name supplied, prepend it
        if args.name:
            file_name = "{0}-{1}".format(args.name, file_name)

        path_file_to_write = os.path.join(output_base,
                                          "{0}.res".format(file_name))

        LOG.info("Generating %s.res", file_name)

        # Write the file
        sdk_helpers.write_file(path_file_to_write, res_data)

        LOG.debug('Wrote: %s', path_file_to_write)

        # If we should create .zip archive
        if args.zip:

            LOG.info("Generating %s.zip", file_name)

            # Get path to .zip
            path_dir_to_zip = os.path.join(output_base, file_name)

            # Create directory
            os.makedirs(path_dir_to_zip)

            # Copy the written export file into new dir
            shutil.copy(path_file_to_write, path_dir_to_zip)

            # zip the dir. With a relative base_name, make_archive writes the
            # .zip into the current working directory — hence the move below.
            the_zip = shutil.make_archive(base_name=file_name,
                                          format="zip",
                                          root_dir=path_dir_to_zip)

            if output_base != os.path.dirname(the_zip):
                # Move the zip into the output base
                shutil.move(the_zip, output_base)

            LOG.debug('Wrote: %s.zip', path_dir_to_zip)

            # Remove directory (the staging dir is only needed to build the zip)
            shutil.rmtree(path_dir_to_zip)

        LOG.info("'extract' complete")
    def execute_command(self, args):
        """
        When the clone command is executed, we want to perform these actions:
        1: Setup a client to Resilient and get the latest export file.
        2: Cloning a single object per type: For each specified action type:
            2.1: Ensure the user provided both the source and new action name
            2.2: Check that the provided source action type exists and the new action name is unique.
            2.3: Prepare a new Object from the source action object replacing the names as needed.
        3: Cloning multiple objects with a prefix: For each specified action type:
            3.1: Ensure the user provided source action objects which exist
            3.2: Prepend the prefix to the unique identifiers for each object
        4: Prepare our configuration import object for upload with the newly cloned action objects
        5: Submit a configuration import through the API
        6: Confirm the change has been accepted

        :param args: The command line args passed with the clone command of Resilient Objects to be cloned
        :type args: argparse.ArgumentParser.Namespace
        :raises SDKException: An SDKException detailing what failed in the operation
        """
        SDKException.command_ran = "clone"
        LOG.debug("Called clone with %s", args)
        start = datetime.now()

        # Instansiate connection to the Resilient Appliance.
        # Stored on the class so helper methods can reuse the same client.
        CmdClone.res_client = get_resilient_client(
            path_config_file=args.config)

        org_export = get_latest_org_export(CmdClone.res_client)

        # For the new export data DTO minify the export to only its mandatory attributes
        new_export_data = minify_export(org_export)

        # If any of the supported args are provided
        if any([
                args.function, args.workflow, args.rule,
                args.messagedestination, args.script, args.playbook
        ]):
            # --prefix clones every named object in bulk; otherwise each
            # object type is cloned individually below
            if args.prefix:
                self._clone_multiple_action_objects(args, new_export_data,
                                                    org_export)

            else:

                if args.script:
                    # If a Script was provided, call _clone_action_object with Script related params and
                    # add the newly cloned Script to new_export_data
                    new_export_data['scripts'] = self._clone_action_object(
                        args.script, org_export, 'Script',
                        ResilientObjMap.SCRIPTS, 'scripts',
                        CmdClone.replace_common_object_attrs, args.changetype)
                if args.function:
                    # If a Function was provided, call _clone_action_object with Function related params and
                    # add the newly cloned Function to new_export_data
                    new_export_data['functions'] = self._clone_action_object(
                        args.function, org_export, 'Function',
                        ResilientObjMap.FUNCTIONS, 'functions',
                        CmdClone.replace_function_object_attrs)

                if args.rule:
                    # If a Rule was provided, call _clone_action_object with Rule related params and
                    # add the newly cloned Rule to new_export_data
                    new_export_data["actions"] = self._clone_action_object(
                        args.rule,
                        org_export,
                        'Rule',
                        ResilientObjMap.RULES,
                        'actions',
                        CmdClone.replace_rule_object_attrs,
                        new_object_type=args.changetype)

                if args.workflow:
                    # If a Workflow was provided, call _clone_workflow with Workflow related params and
                    # add the newly cloned Workflow to new_export_data
                    new_export_data["workflows"] = self._clone_workflow(
                        args, org_export)

                if args.messagedestination:
                    # If a Message Destination was provided, call _clone_action_object with Message Destination related params and
                    # add the newly cloned Message Destination to new_export_data
                    new_export_data[
                        "message_destinations"] = self._clone_action_object(
                            args.messagedestination, org_export,
                            'Message Destination',
                            ResilientObjMap.MESSAGE_DESTINATIONS,
                            'message_destinations',
                            CmdClone.replace_md_object_attrs)

                if args.playbook:

                    # Playbooks require a minimum server version — fail early
                    if get_resilient_server_version(
                            CmdClone.res_client
                    ) < constants.MIN_SOAR_SERVER_VERSION_PLAYBOOKS:
                        raise SDKException(
                            u"Playbooks are only supported with IBM SOAR >= {0}"
                            .format(
                                constants.MIN_SOAR_SERVER_VERSION_PLAYBOOKS))

                    # If a Playbook was provided, call _clone_action_object with Playbook related params and
                    # add the newly cloned Playbook to new_export_data
                    new_export_data[
                        constants.CUST_PLAYBOOKS] = self._clone_action_object(
                            input_args=args.playbook,
                            org_export=org_export,
                            obj_name='Playbook',
                            obj_identifier=ResilientObjMap.PLAYBOOKS,
                            obj_key=constants.CUST_PLAYBOOKS,
                            replace_fn=CmdClone.replace_playbook_object_attrs,
                            new_object_type=args.changetype)

            # Upload the cloned objects as a configuration import
            add_configuration_import(new_export_data, CmdClone.res_client)
            # If any message destinations were cloned, after creation attach a Authorised User or API Key
            # to the destination. Providing this info in the Configurations API call above will be ignored.
            self.add_authorised_info_to_md(new_export_data)

        else:
            # No clonable object args given: show usage instead of doing nothing
            self.parser.print_help()

        time_delta = (datetime.now() - start).total_seconds()
        LOG.info("'clone' command finished in {} seconds".format(time_delta))
Example #7
0
def test_get_resilient_client(fx_mk_temp_dir, fx_mk_app_config):
    """get_resilient_client should return a SimpleClient built from the app.config fixture."""
    client = sdk_helpers.get_resilient_client(path_config_file=fx_mk_app_config)
    assert isinstance(client, SimpleClient)