def test_minify_export_default_keys_to_keep(fx_mock_res_client):
    """Verify minify_export keeps the mandatory top-level export keys by default."""
    org_export = sdk_helpers.get_latest_org_export(fx_mock_res_client)
    minified_export = sdk_helpers.minify_export(org_export)

    # These keys must always survive minification, even with no filters given
    for expected_key in ("export_date", "export_format_version", "id", "server_version"):
        assert expected_key in minified_export
def test_minify_export(fx_mock_res_client):
    """Verify minify_export keeps only the requested function plus internal objects."""
    org_export = sdk_helpers.get_latest_org_export(fx_mock_res_client)
    minified_export = sdk_helpers.minify_export(org_export, functions=["mock_function_one"])

    functions = minified_export.get("functions")
    fields = minified_export.get("fields")
    incident_types = minified_export.get("incident_types")

    # Test it minified given function
    assert len(functions) == 1
    assert functions[0].get("export_key") == "mock_function_one"

    # Test it set a non-mentioned object to 'empty'
    assert minified_export.get("phases") == []

    # Test it added the internal field
    assert len(fields) == 1
    the_field = fields[0]
    assert the_field.get("export_key") == "incident/internal_customizations_field"
    assert the_field.get("uuid") == "bfeec2d4-3770-11e8-ad39-4a0004044aa1"

    # Test it added the default incident type
    assert len(incident_types) == 1
    the_incident_type = incident_types[0]
    assert the_incident_type.get("export_key") == "Customization Packages (internal)"
    assert the_incident_type.get("uuid") == "bfeec2d4-3770-11e8-ad39-4a0004044aa0"
def _gen_package(args, setup_py_attributes=None):
    """
    Generate a codegen package (file tree + rendered Jinja2 templates) from an org export.

    :param args: parsed command line args (package, output, exportfile and the
        Action Object filter lists such as function/workflow/rule)
    :param setup_py_attributes: attributes parsed from an existing setup.py; only
        "version" is read here. Defaults to an empty dict.
    :raises SDKException: if args.package is not a valid package name
    """
    # BUG FIX: was a mutable default argument ({}), which is shared across calls.
    if setup_py_attributes is None:
        setup_py_attributes = {}

    LOG.info("Generating codegen package...")

    if not sdk_helpers.is_valid_package_name(args.package):
        raise SDKException(u"'{0}' is not a valid package name".format(args.package))

    # Strip off version information, if present in package base folder, to get the package name.
    package_name = re.split(VERSION_REGEX, args.package, 1)[0]

    # Get base version if we are running against a package base folder with version.
    base_version = ''.join(re.split(package_name, args.package))

    # Get output_base, use args.output if defined, else current directory
    output_base = args.output if args.output else os.curdir
    output_base = os.path.abspath(output_base)

    # If --exportfile is specified, read org_export from that file
    if args.exportfile:
        LOG.info("Using local export file: %s", args.exportfile)
        org_export = sdk_helpers.read_local_exportfile(args.exportfile)
    else:
        # Instantiate connection to the Resilient Appliance
        res_client = sdk_helpers.get_resilient_client()
        # Generate + get latest export from Resilient Server
        org_export = sdk_helpers.get_latest_org_export(res_client)

    # Get data required for Jinja2 templates from export
    jinja_data = sdk_helpers.get_from_export(org_export,
                                             message_destinations=args.messagedestination,
                                             functions=args.function,
                                             workflows=args.workflow,
                                             rules=args.rule,
                                             fields=args.field,
                                             artifact_types=args.artifacttype,
                                             datatables=args.datatable,
                                             tasks=args.task,
                                             scripts=args.script)

    # Get 'minified' version of the export. This is used in customize.py
    jinja_data["export_data"] = sdk_helpers.minify_export(
        org_export,
        message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
        functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
        workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
        rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
        fields=jinja_data.get("all_fields"),
        artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
        datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
        tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
        phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
        scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

    # Add package_name to jinja_data
    jinja_data["package_name"] = package_name

    # Add version
    jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

    # Validate we have write permissions
    sdk_helpers.validate_dir_paths(os.W_OK, output_base)

    # Join package_name to output base (add base version if running against a folder which includes a version).
    output_base = os.path.join(output_base, package_name + base_version)

    # If the output_base directory does not exist, create it
    if not os.path.exists(output_base):
        os.makedirs(output_base)

    # Instantiate Jinja2 Environment with path to Jinja2 templates
    jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template")

    # This dict maps our package file structure to Jinja2 templates
    package_mapping_dict = {
        "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
        "README.md": ("README.md.jinja2", jinja_data),
        "setup.py": ("setup.py.jinja2", jinja_data),
        "tox.ini": ("tox.ini.jinja2", jinja_data),
        "Dockerfile": ("Dockerfile.jinja2", jinja_data),
        "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
        "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
        "data": {},
        "icons": {
            "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
            "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
        },
        "doc": {
            "screenshots": {
                "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
            }
        },
        package_name: {
            "__init__.py": ("package/__init__.py.jinja2", jinja_data),
            "LICENSE": ("package/LICENSE.jinja2", jinja_data),
            "components": {
                "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
            },
            "util": {
                "data": {
                    "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                },
                "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                "config.py": ("package/util/config.py.jinja2", jinja_data),
                "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
            }
        }
    }

    # If there are Functions, add a 'tests' directory
    if jinja_data.get("functions"):
        package_mapping_dict["tests"] = {}

        # Loop each Function
        for f in jinja_data.get("functions"):
            # Add package_name to function data
            f["package_name"] = package_name

            # Generate function_component.py file name
            file_name = u"funct_{0}.py".format(f.get("export_key"))

            # Add to 'components' directory
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

    for w in jinja_data.get("workflows"):
        # Generate wf_xx.md file name
        file_name = u"wf_{0}.md".format(w.get(ResilientObjMap.WORKFLOWS))

        # Add workflow to data directory
        package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

    newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
        jinja_mapping_dict=package_mapping_dict,
        jinja_env=jinja_env,
        target_dir=output_base,
        package_dir=output_base)

    # Log new and skipped files
    if newly_generated_files:
        LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

    if skipped_files:
        LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

    LOG.info("'codegen' complete for '%s'", package_name)
def _set_version(args):
    """
    Set a new Resilient Platform version in a package's customize.py (and the
    /util/data/export.res file if present), regenerating both from Jinja2
    templates.

    Flow: validate the version string, read setup.py and customize.py, patch the
    server_version in the ImportDefinition, back up the existing files with a
    .bak suffix, render new files, and on any failure restore the backups in
    the finally block.

    :param args: parsed command line args; reads args.set_version and args.package
    :raises SDKException: if args.set_version is not a valid version string
    """
    new_version = args.set_version

    if not sdk_helpers.is_valid_version_syntax(new_version):
        raise SDKException("{0} is not a valid version".format(new_version))

    # Split "a.b.c" into its integer parts: [major, minor, build_number]
    new_version_int = list(map(int, (re.findall(r"\d+", new_version))))

    # Get absolute path_to_src
    path_to_src = os.path.abspath(args.package)

    # Get path to setup.py file
    path_setup_py_file = os.path.join(path_to_src, package_helpers.BASE_NAME_SETUP_PY)

    # Parse the setup.py file
    setup_py_attributes = package_helpers.parse_setup_py(path_setup_py_file, package_helpers.SUPPORTED_SETUP_PY_ATTRIBUTE_NAMES)

    package_name = setup_py_attributes.get("name", "")

    LOG.info("Setting Resilient Platform version for %s to %s", package_name, new_version)

    # Get the customize file location.
    path_customize_py = package_helpers.get_configuration_py_file_path("customize", setup_py_attributes)

    # Get customize.py ImportDefinition
    customize_py_import_definition = package_helpers.get_import_definition_from_customize_py(path_customize_py)

    old_version = customize_py_import_definition["server_version"]["version"]

    LOG.info("Old Version: %s", old_version)
    LOG.info("New Version: %s", new_version)

    # Set the new version
    customize_py_import_definition["server_version"]["version"] = new_version
    customize_py_import_definition["server_version"]["major"] = new_version_int[0]
    customize_py_import_definition["server_version"]["minor"] = new_version_int[1]
    customize_py_import_definition["server_version"]["build_number"] = new_version_int[2]

    LOG.info("Loading old customize.py file")

    # Load the customize.py module
    customize_py_module = package_helpers.load_customize_py_module(path_customize_py, warn=False)

    # Get the 'old_params' from customize.py
    old_params = customize_py_module.codegen_reload_data()

    # Rename the old customize.py with .bak
    path_customize_py_bak = sdk_helpers.rename_to_bak_file(path_customize_py)

    # If local export file exists then save it to a .bak file.
    # (Older packages may not have the /util/data/export.res file)
    # Figure out the path of the files first
    dir_customize_py = os.path.dirname(path_customize_py)
    path_local_export_res = os.path.join(dir_customize_py, package_helpers.PATH_LOCAL_EXPORT_RES)

    path_local_export_res_bak = None
    if os.path.isfile(path_local_export_res):
        path_local_export_res_bak = sdk_helpers.rename_to_bak_file(path_local_export_res)

    try:
        # Re-extract the data the templates need, using the object lists the old
        # customize.py declared in codegen_reload_data()
        jinja_data = sdk_helpers.get_from_export(customize_py_import_definition,
                                                 message_destinations=old_params.get("message_destinations"),
                                                 functions=old_params.get("functions"),
                                                 workflows=old_params.get("workflows"),
                                                 rules=old_params.get("actions"),
                                                 fields=old_params.get("incident_fields"),
                                                 artifact_types=old_params.get("incident_artifact_types"),
                                                 datatables=old_params.get("datatables"),
                                                 tasks=old_params.get("automatic_tasks"),
                                                 scripts=old_params.get("scripts"))

        # Minified export (with the patched server_version) that gets embedded
        # in the regenerated customize.py / export.res
        jinja_data["export_data"] = sdk_helpers.minify_export(customize_py_import_definition,
                                                              message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                                                              functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                                                              workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                                                              rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                                                              fields=jinja_data.get("all_fields"),
                                                              artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
                                                              datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
                                                              tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
                                                              phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
                                                              scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

        # Add package_name to jinja_data
        jinja_data["package_name"] = package_name

        # Add version
        jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

        # Instantiate Jinja2 Environment with path to Jinja2 templates for customize.py
        jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template/package/util")
        jinja_template = jinja_env.get_template("customize.py.jinja2")

        LOG.info("Writing new customize.py file")

        # Render & write jinja2 template
        jinja_rendered_text = jinja_template.render(jinja_data)
        sdk_helpers.write_file(path_customize_py, jinja_rendered_text)

        # Instantiate Jinja2 Environment with path to Jinja2 templates for /util/data/export.res
        # NOTE(review): the env above already points at .../package/util, so the
        # template is addressed relative to it; this commented-out line was the
        # previous approach of creating a second env rooted at /util/data.
        #jinja_env = sdk_helpers.setup_jinja_env("data/codegen/templates/package_template/package/util/data")
        jinja_template = jinja_env.get_template("/data/export.res.jinja2")

        LOG.debug("Writing new /util/data/export.res file")

        # Render jinja2 template
        jinja_rendered_text = jinja_template.render(jinja_data)

        # Make sure the /util/data directory is there if it is not
        dir_local_export_res = os.path.dirname(path_local_export_res)
        if not os.path.exists(dir_local_export_res):
            os.makedirs(dir_local_export_res)

        # Write the file
        sdk_helpers.write_file(path_local_export_res, jinja_rendered_text)

        LOG.info("'dev --set-version' complete for '%s'", package_name)

    except Exception as err:
        # Best-effort: log and fall through so the finally block can restore backups
        LOG.error(u"Error running resilient-sdk dev --set-version\n\nERROR:%s", err)

    # This is required in finally block as user may kill using keyboard interrupt
    finally:
        # If an error occurred, customize.py does not exist, rename the backup file to original
        if not os.path.isfile(path_customize_py):
            LOG.info(u"An error occurred. Renaming customize.py.bak to customize.py")
            sdk_helpers.rename_file(path_customize_py_bak, package_helpers.BASE_NAME_CUSTOMIZE_PY)

        if path_local_export_res_bak and not os.path.isfile(path_local_export_res):
            LOG.info(u"An error occurred. Renaming /util/data/export.res.bak to export.res")
            sdk_helpers.rename_file(path_local_export_res_bak, package_helpers.BASE_NAME_LOCAL_EXPORT_RES)
def _gen_package(args, setup_py_attributes=None):
    """
    Generate (or reload) a codegen package from an org export, including
    incident types and playbooks, and a payload_samples directory per Function.

    :param args: parsed command line args (package, output, exportfile, config,
        reload flag and the Action Object filter lists)
    :param setup_py_attributes: attributes parsed from an existing setup.py; only
        "version" is read here. Defaults to an empty dict.
    :raises SDKException: if the package dir already exists without --reload,
        if args.package is not a valid package name, or if a generated
        function/workflow file name clashes with an existing object name
    :return: the output base directory the package was generated into
    """
    # BUG FIX: was a mutable default argument ({}), which is shared across calls.
    if setup_py_attributes is None:
        setup_py_attributes = {}

    LOG.info("Generating codegen package...")

    sdk_helpers.is_python_min_supported_version()

    if os.path.exists(args.package) and not args.reload:
        raise SDKException(u"'{0}' already exists. Add --reload flag to regenerate it".format(args.package))

    if not sdk_helpers.is_valid_package_name(args.package):
        raise SDKException(u"'{0}' is not a valid package name".format(args.package))

    # The package_name will be specified in the args
    package_name = args.package

    # Get output_base, use args.output if defined, else current directory
    output_base = args.output if args.output else os.curdir
    output_base = os.path.abspath(output_base)

    # If --exportfile is specified, read org_export from that file
    if args.exportfile:
        LOG.info("Using local export file: %s", args.exportfile)
        org_export = sdk_helpers.read_local_exportfile(args.exportfile)
    else:
        # Instantiate connection to the Resilient Appliance
        res_client = sdk_helpers.get_resilient_client(path_config_file=args.config)
        # Generate + get latest export from Resilient Server
        org_export = sdk_helpers.get_latest_org_export(res_client)

    # Get data required for Jinja2 templates from export
    jinja_data = sdk_helpers.get_from_export(org_export,
                                             message_destinations=args.messagedestination,
                                             functions=args.function,
                                             workflows=args.workflow,
                                             rules=args.rule,
                                             fields=args.field,
                                             artifact_types=args.artifacttype,
                                             datatables=args.datatable,
                                             tasks=args.task,
                                             scripts=args.script,
                                             incident_types=args.incidenttype,
                                             playbooks=args.playbook)

    # Get 'minified' version of the export. This is used in customize.py
    jinja_data["export_data"] = sdk_helpers.minify_export(
        org_export,
        message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
        functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
        workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
        rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
        fields=jinja_data.get("all_fields"),
        artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, jinja_data.get("artifact_types")),
        datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, jinja_data.get("datatables")),
        tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, jinja_data.get("tasks")),
        phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, jinja_data.get("phases")),
        scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")),
        incident_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_TYPES, jinja_data.get("incident_types")),
        playbooks=sdk_helpers.get_object_api_names(ResilientObjMap.PLAYBOOKS, jinja_data.get("playbooks")))

    # Add package_name to jinja_data
    jinja_data["package_name"] = package_name

    # Add version
    jinja_data["version"] = setup_py_attributes.get("version", package_helpers.MIN_SETUP_PY_VERSION)

    jinja_data["resilient_libraries_version"] = sdk_helpers.get_resilient_libraries_version_to_use()

    # Validate we have write permissions
    sdk_helpers.validate_dir_paths(os.W_OK, output_base)

    if not args.reload:
        # If this is not a reload, join package_name to output base
        output_base = os.path.join(output_base, package_name)

    # If the output_base directory does not exist, create it
    if not os.path.exists(output_base):
        os.makedirs(output_base)

    # Instantiate Jinja2 Environment with path to Jinja2 templates
    jinja_env = sdk_helpers.setup_jinja_env(constants.PACKAGE_TEMPLATE_PATH)

    # This dict maps our package file structure to Jinja2 templates
    package_mapping_dict = {
        "MANIFEST.in": ("MANIFEST.in.jinja2", jinja_data),
        "README.md": ("README.md.jinja2", jinja_data),
        "setup.py": ("setup.py.jinja2", jinja_data),
        "tox.ini": ("tox.ini.jinja2", jinja_data),
        "Dockerfile": ("Dockerfile.jinja2", jinja_data),
        "entrypoint.sh": ("entrypoint.sh.jinja2", jinja_data),
        "apikey_permissions.txt": ("apikey_permissions.txt.jinja2", jinja_data),
        "data": {},
        "icons": {
            "company_logo.png": package_helpers.PATH_DEFAULT_ICON_COMPANY_LOGO,
            "app_logo.png": package_helpers.PATH_DEFAULT_ICON_EXTENSION_LOGO,
        },
        "doc": {
            "screenshots": {
                "main.png": package_helpers.PATH_DEFAULT_SCREENSHOT
            }
        },
        package_name: {
            "__init__.py": ("package/__init__.py.jinja2", jinja_data),
            "LICENSE": ("package/LICENSE.jinja2", jinja_data),
            "components": {
                "__init__.py": ("package/components/__init__.py.jinja2", jinja_data),
            },
            "util": {
                "data": {
                    "export.res": ("package/util/data/export.res.jinja2", jinja_data)
                },
                "__init__.py": ("package/util/__init__.py.jinja2", jinja_data),
                "config.py": ("package/util/config.py.jinja2", jinja_data),
                "customize.py": ("package/util/customize.py.jinja2", jinja_data),
                "selftest.py": ("package/util/selftest.py.jinja2", jinja_data),
            }
        }
    }

    # If there are Functions, add a 'tests' and a 'payload_samples' directory (if in dev mode)
    if jinja_data.get("functions"):
        package_mapping_dict["tests"] = {}
        package_mapping_dict[package_helpers.BASE_NAME_PAYLOAD_SAMPLES_DIR] = {}

        # Get a list of function names in export.
        fn_names = [f.get(ResilientObjMap.FUNCTIONS) for f in jinja_data.get("functions")]

        # Loop each Function
        for f in jinja_data.get("functions"):
            # Add package_name to function data
            f["package_name"] = package_name

            # Get function name
            fn_name = f.get(ResilientObjMap.FUNCTIONS)

            # Generate funct_function_component.py file name
            # Don't add prefix if function name already begins with "func_" or "funct_".
            if re.search(r"^(func|funct)_", fn_name):
                file_name = u"{0}.py".format(fn_name)
            else:
                file_name = u"funct_{0}.py".format(fn_name)

                # Check if file_name without extension already exists in functions names list.
                if os.path.splitext(file_name)[0] in fn_names:
                    raise SDKException(u"File name '{0}' already in use please rename the function '{1}'."
                                       .format(file_name, fn_name))

            # Add an 'atomic function' to 'components' directory else add a 'normal function'
            package_mapping_dict[package_name]["components"][file_name] = ("package/components/atomic_function.py.jinja2", f)

            # Add to 'tests' directory
            package_mapping_dict["tests"][u"test_{0}".format(file_name)] = ("tests/test_function.py.jinja2", f)

            # Add a 'payload_samples/fn_name' directory and the files to it
            CmdCodegen.add_payload_samples(package_mapping_dict, fn_name, f)

    # Get a list of workflow names in export.
    wf_names = [w.get(ResilientObjMap.WORKFLOWS) for w in jinja_data.get("workflows")]

    for w in jinja_data.get("workflows"):
        # Get workflow name
        wf_name = w.get(ResilientObjMap.WORKFLOWS)

        # Generate wf_xx.md file name
        # Don't add prefix if workflow name already begins with "wf_".
        if re.search(r"^wf_", wf_name):
            file_name = u"{0}.md".format(wf_name)
        else:
            file_name = u"wf_{0}.md".format(wf_name)

            # Check if file_name without extension already exists in workflow names list.
            if os.path.splitext(file_name)[0] in wf_names:
                raise SDKException(u"File name '{0}' already in use please recreate the workflow '{1}'."
                                   .format(file_name, wf_name))

        # Add workflow to data directory
        package_mapping_dict["data"][file_name] = ("data/workflow.md.jinja2", w)

    newly_generated_files, skipped_files = CmdCodegen.render_jinja_mapping(
        jinja_mapping_dict=package_mapping_dict,
        jinja_env=jinja_env,
        target_dir=output_base,
        package_dir=output_base)

    # Log new and skipped files
    if newly_generated_files:
        LOG.debug("Newly generated files:\n\t> %s", "\n\t> ".join(newly_generated_files))

    if skipped_files:
        LOG.debug("Files Skipped:\n\t> %s", "\n\t> ".join(skipped_files))

    LOG.info("'codegen' complete for '%s'", package_name)

    return output_base
def execute_command(self, args):
    """
    Run the 'extract' command: read an org export (from --exportfile or from a
    live SOAR server), minify it down to the requested Action Objects, and write
    the result as a JSON .res file in args.output (or cwd). Optionally also
    package that file into a .zip archive when args.zip is set.

    :param args: parsed command line args; reads output, exportfile, config,
        name, zip and the per-object filter lists
    """
    LOG.info("Starting 'extract'...")
    LOG.debug("'extract' called with %s", args)

    # Set docgen name for SDKException
    SDKException.command_ran = self.CMD_NAME

    # Get output_base, use args.output if defined, else current directory
    output_base = args.output if args.output else os.curdir
    output_base = os.path.abspath(output_base)

    # If --exportfile is specified, read org_export from that file
    if args.exportfile:
        LOG.info("Using local export file: %s", args.exportfile)
        org_export = sdk_helpers.read_local_exportfile(args.exportfile)
    else:
        # Instantiate connection to the Resilient Appliance
        res_client = sdk_helpers.get_resilient_client(path_config_file=args.config)
        # Generate + get latest export from Resilient Server
        org_export = sdk_helpers.get_latest_org_export(res_client)

    LOG.info("Extracting data from export...")

    # Get extracted data from export
    extract_data = sdk_helpers.get_from_export(org_export,
                                               message_destinations=args.messagedestination,
                                               functions=args.function,
                                               workflows=args.workflow,
                                               rules=args.rule,
                                               fields=args.field,
                                               artifact_types=args.artifacttype,
                                               datatables=args.datatable,
                                               tasks=args.task,
                                               scripts=args.script,
                                               incident_types=args.incidenttype,
                                               playbooks=args.playbook)

    # Get 'minified' version of the export. This is used in to create export.res
    min_extract_data = sdk_helpers.minify_export(org_export,
                                                 message_destinations=sdk_helpers.get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, extract_data.get("message_destinations")),
                                                 functions=sdk_helpers.get_object_api_names(ResilientObjMap.FUNCTIONS, extract_data.get("functions")),
                                                 workflows=sdk_helpers.get_object_api_names(ResilientObjMap.WORKFLOWS, extract_data.get("workflows")),
                                                 rules=sdk_helpers.get_object_api_names(ResilientObjMap.RULES, extract_data.get("rules")),
                                                 fields=extract_data.get("all_fields"),
                                                 artifact_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_ARTIFACT_TYPES, extract_data.get("artifact_types")),
                                                 datatables=sdk_helpers.get_object_api_names(ResilientObjMap.DATATABLES, extract_data.get("datatables")),
                                                 tasks=sdk_helpers.get_object_api_names(ResilientObjMap.TASKS, extract_data.get("tasks")),
                                                 phases=sdk_helpers.get_object_api_names(ResilientObjMap.PHASES, extract_data.get("phases")),
                                                 scripts=sdk_helpers.get_object_api_names(ResilientObjMap.SCRIPTS, extract_data.get("scripts")),
                                                 incident_types=sdk_helpers.get_object_api_names(ResilientObjMap.INCIDENT_TYPES, extract_data.get("incident_types")),
                                                 playbooks=sdk_helpers.get_object_api_names(ResilientObjMap.PLAYBOOKS, extract_data.get(constants.CUST_PLAYBOOKS)))

    # Convert dict to JSON string (unicode() branch keeps Python 2 compatibility)
    if sys.version_info.major >= 3:
        res_data = json.dumps(min_extract_data, ensure_ascii=False)
    else:
        res_data = unicode(json.dumps(min_extract_data, ensure_ascii=False))

    # Generate path to file
    # NOTE(review): export_date appears to be epoch milliseconds, hence /1000.0 — confirm
    file_name = "export-{0}".format(sdk_helpers.get_timestamp(org_export.get("export_date", 0) / 1000.0))

    # If custom name supplied, prepend it
    if args.name:
        file_name = "{0}-{1}".format(args.name, file_name)

    path_file_to_write = os.path.join(output_base, "{0}.res".format(file_name))

    LOG.info("Generating %s.res", file_name)

    # Write the file
    sdk_helpers.write_file(path_file_to_write, res_data)

    LOG.debug('Wrote: %s', path_file_to_write)

    # If we should create .zip archive
    if args.zip:
        LOG.info("Generating %s.zip", file_name)

        # Get path to .zip
        path_dir_to_zip = os.path.join(output_base, file_name)

        # Create directory
        os.makedirs(path_dir_to_zip)

        # Copy the written export file into new dir
        shutil.copy(path_file_to_write, path_dir_to_zip)

        # zip the dir
        # NOTE(review): make_archive writes the .zip relative to the cwd,
        # which is why the move below may be needed
        the_zip = shutil.make_archive(base_name=file_name, format="zip", root_dir=path_dir_to_zip)

        if output_base != os.path.dirname(the_zip):
            # Move the zip into the output base
            shutil.move(the_zip, output_base)

        LOG.debug('Wrote: %s.zip', path_dir_to_zip)

        # Remove directory
        shutil.rmtree(path_dir_to_zip)

    LOG.info("'extract' complete")
def _clone_multiple_action_objects(self, args, new_export_data, org_export):
    """
    Clone every requested Action Object by prepending args.prefix to its
    unique identifier, appending the cloned copies into new_export_data
    (mutated in place).

    :param args: parsed command line args; reads prefix, changetype and the
        per-object filter lists
    :param new_export_data: the export DTO being built for upload; mutated in place
    :param org_export: the full org export the source objects are read from
    """
    LOG.info("Prefix provided {}, copying multiple Action Objects".format(args.prefix))

    # Get data required from the export
    jinja_data = get_from_export(org_export,
                                 message_destinations=args.messagedestination,
                                 functions=args.function,
                                 workflows=args.workflow,
                                 rules=args.rule,
                                 scripts=args.script,
                                 get_related_objects=False)

    # Get 'minified' version of the export. This is used in customize.py
    minified = minify_export(org_export,
                             message_destinations=get_object_api_names(ResilientObjMap.MESSAGE_DESTINATIONS, jinja_data.get("message_destinations")),
                             functions=get_object_api_names(ResilientObjMap.FUNCTIONS, jinja_data.get("functions")),
                             workflows=get_object_api_names(ResilientObjMap.WORKFLOWS, jinja_data.get("workflows")),
                             rules=get_object_api_names(ResilientObjMap.RULES, jinja_data.get("rules")),
                             scripts=get_object_api_names(ResilientObjMap.SCRIPTS, jinja_data.get("scripts")))

    # For each support object
    for object_type in ACTION_OBJECT_KEYS:
        for obj in minified.get(object_type, []):
            old_api_name = obj.get('export_key')
            new_api_name = "{}_{}".format(args.prefix, old_api_name)

            # If the object we are dealing with was one of the requested objects
            if self.action_obj_was_specified(args, obj):
                # Ensure the new_api_name for each object is unique, raise an Exception otherwise
                CmdClone.perform_duplication_check(object_type, resilient_export_obj_mapping.get(object_type), "Object", new_api_name, org_export)

                # The branches below duck-type the object: presence of
                # 'display_name' / 'content.xml' / 'programmatic_name' picks
                # the object kind — TODO confirm these keys are mutually exclusive

                # Handle functions for cloning
                if obj.get('display_name', False):
                    new_function = CmdClone.replace_function_object_attrs(obj, new_api_name)
                    new_export_data[object_type].append(new_function)

                # Handle workflows for cloning
                elif obj.get('content', {}).get('xml', False):
                    new_export_data['workflows'].append(CmdClone.replace_workflow_object_attrs(obj, old_api_name, new_api_name, obj['name'], args.changetype))

                # Handle Message Destination. Of the supported Action Object types;
                # only Message Destination and Workflow use programmatic_name
                elif obj.get('programmatic_name', False):
                    if obj.get('api_keys', False) and obj.get('users', False):
                        # Save the User and API key auth for upload after initial clone
                        # (writes to the module-level resilient_msg_dest_auth_info dict)
                        resilient_msg_dest_auth_info['users'].update({obj['name']: obj['users']})
                        resilient_msg_dest_auth_info['api_keys'].update({obj['name']: obj['api_keys']})
                    new_export_data['message_destinations'].append(CmdClone.replace_md_object_attrs(obj, new_api_name))

                # Handle Rules and everything else
                else:
                    new_export_data[object_type].append(CmdClone.replace_rule_object_attrs(obj, new_api_name, args.changetype))
def execute_command(self, args):
    """
    When the clone command is executed, we want to perform these actions:
    1: Setup a client to Resilient and get the latest export file.
    2: Cloning a single object per type:
        For each specified action type:
        2.1: Ensure the user provided both the source and new action name
        2.2: Check that the provided source action type exists and the new action name is unique.
        2.3: Prepare a new Object from the source action object replacing the names as needed.
    3: Cloning multiple objects with a prefix:
        For each specified action type:
        3.1: Ensure the user provided source action objects which exist
        3.2: Prepend the prefix to the unique identifiers for each object
    4: Prepare our configuration import object for upload with the newly cloned action objects
    5: Submit a configuration import through the API
    6: Confirm the change has been accepted

    :param args: The command line args passed with the clone command of Resilient Objects to be cloned
    :type args: argparse.ArgumentParser.Namespace
    :raises SDKException: An SDKException detailing what failed in the operation
    """
    SDKException.command_ran = "clone"
    LOG.debug("Called clone with %s", args)

    # Used to time the whole command (logged at the end)
    start = datetime.now()

    # Instantiate connection to the Resilient Appliance
    CmdClone.res_client = get_resilient_client(path_config_file=args.config)

    org_export = get_latest_org_export(CmdClone.res_client)

    # For the new export data DTO minify the export to only its mandatory attributes
    new_export_data = minify_export(org_export)

    # If any of the supported args are provided
    if any([args.function, args.workflow, args.rule, args.messagedestination, args.script, args.playbook]):
        if args.prefix:
            # Prefix mode: clone many objects at once by prepending the prefix
            self._clone_multiple_action_objects(args, new_export_data, org_export)

        else:
            if args.script:
                # If a Script was provided, call _clone_action_object with Script related params and
                # add the newly cloned Script to new_export_data
                new_export_data['scripts'] = self._clone_action_object(
                    args.script, org_export, 'Script', ResilientObjMap.SCRIPTS, 'scripts',
                    CmdClone.replace_common_object_attrs, args.changetype)

            if args.function:
                # If a Function was provided, call _clone_action_object with Function related params and
                # add the newly cloned Function to new_export_data
                new_export_data['functions'] = self._clone_action_object(
                    args.function, org_export, 'Function', ResilientObjMap.FUNCTIONS, 'functions',
                    CmdClone.replace_function_object_attrs)

            if args.rule:
                # If a Rule was provided, call _clone_action_object with Rule related params and
                # add the newly cloned Rule to new_export_data
                new_export_data["actions"] = self._clone_action_object(
                    args.rule, org_export, 'Rule', ResilientObjMap.RULES, 'actions',
                    CmdClone.replace_rule_object_attrs, new_object_type=args.changetype)

            if args.workflow:
                # If a Workflow was provided, call _clone_workflow with Workflow related params and
                # add the newly cloned Workflow to new_export_data
                new_export_data["workflows"] = self._clone_workflow(args, org_export)

            if args.messagedestination:
                # If a Message Destination was provided, call _clone_action_object with Message Destination related params and
                # add the newly cloned Message Destination to new_export_data
                new_export_data["message_destinations"] = self._clone_action_object(
                    args.messagedestination, org_export, 'Message Destination',
                    ResilientObjMap.MESSAGE_DESTINATIONS, 'message_destinations',
                    CmdClone.replace_md_object_attrs)

            if args.playbook:
                # Playbooks require a minimum SOAR server version
                if get_resilient_server_version(CmdClone.res_client) < constants.MIN_SOAR_SERVER_VERSION_PLAYBOOKS:
                    raise SDKException(u"Playbooks are only supported with IBM SOAR >= {0}".format(constants.MIN_SOAR_SERVER_VERSION_PLAYBOOKS))

                # If a Playbook was provided, call _clone_action_object with Playbook related params and
                # add the newly cloned Playbook to new_export_data
                new_export_data[constants.CUST_PLAYBOOKS] = self._clone_action_object(
                    input_args=args.playbook,
                    org_export=org_export,
                    obj_name='Playbook',
                    obj_identifier=ResilientObjMap.PLAYBOOKS,
                    obj_key=constants.CUST_PLAYBOOKS,
                    replace_fn=CmdClone.replace_playbook_object_attrs,
                    new_object_type=args.changetype)

        # Upload the cloned objects through the Configurations import API
        add_configuration_import(new_export_data, CmdClone.res_client)

        # If any message destinations were cloned, after creation attach a Authorised User or API Key
        # to the destination. Providing this info in the Configurations API call above will be ignored.
        self.add_authorised_info_to_md(new_export_data)

    else:
        self.parser.print_help()

    time_delta = (datetime.now() - start).total_seconds()
    LOG.info("'clone' command finished in {} seconds".format(time_delta))